honk @ 9404eb134ab3290c50f2898202d78b5ab2e0bc38

my fork of honk

fun.go

//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha512"
	"fmt"
	"html/template"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"regexp"
	"strings"
	"time"

	"golang.org/x/net/html"
	"humungus.tedunangst.com/r/webs/cache"
	"humungus.tedunangst.com/r/webs/htfilter"
	"humungus.tedunangst.com/r/webs/httpsig"
	"humungus.tedunangst.com/r/webs/templates"
)

var allowedclasses = make(map[string]bool)

func init() {
	allowedclasses["kw"] = true
	allowedclasses["bi"] = true
	allowedclasses["st"] = true
	allowedclasses["nm"] = true
	allowedclasses["tp"] = true
	allowedclasses["op"] = true
	allowedclasses["cm"] = true
	allowedclasses["al"] = true
	allowedclasses["dl"] = true
}

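// reverbolate fixes up a batch of honks for display: verb tenses and style
// classes, usernames and handles, mention/hashtag/emu markup, image
// rewriting, and the precis/open state, then fills in the HTML fields used
// by the templates.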
func reverbolate(userid int64, honks []*Honk) {
	for _, h := range honks {
		h.What += "ed"
		if h.What == "tonked" {
			h.What = "honked back"
			h.Style += " subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		translate(h, false)
		if h.Whofore == 2 || h.Whofore == 3 {
			h.URL = h.XID
			if h.What != "bonked" {
				h.Noise = re_memes.ReplaceAllString(h.Noise, "")
				h.Noise = mentionize(h.Noise)
				h.Noise = ontologize(h.Noise)
			}
			h.Username, h.Handle = handles(h.Honker)
		} else {
			_, h.Handle = handles(h.Honker)
			short := shortname(userid, h.Honker)
			if short != "" {
				h.Username = short
			} else {
				h.Username = h.Handle
				if len(h.Username) > 20 {
					h.Username = h.Username[:20] + ".."
				}
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"

		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, false)
			htf.SpanClasses = allowedclasses
			htf.BaseURL, _ = url.Parse(h.XID)
			p, _ := htf.String(h.Precis)
			n, _ := htf.String(h.Noise)
			h.Precis = string(p)
			h.Noise = string(n)
		}

		if userid == -1 {
			if h.Precis != "" {
				h.Open = ""
			}
		} else {
			unsee(userid, h)
			if h.Open == "open" && h.Precis == "unspecified horror" {
				h.Precis = ""
			}
		}
		if len(h.Noise) > 6000 && h.Open == "open" {
			if h.Precis == "" {
				h.Precis = "really freaking long"
			}
			h.Open = ""
		}

		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d.XID] = true
					if d.Local {
						return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
					}
				}
			}
			return e
		}
		h.Precis = re_emus.ReplaceAllStringFunc(h.Precis, emuxifier)
		h.Noise = re_emus.ReplaceAllStringFunc(h.Noise, emuxifier)

		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i].XID] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]

		h.HTPrecis = template.HTML(h.Precis)
		h.HTML = template.HTML(h.Noise)
	}
}

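// replaceimgsand returns an htfilter Imager that renders known attachments
// as local /d/ images (recording them in zap so duplicate donks can be
// dropped), keeps emoji as their alt text, and turns unknown images into
// plain links.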
func replaceimgsand(zap map[string]bool, absolute bool) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		//title := GetAttr(node, "title")
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		d := finddonk(src)
		if d != nil {
			zap[d.XID] = true
			base := ""
			if absolute {
				base = "https://" + serverName
			}
			return string(templates.Sprintf(`<img alt="%s" title="%s" src="%s/d/%s">`, alt, alt, base, d.XID))
		}
		return string(templates.Sprintf(`&lt;img alt="%s" src="<a href="%s">%s</a>"&gt;`, alt, src, src))
	}
}

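// inlineimgsfor returns an htfilter Imager that saves any inline <img>
// sources as donks attached to the honk and drops the tag from the text.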
func inlineimgsfor(honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			honk.Donks = append(honk.Donks, d)
		}
		log.Printf("inline img with src: %s", src)
		return ""
	}
}

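// imaginate runs the noise through the html filter solely for the side
// effect of collecting inline images as donks.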
func imaginate(honk *Honk) {
	var htf htfilter.Filter
	htf.Imager = inlineimgsfor(honk)
	htf.BaseURL, _ = url.Parse(honk.XID)
	htf.String(honk.Noise)
}

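// translate converts a honk's noise from markdown-ish text into HTML:
// a leading "DZ:" line is split off into the precis, the rest goes through
// markitzero, and hashtags are collected as ontologies. With redoimages set,
// known images are replaced with local absolute URLs and mentions are
// marked up with u-url classes.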
func translate(honk *Honk, redoimages bool) {
	if honk.Format == "html" {
		return
	}
	noise := honk.Noise
	if strings.HasPrefix(noise, "DZ:") {
		idx := strings.Index(noise, "\n")
		if idx == -1 {
			honk.Precis = noise
			noise = ""
		} else {
			honk.Precis = noise[:idx]
			noise = noise[idx+1:]
		}
	}
	honk.Precis = markitzero(strings.TrimSpace(honk.Precis))

	noise = strings.TrimSpace(noise)
	noise = markitzero(noise)
	honk.Noise = noise
	honk.Onts = oneofakind(ontologies(honk.Noise))

	if redoimages {
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, true)
			htf.SpanClasses = allowedclasses
			p, _ := htf.String(honk.Precis)
			n, _ := htf.String(honk.Noise)
			honk.Precis = string(p)
			honk.Noise = string(n)
		}
		j := 0
		for i := 0; i < len(honk.Donks); i++ {
			if !zap[honk.Donks[i].XID] {
				honk.Donks[j] = honk.Donks[i]
				j++
			}
		}
		honk.Donks = honk.Donks[:j]

		honk.Noise = re_memes.ReplaceAllString(honk.Noise, "")
		honk.Noise = ontologize(mentionize(honk.Noise))
		honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
	}
}

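// xcelerate maps each byte onto a 64 character alphabet. shortxid and
// xfiltrate use it to build short identifiers from a truncated hash of an
// xid or from random bytes.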
func xcelerate(b []byte) string {
	letters := "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	for i, c := range b {
		b[i] = letters[c&63]
	}
	s := string(b)
	return s
}

func shortxid(xid string) string {
	h := sha512.New512_256()
	io.WriteString(h, xid)
	return xcelerate(h.Sum(nil)[:20])
}

func xfiltrate() string {
	var b [18]byte
	rand.Read(b[:])
	return xcelerate(b[:])
}

var re_hashes = regexp.MustCompile(`(?:^| |>)#[[:alnum:]]*[[:alpha:]][[:alnum:]_-]*`)

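// ontologies extracts the #hashtags from a string.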
func ontologies(s string) []string {
	m := re_hashes.FindAllString(s, -1)
	j := 0
	for _, h := range m {
		if h[0] == '&' {
			continue
		}
		if h[0] != '#' {
			h = h[1:]
		}
		m[j] = h
		j++
	}
	return m[:j]
}

var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)
var re_urltions = regexp.MustCompile(`@https://\S+`)

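// grapevine collects the resolved addresses of every @user@host and
// @https:// mention in a string.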
func grapevine(s string) []string {
	var mentions []string
	m := re_mentions.FindAllString(s, -1)
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, where)
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, m[i][1:])
	}
	return mentions
}

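// bunchofgrapes is like grapevine, but returns Mention structs pairing the
// mention text with its resolved address.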
func bunchofgrapes(s string) []Mention {
	m := re_mentions.FindAllString(s, -1)
	var mentions []Mention
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, Mention{Who: m[i], Where: where})
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, Mention{Who: m[i][1:], Where: m[i][1:]})
	}
	return mentions
}

type Emu struct {
	ID   string
	Name string
}

var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)

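// herdofemus finds the :emu: shortcodes in the noise that correspond to
// image files in the emus directory.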
func herdofemus(noise string) []Emu {
	m := re_emus.FindAllString(noise, -1)
	m = oneofakind(m)
	var emus []Emu
	for _, e := range m {
		fname := e[1 : len(e)-1]
		_, err := os.Stat("emus/" + fname + ".png")
		if err != nil {
			continue
		}
		url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
		emus = append(emus, Emu{ID: url, Name: e})
	}
	return emus
}

var re_memes = regexp.MustCompile("meme: ?([^\n]+)")
var re_avatar = regexp.MustCompile("avatar: ?([^\n]+)")

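// memetize replaces "meme: name" lines with an attachment pointing at the
// named file in the memes directory.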
func memetize(honk *Honk) {
	repl := func(x string) string {
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()

		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		fileid, err := savefile("", name, name, url, ct, false, nil)
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		d := &Donk{
			FileID: fileid,
			XID:    "",
			Name:   name,
			Media:  ct,
			URL:    url,
			Local:  false,
		}
		honk.Donks = append(honk.Donks, d)
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}

var re_quickmention = regexp.MustCompile("(^|[ \n])@[[:alnum:]]+([ \n]|$)")

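// quickrename expands bare @name mentions into full @handle@host form using
// the user's saved honker names, looping until no more substitutions occur.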
func quickrename(s string, userid int64) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			prefix := ""
			if m[0] == ' ' || m[0] == '\n' {
				prefix = m[:1]
				m = m[1:]
			}
			prefix += "@"
			m = m[1:]
			tail := ""
			if m[len(m)-1] == ' ' || m[len(m)-1] == '\n' {
				tail = m[len(m)-1:]
				m = m[:len(m)-1]
			}

			xid := fullname(m, userid)

			if xid != "" {
				_, name := handles(xid)
				if name != "" {
					nonstop = true
					m = name
				}
			}
			return prefix + m + tail
		})
	}
	return s
}

var shortnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.XID] = h.Name
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func shortname(userid int64, xid string) string {
	var m map[string]string
	ok := shortnames.Get(userid, &m)
	if ok {
		return m[xid]
	}
	return ""
}

var fullnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.Name] = h.XID
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func fullname(name string, userid int64) string {
	var m map[string]string
	ok := fullnames.Get(userid, &m)
	if ok {
		return m[name]
	}
	return ""
}

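// mentionize wraps @mentions in h-card links pointing at the mentioned
// actor.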
func mentionize(s string) string {
	s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
		where := gofish(m)
		if where == "" {
			return m
		}
		who := m[0 : 1+strings.IndexByte(m[1:], '@')]
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(where), html.EscapeString(who))
	})
	s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(m[1:]), html.EscapeString(m))
	})
	return s
}

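// ontologize turns #hashtags into links to the local /o/ ontology pages.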
func ontologize(s string) string {
	s = re_hashes.ReplaceAllStringFunc(s, func(o string) string {
		if o[0] == '&' {
			return o
		}
		p := ""
		h := o
		if h[0] != '#' {
			p = h[:1]
			h = h[1:]
		}
		return fmt.Sprintf(`%s<a href="https://%s/o/%s">%s</a>`, p, serverName,
			strings.ToLower(h[1:]), h)
	})
	return s
}

var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")
var re_urlhost = regexp.MustCompile("https://([^/ ]+)")

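// originate extracts the host name from a url.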
func originate(u string) string {
	m := re_urlhost.FindStringSubmatch(u)
	if len(m) > 1 {
		return m[1]
	}
	return ""
}

var allhandles = cache.New(cache.Options{Filler: func(xid string) (string, bool) {
	var handle string
	row := stmtGetXonker.QueryRow(xid, "handle")
	err := row.Scan(&handle)
	if err != nil {
		log.Printf("need to get a handle: %s", xid)
		info, err := investigate(xid)
		if err != nil {
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = info.Name
		}
	}
	return handle, true
}})

// handle, handle@host
func handles(xid string) (string, string) {
	if xid == "" {
		return "", ""
	}
	var handle string
	allhandles.Get(xid, &handle)
	if handle == xid {
		return xid, xid
	}
	return handle, handle + "@" + originate(xid)
}

func butnottooloud(aud []string) {
	for i, a := range aud {
		if strings.HasSuffix(a, "/followers") {
			aud[i] = ""
		}
	}
}

func loudandproud(aud []string) bool {
	for _, a := range aud {
		if a == thewholeworld {
			return true
		}
	}
	return false
}

func firstclass(honk *Honk) bool {
	return honk.Audience[0] == thewholeworld
}

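// oneofakind deduplicates a slice of strings in place, dropping empties.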
func oneofakind(a []string) []string {
	seen := make(map[string]bool)
	seen[""] = true
	j := 0
	for _, s := range a {
		if !seen[s] {
			seen[s] = true
			a[j] = s
			j++
		}
	}
	return a[:j]
}

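// ziggy returns the private key info used to sign outgoing requests for a
// local user, cached by userid.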
var ziggies = cache.New(cache.Options{Filler: func(userid int64) (*KeyInfo, bool) {
	var user *WhatAbout
	ok := somenumberedusers.Get(userid, &user)
	if !ok {
		return nil, false
	}
	ki := new(KeyInfo)
	ki.keyname = user.URL + "#key"
	ki.seckey = user.SecKey
	return ki, true
}})

func ziggy(userid int64) *KeyInfo {
	var ki *KeyInfo
	ziggies.Get(userid, &ki)
	return ki
}

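// zaggy looks up the public key for a remote keyname, first from the
// database and then by fetching and ingesting the actor's key document.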
var zaggies = cache.New(cache.Options{Filler: func(keyname string) (*rsa.PublicKey, bool) {
	var data string
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	err := row.Scan(&data)
	if err != nil {
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return nil, true
		}
		allinjest(originate(keyname), j)
		row = stmtGetXonker.QueryRow(keyname, "pubkey")
		err = row.Scan(&data)
		if err != nil {
			log.Printf("key not found after ingesting")
			return nil, true
		}
	}
	_, key, err := httpsig.DecodeKey(data)
	if err != nil {
		log.Printf("error decoding %s pubkey: %s", keyname, err)
		return nil, true
	}
	return key, true
}, Limit: 512})

func zaggy(keyname string) *rsa.PublicKey {
	var key *rsa.PublicKey
	zaggies.Get(keyname, &key)
	return key
}

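// savingthrow forgets a cached public key: the stored copy is deleted
// (subject to a 30 minute cutoff) and the in-memory cache entry is cleared
// so the key will be refetched.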
func savingthrow(keyname string) {
	when := time.Now().UTC().Add(-30 * time.Minute).Format(dbtimeformat)
	stmtDeleteXonker.Exec(keyname, "pubkey", when)
	zaggies.Clear(keyname)
}

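// keymatch checks that a key belongs to the actor it claims to sign for,
// returning the actor's origin on success and "" otherwise.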
func keymatch(keyname string, actor string) string {
	hash := strings.IndexByte(keyname, '#')
	if hash == -1 {
		hash = len(keyname)
	}
	owner := keyname[0:hash]
	if owner == actor {
		return originate(actor)
	}
	return ""
}