fun.go
//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha512"
	"fmt"
	"html/template"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"regexp"
	"strings"
	"time"

	"golang.org/x/net/html"
	"humungus.tedunangst.com/r/webs/cache"
	"humungus.tedunangst.com/r/webs/htfilter"
	"humungus.tedunangst.com/r/webs/httpsig"
	"humungus.tedunangst.com/r/webs/templates"
)

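// allowedclasses is the whitelist of span classes preserved by the HTML
// filter; these look like code highlighting token classes (keyword,
// builtin, string, number, type, operator, comment, ...).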
var allowedclasses = make(map[string]bool)

func init() {
	allowedclasses["kw"] = true
	allowedclasses["bi"] = true
	allowedclasses["st"] = true
	allowedclasses["nm"] = true
	allowedclasses["tp"] = true
	allowedclasses["op"] = true
	allowedclasses["cm"] = true
	allowedclasses["al"] = true
	allowedclasses["dl"] = true
}

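// reverbolate massages a list of honks for display: it fixes up the verb
// and style, renders mentions, hashtags, and emoji, filters the HTML, and
// prunes attachments that end up rendered inline.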
func reverbolate(userid int64, honks []*Honk) {
	for _, h := range honks {
		h.What += "ed"
		if h.What == "tonked" {
			h.What = "honked back"
			h.Style += " subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		translate(h, false)
		local := false
		if (h.Whofore == 2 || h.Whofore == 3) || h.What != "bonked" {
			local = true
		}
		if h.Whofore == 2 || h.Whofore == 3 {
			h.URL = h.XID
			if h.What != "bonked" {
				h.Noise = re_memes.ReplaceAllString(h.Noise, "")
				h.Noise = mentionize(h.Noise)
				h.Noise = ontologize(h.Noise)
			}
			h.Username, h.Handle = handles(h.Honker)
		} else {
			_, h.Handle = handles(h.Honker)
			short := shortname(userid, h.Honker)
			if short != "" {
				h.Username = short
			} else {
				h.Username = h.Handle
				if len(h.Username) > 20 {
					h.Username = h.Username[:20] + ".."
				}
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"

		// filter the HTML; zap collects the XIDs of donks that get
		// rendered inline so they can be dropped from the attachments.
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, false)
			htf.SpanClasses = allowedclasses
			htf.BaseURL, _ = url.Parse(h.XID)
			p, _ := htf.String(h.Precis)
			n, _ := htf.String(h.Noise)
			h.Precis = string(p)
			h.Noise = string(n)
		}

		if userid == -1 {
			if h.Precis != "" {
				h.Open = ""
			}
		} else {
			unsee(userid, h)
			if h.Open == "open" && h.Precis == "unspecified horror" {
				h.Precis = ""
			}
		}
		// don't leave really long posts open
		if len(h.Noise) > 6000 && h.Open == "open" {
			if h.Precis == "" {
				h.Precis = "really freaking long"
			}
			h.Open = ""
		}

		// replace :emoji: shortcodes with inline img tags
		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d.XID] = true
					if d.Local {
						return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
					}
				}
			}
			if local {
				var emu Emu
				emucache.Get(e, &emu)
				if emu.ID != "" {
					return fmt.Sprintf(`<img class="emu" title="%s" src="%s">`, emu.Name, emu.ID)
				}
			}
			return e
		}
		h.Precis = re_emus.ReplaceAllStringFunc(h.Precis, emuxifier)
		h.Noise = re_emus.ReplaceAllStringFunc(h.Noise, emuxifier)

		// drop donks that were rendered inline
		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i].XID] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]

		h.HTPrecis = template.HTML(h.Precis)
		h.HTML = template.HTML(h.Noise)
	}
}

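// replaceimgsand returns an htfilter Imager that rewrites img tags: known
// attachments are pointed back at the local /d/ copy (recording their XIDs
// in zap), and anything else becomes a visible link to the original image.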
func replaceimgsand(zap map[string]bool, absolute bool) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		//title := GetAttr(node, "title")
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		d := finddonk(src)
		if d != nil {
			zap[d.XID] = true
			base := ""
			if absolute {
				base = "https://" + serverName
			}
			return string(templates.Sprintf(`<img alt="%s" title="%s" src="%s/d/%s">`, alt, alt, base, d.XID))
		}
		// unknown images are rendered as visible, escaped img markup with
		// the src turned into a clickable link to the original
		return string(templates.Sprintf(`&lt;img alt="%s" src="<a href="%s">%s</a>"&gt;`, alt, src, src))
	}
}

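// inlineimgsfor returns an htfilter Imager that saves every inline image
// it encounters as a donk attached to the given honk.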
func inlineimgsfor(honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			honk.Donks = append(honk.Donks, d)
		}
		log.Printf("inline img with src: %s", src)
		return ""
	}
}

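// imaginate walks a honk's HTML and attaches any inline images it finds.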
func imaginate(honk *Honk) {
	var htf htfilter.Filter
	htf.Imager = inlineimgsfor(honk)
	htf.BaseURL, _ = url.Parse(honk.XID)
	htf.String(honk.Noise)
}

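// translate runs a honk's noise through markitzero to produce HTML,
// splitting off the precis and collecting ontologies along the way.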
func translate(honk *Honk, redoimages bool) {
	if honk.Format == "html" {
		return
	}
	noise := honk.Noise
	// a leading "DZ:" line is split off into the precis (summary)
	if strings.HasPrefix(noise, "DZ:") {
		idx := strings.Index(noise, "\n")
		if idx == -1 {
			honk.Precis = noise
			noise = ""
		} else {
			honk.Precis = noise[:idx]
			noise = noise[idx+1:]
		}
	}
	honk.Precis = markitzero(strings.TrimSpace(honk.Precis))

	noise = strings.TrimSpace(noise)
	noise = markitzero(noise)
	honk.Noise = noise
	honk.Onts = oneofakind(ontologies(honk.Noise))

	if redoimages {
		// rewrite img tags (dropping donks that get inlined), strip
		// meme: directives, and mark up mentions and ontologies
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, true)
			htf.SpanClasses = allowedclasses
			p, _ := htf.String(honk.Precis)
			n, _ := htf.String(honk.Noise)
			honk.Precis = string(p)
			honk.Noise = string(n)
		}
		j := 0
		for i := 0; i < len(honk.Donks); i++ {
			if !zap[honk.Donks[i].XID] {
				honk.Donks[j] = honk.Donks[i]
				j++
			}
		}
		honk.Donks = honk.Donks[:j]

		honk.Noise = re_memes.ReplaceAllString(honk.Noise, "")
		honk.Noise = ontologize(mentionize(honk.Noise))
		honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
	}
}

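// xcelerate maps each byte onto a 64 character alphabet, turning random
// bytes or hash output into short URL-safe identifiers.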
func xcelerate(b []byte) string {
	letters := "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	for i, c := range b {
		b[i] = letters[c&63]
	}
	s := string(b)
	return s
}

// shortxid derives a short identifier from a hash of an xid.
func shortxid(xid string) string {
	h := sha512.New512_256()
	io.WriteString(h, xid)
	return xcelerate(h.Sum(nil)[:20])
}

// xfiltrate generates a fresh random identifier.
func xfiltrate() string {
	var b [18]byte
	rand.Read(b[:])
	return xcelerate(b[:])
}

var re_hashes = regexp.MustCompile(`(?:^| |>)#[[:alnum:]]*[[:alpha:]][[:alnum:]_-]*`)

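// ontologies extracts the #hashtags appearing in a string.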
func ontologies(s string) []string {
	m := re_hashes.FindAllString(s, -1)
	j := 0
	for _, h := range m {
		if h[0] == '&' {
			continue
		}
		// trim the leading space or > matched by the regexp
		if h[0] != '#' {
			h = h[1:]
		}
		m[j] = h
		j++
	}
	return m[:j]
}

var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)
var re_urltions = regexp.MustCompile(`@https://\S+`)

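// grapevine collects the addresses of everyone @mentioned in a string.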
func grapevine(s string) []string {
	var mentions []string
	m := re_mentions.FindAllString(s, -1)
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, where)
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, m[i][1:])
	}
	return mentions
}

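// bunchofgrapes is like grapevine, but returns full Mentions with both the
// mention as written and the address it resolves to.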
func bunchofgrapes(s string) []Mention {
	m := re_mentions.FindAllString(s, -1)
	var mentions []Mention
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, Mention{Who: m[i], Where: where})
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, Mention{Who: m[i][1:], Where: m[i][1:]})
	}
	return mentions
}

type Emu struct {
	ID   string
	Name string
}

var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)

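// emucache resolves an :emoji: shortcode to the URL of a matching png in
// the emus data directory; the short expiry means newly added emoji are
// picked up quickly.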
var emucache = cache.New(cache.Options{Filler: func(ename string) (Emu, bool) {
	fname := ename[1 : len(ename)-1]
	_, err := os.Stat(dataDir + "/emus/" + fname + ".png")
	if err != nil {
		return Emu{Name: ename, ID: ""}, true
	}
	url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
	return Emu{ID: url, Name: ename}, true
}, Duration: 10 * time.Second})

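// herdofemus returns the custom emoji actually referenced in noise.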
func herdofemus(noise string) []Emu {
	m := re_emus.FindAllString(noise, -1)
	m = oneofakind(m)
	var emus []Emu
	for _, e := range m {
		var emu Emu
		emucache.Get(e, &emu)
		if emu.ID == "" {
			continue
		}
		emus = append(emus, emu)
	}
	return emus
}

var re_memes = regexp.MustCompile("meme: ?([^\n]+)")
var re_avatar = regexp.MustCompile("avatar: ?([^\n]+)")

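// memetize replaces "meme: name" directives with an attachment loaded from
// the local memes directory.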
func memetize(honk *Honk) {
	repl := func(x string) string {
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		// sniff the content type from the first 512 bytes
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()

		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		fileid, err := savefile("", name, name, url, ct, false, nil)
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		d := &Donk{
			FileID: fileid,
			XID:    "",
			Name:   name,
			Media:  ct,
			URL:    url,
			Local:  false,
		}
		honk.Donks = append(honk.Donks, d)
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}

var re_quickmention = regexp.MustCompile("(^|[ \n])@[[:alnum:]]+([ \n.]|$)")

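// quickrename expands bare @name shortcuts into the full handle of the
// matching saved honker, repeating until nothing more changes.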
func quickrename(s string, userid int64) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			prefix := ""
			// peel off any leading space or newline
			if m[0] == ' ' || m[0] == '\n' {
				prefix = m[:1]
				m = m[1:]
			}
			prefix += "@"
			m = m[1:]
			tail := ""
			// peel off any trailing space, newline, or period
			if last := m[len(m)-1]; last == ' ' || last == '\n' || last == '.' {
				tail = m[len(m)-1:]
				m = m[:len(m)-1]
			}

			xid := fullname(m, userid)

			if xid != "" {
				_, name := handles(xid)
				if name != "" {
					nonstop = true
					m = name
				}
			}
			return prefix + m + tail
		})
	}
	return s
}

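// shortnames caches, per user, the map from a honker's XID to the short
// name the user gave it.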
var shortnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.XID] = h.Name
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func shortname(userid int64, xid string) string {
	var m map[string]string
	ok := shortnames.Get(userid, &m)
	if ok {
		return m[xid]
	}
	return ""
}

// fullnames is the reverse map: a honker's short name back to its XID.
var fullnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.Name] = h.XID
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func fullname(name string, userid int64) string {
	var m map[string]string
	ok := fullnames.Get(userid, &m)
	if ok {
		return m[name]
	}
	return ""
}

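// mentionize wraps @user@host and @https:// mentions in h-card mention links.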
func mentionize(s string) string {
	s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
		where := gofish(m)
		if where == "" {
			return m
		}
		who := m[0 : 1+strings.IndexByte(m[1:], '@')]
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(where), html.EscapeString(who))
	})
	s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(m[1:]), html.EscapeString(m))
	})
	return s
}

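// ontologize links #hashtags to this server's /o/ ontology pages.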
func ontologize(s string) string {
	s = re_hashes.ReplaceAllStringFunc(s, func(o string) string {
		if o[0] == '&' {
			return o
		}
		p := ""
		h := o
		if h[0] != '#' {
			p = h[:1]
			h = h[1:]
		}
		return fmt.Sprintf(`%s<a href="https://%s/o/%s">%s</a>`, p, serverName,
			strings.ToLower(h[1:]), h)
	})
	return s
}

var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")
var re_urlhost = regexp.MustCompile("https://([^/ ]+)")

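// originate extracts the host part of an https URL.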
func originate(u string) string {
	m := re_urlhost.FindStringSubmatch(u)
	if len(m) > 1 {
		return m[1]
	}
	return ""
}

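// allhandles caches the short handle for an actor xid, consulting the
// database first and falling back to investigating the actor or guessing
// from the URL.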
var allhandles = cache.New(cache.Options{Filler: func(xid string) (string, bool) {
	var handle string
	row := stmtGetXonker.QueryRow(xid, "handle")
	err := row.Scan(&handle)
	if err != nil {
		log.Printf("need to get a handle: %s", xid)
		info, err := investigate(xid)
		if err != nil {
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = info.Name
		}
	}
	return handle, true
}})

// handle, handle@host
func handles(xid string) (string, string) {
	if xid == "" {
		return "", ""
	}
	var handle string
	allhandles.Get(xid, &handle)
	if handle == xid {
		return xid, xid
	}
	return handle, handle + "@" + originate(xid)
}

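// butnottooloud blanks out followers collections in an audience list so a
// honk isn't delivered quite so widely.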
func butnottooloud(aud []string) {
	for i, a := range aud {
		if strings.HasSuffix(a, "/followers") {
			aud[i] = ""
		}
	}
}

// loudandproud reports whether the audience includes thewholeworld (the
// public address).
func loudandproud(aud []string) bool {
	for _, a := range aud {
		if a == thewholeworld {
			return true
		}
	}
	return false
}

// firstclass reports whether thewholeworld is the first audience entry.
func firstclass(honk *Honk) bool {
	return honk.Audience[0] == thewholeworld
}

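// oneofakind deduplicates a slice of strings in place, dropping empties.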
func oneofakind(a []string) []string {
	seen := make(map[string]bool)
	seen[""] = true
	j := 0
	for _, s := range a {
		if !seen[s] {
			seen[s] = true
			a[j] = s
			j++
		}
	}
	return a[:j]
}

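// ziggies caches each local user's signing key info (key name and secret
// key) for http signatures.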
var ziggies = cache.New(cache.Options{Filler: func(userid int64) (*KeyInfo, bool) {
	var user *WhatAbout
	ok := somenumberedusers.Get(userid, &user)
	if !ok {
		return nil, false
	}
	ki := new(KeyInfo)
	ki.keyname = user.URL + "#key"
	ki.seckey = user.SecKey
	return ki, true
}})

func ziggy(userid int64) *KeyInfo {
	var ki *KeyInfo
	ziggies.Get(userid, &ki)
	return ki
}

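// zaggies caches remote public keys, fetching and ingesting the actor's
// key on demand when it isn't already in the database.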
var zaggies = cache.New(cache.Options{Filler: func(keyname string) (*rsa.PublicKey, bool) {
	var data string
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	err := row.Scan(&data)
	if err != nil {
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return nil, true
		}
		allinjest(originate(keyname), j)
		row = stmtGetXonker.QueryRow(keyname, "pubkey")
		err = row.Scan(&data)
		if err != nil {
			log.Printf("key not found after ingesting")
			return nil, true
		}
	}
	_, key, err := httpsig.DecodeKey(data)
	if err != nil {
		log.Printf("error decoding %s pubkey: %s", keyname, err)
		return nil, true
	}
	return key, true
}, Limit: 512})

func zaggy(keyname string) *rsa.PublicKey {
	var key *rsa.PublicKey
	zaggies.Get(keyname, &key)
	return key
}

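// savingthrow discards the cached copy of a public key (and a sufficiently
// old database copy) so that a stale key will be refetched on next use.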
func savingthrow(keyname string) {
	when := time.Now().UTC().Add(-30 * time.Minute).Format(dbtimeformat)
	stmtDeleteXonker.Exec(keyname, "pubkey", when)
	zaggies.Clear(keyname)
}

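// keymatch reports whether a key name belongs to the given actor, returning
// the actor's origin if it does and "" otherwise.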
func keymatch(keyname string, actor string) string {
	hash := strings.IndexByte(keyname, '#')
	if hash == -1 {
		hash = len(keyname)
	}
	owner := keyname[0:hash]
	if owner == actor {
		return originate(actor)
	}
	return ""
}