fun.go
//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

package main

import (
	"crypto/rand"
	"crypto/sha512"
	"fmt"
	"html/template"
	"io"
	"net/http"
	"net/url"
	"os"
	"path"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/dustin/go-humanize"
	"golang.org/x/net/html"
	"humungus.tedunangst.com/r/webs/gencache"
	"humungus.tedunangst.com/r/webs/htfilter"
	"humungus.tedunangst.com/r/webs/httpsig"
	"humungus.tedunangst.com/r/webs/mz"
	"humungus.tedunangst.com/r/webs/templates"
)

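// allowedclasses lists the span classes permitted to survive HTML filtering.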
var allowedclasses = make(map[string]bool)

func init() {
	allowedclasses["kw"] = true
	allowedclasses["bi"] = true
	allowedclasses["st"] = true
	allowedclasses["nm"] = true
	allowedclasses["tp"] = true
	allowedclasses["op"] = true
	allowedclasses["cm"] = true
	allowedclasses["al"] = true
	allowedclasses["dl"] = true
}

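// relingo maps the stock verbs to user configured replacements, loaded from
// config values named lingo-honked, lingo-bonked, and so on.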
var relingo = make(map[string]string)

func loadLingo() {
	for _, l := range []string{"honked", "bonked", "honked back", "qonked", "evented"} {
		v := l
		k := "lingo-" + strings.ReplaceAll(l, " ", "")
		getconfig(k, &v)
		relingo[l] = v
	}
}

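// prettifydate formats a compact relative age (1s, 5m, 3h, 2d, 1w) for recent
// dates and falls back to an absolute date after roughly a month.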
func prettifydate(d time.Time) string {
	var customMags = []humanize.RelTimeMagnitude{
		{time.Second, "now", time.Second},
		{2 * time.Second, "1s %s", 1},
		{time.Minute, "%ds %s", time.Second},
		{2 * time.Minute, "1m %s", 1},
		{time.Hour, "%dm %s", time.Minute},
		{2 * time.Hour, "1h %s", 1},
		{humanize.Day, "%dh %s", time.Hour},
		{2 * humanize.Day, "1d %s", 1},
		{humanize.Week, "%dd %s", humanize.Day},
		{2 * humanize.Week, "1w %s", 1},
		{humanize.Month, "%dw %s", humanize.Week},
	}

	since := time.Since(d)
	// More than a month, return the actual date.
	if since.Hours() > 730 {
		return d.Format("02 Jan 2006 15:04")
	}

	return humanize.CustomRelTime(d, time.Now(), "", "from now", customMags)
}
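
// reverbolate decorates honks for display: pretty dates, verb and style
// tweaks, short names for honkers, footnotes for mentions and tags missing
// from the noise, emu and image rewriting, and pruning of donks that were
// inlined into the HTML.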
func reverbolate(userid UserID, honks []*Honk) {
	user, _ := somenumberedusers.Get(userid)
	for _, h := range honks {
		// idk where else to put this
		h.DatePretty = prettifydate(h.Date)
		h.What += "ed"
		if h.What == "honked" && h.RID != "" {
			h.What = "honked back"
			h.Style += " subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		if h.Whofore == 1 {
			h.Style += " atme"
		}
		translate(h)
		local := false
		if h.Whofore == 2 || h.Whofore == 3 {
			local = true
		}
		if local && h.What != "bonked" {
			h.Noise = re_memes.ReplaceAllString(h.Noise, "")
		}
		h.Username, h.Handle = handles(h.Honker)
		if !local {
			short := shortname(userid, h.Honker)
			if short != "" {
				h.Username = short
			} else {
				h.Username = h.Handle
				if len(h.Username) > 20 {
					h.Username = h.Username[:20] + ".."
				}
			}
		}
		if user != nil {
			hset := []string{}
			if h.Honker != user.URL {
				hset = append(hset, "@"+h.Handle)
			}
			if user.Options.MentionAll {
				for _, a := range h.Audience {
					if a == h.Honker || a == user.URL {
						continue
					}
					_, hand := handles(a)
					if hand != "" {
						hand = "@" + hand
						hset = append(hset, hand)
					}
				}
			}
			h.Handles = strings.Join(hset, " ")
		}
		if h.URL == "" {
			h.URL = h.XID
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"
		var misto string
		for _, m := range h.Mentions {
			if m.Where != h.Honker && !m.IsPresent(h.Noise) {
				misto += " " + m.Who
			}
		}
		var mistag string
		for _, o := range h.Onts {
			if !OntIsPresent(o, h.Noise) {
				mistag += " " + o
			}
		}
		if len(misto) > 0 || len(mistag) > 0 {
			if len(misto) > 0 {
				misto = "(" + misto[1:] + ")<p>"
			}
			if len(mistag) > 0 {
				mistag = "<p>(" + mistag[1:] + ")"
			}
			h.Noise = misto + h.Noise + mistag
		}

		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, false, h)
			htf.SpanClasses = allowedclasses
			htf.BaseURL, _ = url.Parse(h.XID)
			emuxifier := func(e string) string {
				for _, d := range h.Donks {
					if d.Name == e {
						zap[d.XID] = true
						if d.Local {
							return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
						}
					}
				}
				if local && h.What != "bonked" {
					emu, _ := emucache.Get(e)
					if emu != nil {
						return fmt.Sprintf(`<img class="emu" title="%s" src="%s">`, emu.Name, emu.ID)
					}
				}
				return e
			}
			htf.FilterText = func(w io.Writer, data string) {
				data = htfilter.EscapeText(data)
				data = re_emus.ReplaceAllStringFunc(data, emuxifier)
				io.WriteString(w, data)
			}
			if user != nil {
				htf.RetargetLink = func(href string) string {
					h2 := strings.ReplaceAll(href, "/@", "/users/")
					for _, m := range h.Mentions {
						if h2 == m.Where || href == m.Where {
							return "/h?xid=" + url.QueryEscape(m.Where)
						}
					}
					return href
				}
			}
			p, _ := htf.String(h.Precis)
			n, _ := htf.String(h.Noise)
			h.Precis = string(p)
			h.Noise = string(n)
		}
		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i].XID] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]
	}

	unsee(honks, userid)

	for _, h := range honks {
		renderflags(h)

		h.HTPrecis = template.HTML(h.Precis)
		h.HTML = template.HTML(h.Noise)
		if redo := relingo[h.What]; redo != "" {
			h.What = redo
		}
	}
}

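// replaceimgsand returns an htfilter Imager callback that rewrites img tags
// referring to known donks into local /d/ links (absolute when absolute is
// true), recording the replaced XIDs in zap so callers can drop them from the
// attachment list.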
func replaceimgsand(zap map[string]bool, absolute bool, honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		//title := GetAttr(node, "title")
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		base := path.Base(src)
		didx, _ := strconv.Atoi(base)
		var d *Donk
		if strings.HasPrefix(src, serverPrefix) && didx > 0 && didx <= len(honk.Donks) {
			d = honk.Donks[didx-1]
		} else {
			d = finddonk(src)
		}
		if d != nil {
			zap[d.XID] = true
			base := ""
			if absolute {
				base = serverURL("")
			}
			return string(templates.Sprintf(`<img alt="%s" title="%s" src="%s/d/%s">`, alt, alt, base, d.XID))
		}
		return string(templates.Sprintf(`<img alt="%s" src="<a href="%s">%s</a>">`, alt, src, src))
	}
}

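// translatechonk renders a chonk's noise (markdown or plain text) into
// filtered HTML.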
func translatechonk(ch *Chonk) {
	noise := ch.Noise
	if ch.Format == "markdown" {
		var marker mz.Marker
		noise = marker.Mark(noise)
	}
	var htf htfilter.Filter
	htf.SpanClasses = allowedclasses
	htf.BaseURL, _ = url.Parse(ch.XID)
	ch.HTML, _ = htf.String(noise)
}

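// filterchonk prepares a chonk for display: render it, inline custom emus,
// drop donks that were inlined, trim the leading <p>, and pick a handle for
// the sender.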
func filterchonk(ch *Chonk) {
	translatechonk(ch)

	noise := string(ch.HTML)

	local := originate(ch.XID) == serverName

	zap := make(map[string]bool)
	emuxifier := func(e string) string {
		for _, d := range ch.Donks {
			if d.Name == e {
				zap[d.XID] = true
				if d.Local {
					return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
				}
			}
		}
		if local {
			emu, _ := emucache.Get(e)
			if emu != nil {
				return fmt.Sprintf(`<img class="emu" title="%s" src="%s">`, emu.Name, emu.ID)
			}
		}
		return e
	}
	noise = re_emus.ReplaceAllStringFunc(noise, emuxifier)
	j := 0
	for i := 0; i < len(ch.Donks); i++ {
		if !zap[ch.Donks[i].XID] {
			ch.Donks[j] = ch.Donks[i]
			j++
		}
	}
	ch.Donks = ch.Donks[:j]

	if strings.HasPrefix(noise, "<p>") {
		noise = noise[3:]
	}
	ch.HTML = template.HTML(noise)
	if short := shortname(ch.UserID, ch.Who); short != "" {
		ch.Handle = short
	} else {
		ch.Handle, _ = handles(ch.Who)
	}
}

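// inlineimgsfor returns an Imager callback that saves remote images
// referenced in the noise as donks on the honk, skipping images that already
// point at this server's attachments.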
func inlineimgsfor(honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		base := path.Base(src)
		didx, _ := strconv.Atoi(base)
		if strings.HasPrefix(src, serverPrefix) && didx > 0 && didx <= len(honk.Donks) {
			dlog.Printf("skipping inline image %s", src)
			return ""
		}
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			honk.Donks = append(honk.Donks, d)
		}
		dlog.Printf("inline img with src: %s", src)
		return ""
	}
}

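// imaginate runs the noise through the filter just to collect inline images.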
func imaginate(honk *Honk) {
	var htf htfilter.Filter
	htf.Imager = inlineimgsfor(honk)
	htf.BaseURL, _ = url.Parse(honk.XID)
	htf.String(honk.Noise)
}

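// precipitate splits off a leading summary: when the noise starts with a two
// letter prefix and a colon (re_dangerous), the first line becomes the precis.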
var re_dangerous = regexp.MustCompile("^[a-zA-Z]{2}:")

func precipitate(honk *Honk) {
	noise := honk.Noise
	if re_dangerous.MatchString(noise) {
		idx := strings.Index(noise, "\n")
		if idx == -1 {
			honk.Precis = noise
			noise = ""
		} else {
			honk.Precis = noise[:idx]
			noise = noise[idx+1:]
		}
		var marker mz.Marker
		marker.Short = true
		honk.Precis = marker.Mark(strings.TrimSpace(honk.Precis))
		honk.Noise = noise
	}
}

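// translate converts a honk's markdown noise into HTML, linking hashtags and
// mentions, and folds in extra tags and mentions from Onties and SeeAlso.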
func translate(honk *Honk) {
	if honk.Format == "html" {
		return
	}
	noise := honk.Noise

	var marker mz.Marker
	marker.HashLinker = ontoreplacer
	marker.AtLinker = attoreplacer
	marker.AllowImages = true
	noise = strings.TrimSpace(noise)
	noise = marker.Mark(noise)
	honk.Noise = noise
	honk.Onts = append(honk.Onts, marker.HashTags...)
	honk.Mentions = bunchofgrapes(marker.Mentions)
	for _, t := range oneofakind(strings.Split(honk.Onties, " ")) {
		if t[0] != '#' {
			t = "#" + t
		}
		honk.Onts = append(honk.Onts, t)
	}
	honk.Onts = oneofakind(honk.Onts)
	honk.Mentions = append(honk.Mentions, bunchofgrapes(oneofakind(strings.Split(honk.SeeAlso, " ")))...)
}

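// redoimages rewrites img tags to absolute attachment links, prunes donks
// that were inlined, strips meme: directives, and adds mention classes to
// anchors.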
func redoimages(honk *Honk) {
	zap := make(map[string]bool)
	{
		var htf htfilter.Filter
		htf.Imager = replaceimgsand(zap, true, honk)
		htf.SpanClasses = allowedclasses
		htf.BaseURL, _ = url.Parse(honk.XID)
		p, _ := htf.String(honk.Precis)
		n, _ := htf.String(honk.Noise)
		honk.Precis = string(p)
		honk.Noise = string(n)
	}
	j := 0
	for i := 0; i < len(honk.Donks); i++ {
		if !zap[honk.Donks[i].XID] {
			honk.Donks[j] = honk.Donks[i]
			j++
		}
	}
	honk.Donks = honk.Donks[:j]

	honk.Noise = re_memes.ReplaceAllString(honk.Noise, "")
	honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
}

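// xcelerate maps bytes onto a URL safe alphabet; shortxid derives a short id
// by hashing an XID; xfiltrate generates a fresh random one.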
func xcelerate(b []byte) string {
	letters := "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	for i, c := range b {
		b[i] = letters[c&63]
	}
	s := string(b)
	return s
}

func shortxid(xid string) string {
	h := sha512.New512_256()
	io.WriteString(h, xid)
	return xcelerate(h.Sum(nil)[:20])
}

func xfiltrate() string {
	var b [18]byte
	rand.Read(b[:])
	return xcelerate(b[:])
}

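// grapevine collects the Where targets from mentions; bunchofgrapes resolves
// @name strings into Mention structs.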
func grapevine(mentions []Mention) []string {
	var s []string
	for _, m := range mentions {
		s = append(s, m.Where)
	}
	return s
}

func bunchofgrapes(m []string) []Mention {
	var mentions []Mention
	for i := range m {
		who := m[i]
		if strings.HasPrefix(who, "@https://") {
			mentions = append(mentions, Mention{Who: who, Where: who[1:]})
			continue
		}
		where := gofish(who)
		if where != "" {
			mentions = append(mentions, Mention{Who: who, Where: where})
		}
	}
	return mentions
}

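// Custom emojis (emus) are written :name: and served from the emus data
// directory; emucache remembers lookups briefly, and herdofemus finds the
// emus used in a piece of noise.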
type Emu struct {
	ID   string
	Name string
	Type string
}

var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)

var emucache = gencache.New(gencache.Options[string, *Emu]{Fill: func(ename string) (*Emu, bool) {
	fname := ename[1 : len(ename)-1]
	exts := []string{".png", ".gif"}
	for _, ext := range exts {
		_, err := os.Stat(dataDir + "/emus/" + fname + ext)
		if err != nil {
			continue
		}
		url := serverURL("/emu/%s%s", fname, ext)
		if develMode {
			url = fmt.Sprintf("/emu/%s%s", fname, ext)
		}
		return &Emu{ID: url, Name: ename, Type: "image/" + ext[1:]}, true
	}
	return nil, true
}, Duration: 10 * time.Second})

func herdofemus(noise string) []*Emu {
	m := re_emus.FindAllString(noise, -1)
	m = oneofakind(m)
	var emus []*Emu
	for _, e := range m {
		emu, _ := emucache.Get(e)
		if emu == nil {
			continue
		}
		emus = append(emus, emu)
	}
	return emus
}

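// memetize replaces meme: directives in the noise with attachments loaded
// from the memes data directory.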
var re_memes = regexp.MustCompile("meme: ?([^\n]+)")
var re_avatar = regexp.MustCompile("avatar: ?([^\n]+)")
var re_banner = regexp.MustCompile("banner: ?([^\n]+)")
var re_convoy = regexp.MustCompile("convoy: ?([^\n]+)")
var re_convalidate = regexp.MustCompile("^(https?|tag|data):")

func memetize(honk *Honk) {
	repl := func(x string) string {
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open(dataDir + "/memes/" + name)
		if err != nil {
			ilog.Printf("no meme for %s", name)
			return x
		}
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()

		url := serverURL("/meme/%s", name)
		fileid, err := savefile(name, name, url, ct, false, nil, nil)
		if err != nil {
			elog.Printf("error saving meme: %s", err)
			return x
		}
		d := &Donk{
			FileID: fileid,
			Name:   name,
			Media:  ct,
			URL:    url,
			Local:  false,
		}
		honk.Donks = append(honk.Donks, d)
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}

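// quickrename expands @shortname mentions into full @user@host form using the
// caller's saved honkers.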
var re_quickmention = regexp.MustCompile("(^|[ \n])@[[:alnum:]_]+([ \n:;.,']|$)")

func quickrename(s string, userid UserID) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			prefix := ""
			if m[0] == ' ' || m[0] == '\n' {
				prefix = m[:1]
				m = m[1:]
			}
			prefix += "@"
			m = m[1:]
			tail := ""
			if last := m[len(m)-1]; last == ' ' || last == '\n' ||
				last == ':' || last == ';' ||
				last == '.' || last == ',' || last == '\'' {
				tail = m[len(m)-1:]
				m = m[:len(m)-1]
			}

			xid := fullname(m, userid)

			if xid != "" {
				_, name := handles(xid)
				if name != "" {
					nonstop = true
					m = name
				}
			}
			return prefix + m + tail
		})
	}
	return s
}

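// shortnames and fullnames cache a user's saved honkers, mapping XID to
// nickname and nickname to XID respectively.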
var shortnames = gencache.New(gencache.Options[UserID, map[string]string]{Fill: func(userid UserID) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.XID] = h.Name
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func shortname(userid UserID, xid string) string {
	m, ok := shortnames.Get(userid)
	if ok {
		return m[xid]
	}
	return ""
}

var fullnames = gencache.New(gencache.Options[UserID, map[string]string]{Fill: func(userid UserID) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.Name] = h.XID
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func fullname(name string, userid UserID) string {
	m, ok := fullnames.Get(userid)
	if ok {
		return m[name]
	}
	return ""
}

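// attoreplacer and ontoreplacer are the markdown callbacks that turn
// @mentions and #hashtags into links.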
func attoreplacer(m string) string {
	fill := `<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`
	if strings.HasPrefix(m, "@https://") {
		return fmt.Sprintf(fill, html.EscapeString(m[1:]), html.EscapeString(m))
	}
	where := gofish(m)
	if where == "" {
		return m
	}
	who := m[0 : 1+strings.IndexByte(m[1:], '@')]
	return fmt.Sprintf(fill, html.EscapeString(where), html.EscapeString(who))
}

func ontoreplacer(h string) string {
	return fmt.Sprintf(`<a class="mention hashtag" href="%s">%s</a>`,
		serverURL("/o/%s", strings.ToLower(h[1:])), h)
}

var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")
var re_urlhost = regexp.MustCompile("https://([^/ #)]+)")

func originate(u string) string {
	m := re_urlhost.FindStringSubmatch(u)
	if len(m) > 1 {
		return m[1]
	}
	return ""
}

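// allhandles caches the short handle for an actor, investigating the actor
// when it hasn't been seen before.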
var allhandles = gencache.New(gencache.Options[string, string]{Fill: func(xid string) (string, bool) {
	handle := getxonker(xid, "handle")
	if handle == "" {
		dlog.Printf("need to get a handle: %s", xid)
		info, err := investigate(xid)
		if err != nil {
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = info.Name
		}
	}
	return handle, true
}})

// handle, handle@host
func handles(xid string) (string, string) {
	if xid == "" || xid == thewholeworld || strings.HasSuffix(xid, "/followers") {
		return "", ""
	}
	handle, _ := allhandles.Get(xid)
	if handle == xid {
		return xid, xid
	}
	return handle, handle + "@" + originate(xid)
}

func butnottooloud(aud []string) {
	for i, a := range aud {
		if strings.HasSuffix(a, "/followers") {
			aud[i] = ""
		}
	}
}

func loudandproud(aud []string) bool {
	for _, a := range aud {
		if a == thewholeworld {
			return true
		}
	}
	return false
}

func firstclass(honk *Honk) bool {
	return honk.Audience[0] == thewholeworld
}

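// oneofakind deduplicates a slice in place, preserving order and dropping
// empty strings.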
func oneofakind(a []string) []string {
	seen := make(map[string]bool)
	seen[""] = true
	j := 0
	for _, s := range a {
		if !seen[s] {
			seen[s] = true
			a[j] = s
			j++
		}
	}
	if j < len(a)/2 {
		rv := make([]string, j)
		copy(rv, a[:j])
		return rv
	}
	return a[:j]
}

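// ziggy returns the signing key info for a local user.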
var ziggies = gencache.New(gencache.Options[UserID, *KeyInfo]{Fill: func(userid UserID) (*KeyInfo, bool) {
	user, ok := somenumberedusers.Get(userid)
	if !ok {
		return nil, false
	}
	ki := new(KeyInfo)
	ki.keyname = user.URL + "#key"
	ki.seckey = user.SecKey
	return ki, true
}})

func ziggy(userid UserID) *KeyInfo {
	ki, _ := ziggies.Get(userid)
	return ki
}

var zaggies = gencache.New(gencache.Options[string, httpsig.PublicKey]{Fill: func(keyname string) (httpsig.PublicKey, bool) {
	data := getxonker(keyname, "pubkey")
	if data == "" {
		dlog.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(readyLuserOne, keyname)
		if err != nil {
			ilog.Printf("error getting %s pubkey: %s", keyname, err)
			when := time.Now().UTC().Format(dbtimeformat)
			stmtSaveXonker.Exec(keyname, "failed", "pubkey", when)
			return httpsig.PublicKey{}, true
		}
		allinjest(originate(keyname), j)
		data = getxonker(keyname, "pubkey")
		if data == "" {
			ilog.Printf("key not found after ingesting")
			when := time.Now().UTC().Format(dbtimeformat)
			stmtSaveXonker.Exec(keyname, "failed", "pubkey", when)
			return httpsig.PublicKey{}, true
		}
	}
	if data == "failed" {
		ilog.Printf("lookup previously failed key %s", keyname)
		return httpsig.PublicKey{}, true
	}
	_, key, err := httpsig.DecodeKey(data)
	if err != nil {
		ilog.Printf("error decoding %s pubkey: %s", keyname, err)
		return key, true
	}
	return key, true
}, Limit: 512})

func zaggy(keyname string) (httpsig.PublicKey, error) {
	key, _ := zaggies.Get(keyname)
	return key, nil
}

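// savingthrow forgets a cached public key so it will be refetched; keymatch
// checks that a key and an actor share the same origin.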
func savingthrow(keyname string) {
	when := time.Now().Add(-30 * time.Minute).UTC().Format(dbtimeformat)
	stmtDeleteXonker.Exec(keyname, "pubkey", when)
	zaggies.Clear(keyname)
}

func keymatch(keyname string, actor string) string {
	origin := originate(actor)
	if origin == originate(keyname) {
		return origin
	}
	return ""
}