incrementally closer to consistent html processing
Ted Unangst <tedu@tedunangst.com>
Tue, 29 Oct 2019 16:24:32 -0400
2 files changed, 7 insertions(+), 4 deletions(-)
M activity.go → activity.go
@@ -1013,10 +1013,10 @@ }
 	if !h.Public {
 		jo["directMessage"] = true
 	}
+	mentions := bunchofgrapes(h.Noise)
 	translate(h, true)
-	h.Noise = re_memes.ReplaceAllString(h.Noise, "")
 	jo["summary"] = html.EscapeString(h.Precis)
-	jo["content"] = ontologize(mentionize(h.Noise))
+	jo["content"] = h.Noise
 	if strings.HasPrefix(h.Precis, "DZ:") {
 		jo["sensitive"] = true
 	}
@@ -1034,7 +1034,7 @@ jo["replies"] = jr
 	}
 	var tags []junk.Junk
-	for _, m := range bunchofgrapes(h.Noise) {
+	for _, m := range mentions {
 		t := junk.New()
 		t["type"] = "Mention"
 		t["name"] = m.who
M fun.go → fun.go
@@ -220,7 +220,6 @@ p, _ := htf.String(honk.Precis)
 			n, _ := htf.String(honk.Noise)
 			honk.Precis = string(p)
 			honk.Noise = string(n)
-			honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
 		}
 		j := 0
 		for i := 0; i < len(honk.Donks); i++ {
@@ -230,6 +229,10 @@ j++
 			}
 		}
 		honk.Donks = honk.Donks[:j]
+
+		honk.Noise = re_memes.ReplaceAllString(honk.Noise, "")
+		honk.Noise = ontologize(mentionize(honk.Noise))
+		honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
 	}
 }
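For reference, a minimal sketch of the post-processing order this change consolidates into fun.go: strip meme shortcuts, expand mentions and hashtags into links, then tag those links with the mention/u-url classes. The re_memes pattern and the mentionize/ontologize bodies below are placeholder stand-ins, and finishnoise is a hypothetical name; only the ordering and the strings.Replace rewrite mirror the diff above. On the ActivityPub side, the caller now just collects mentions up front with bunchofgrapes and uses h.Noise as-is for the content.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// placeholder pattern; the real re_memes lives in the honk source
var re_memes = regexp.MustCompile(`meme: ?[^\n]+`)

// stand-ins for honk's helpers that turn @mentions and #hashtags into links
func mentionize(s string) string { return s }
func ontologize(s string) string { return s }

// finishnoise applies the consolidated order now used in fun.go
func finishnoise(noise string) string {
	noise = re_memes.ReplaceAllString(noise, "")
	noise = ontologize(mentionize(noise))
	noise = strings.Replace(noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
	return noise
}

func main() {
	fmt.Println(finishnoise(`hi <a href="https://example.com/@friend">@friend</a>`))
}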