// honk/fun.go
//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
package main
import (
"crypto/rand"
"crypto/rsa"
"crypto/sha512"
2019-04-13 19:58:42 +02:00
"fmt"
"html/template"
"io"
2019-04-13 19:58:42 +02:00
"log"
"net/http"
2019-11-11 05:49:09 +01:00
"net/url"
2019-05-31 06:24:18 +02:00
"os"
2019-04-13 19:58:42 +02:00
"regexp"
"strings"
"time"
2019-05-18 01:37:43 +02:00
2019-09-19 06:50:26 +02:00
"golang.org/x/net/html"
"humungus.tedunangst.com/r/webs/cache"
2019-05-18 01:37:43 +02:00
"humungus.tedunangst.com/r/webs/htfilter"
"humungus.tedunangst.com/r/webs/httpsig"
"humungus.tedunangst.com/r/webs/templates"
2019-04-13 19:58:42 +02:00
)
// allowedclasses is the set of span CSS class names allowed to
// survive HTML filtering (presumably syntax-highlighting classes
// emitted by the markdown renderer — confirm against markitzero).
var allowedclasses = map[string]bool{
	"kw": true,
	"bi": true,
	"st": true,
	"nm": true,
	"tp": true,
	"op": true,
	"cm": true,
	"al": true,
	"dl": true,
}
2019-07-10 20:36:14 +02:00
// reverbolate massages a list of honks into display form for viewer
// userid (-1 means an anonymous viewer): past-tenses the verb, sets
// display styles, resolves usernames/handles, filters the HTML,
// inlines emus, and prunes donks that were rendered inline.
func reverbolate(userid int64, honks []*Honk) {
	for _, h := range honks {
		// past-tense the verb; a "tonk" displays as "honked back"
		h.What += "ed"
		if h.What == "tonked" {
			h.What = "honked back"
			h.Style += " subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		translate(h, false)
		if h.Whofore == 2 || h.Whofore == 3 {
			h.URL = h.XID
			if h.What != "bonked" {
				h.Noise = re_memes.ReplaceAllString(h.Noise, "")
				h.Noise = mentionize(h.Noise)
				h.Noise = ontologize(h.Noise)
			}
			h.Username, h.Handle = handles(h.Honker)
		} else {
			_, h.Handle = handles(h.Honker)
			// prefer the viewer's own nickname for this honker,
			// else fall back to the (possibly truncated) handle
			short := shortname(userid, h.Honker)
			if short != "" {
				h.Username = short
			} else {
				h.Username = h.Handle
				if len(h.Username) > 20 {
					h.Username = h.Username[:20] + ".."
				}
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"

		// zap collects donk XIDs rendered inline by the image
		// rewriter so they can be dropped from the attachment list.
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, false)
			htf.SpanClasses = allowedclasses
			htf.BaseURL, _ = url.Parse(h.XID)
			p, _ := htf.String(h.Precis)
			n, _ := htf.String(h.Noise)
			h.Precis = string(p)
			h.Noise = string(n)
		}
		if userid == -1 {
			// anonymous viewers see summarized honks collapsed
			if h.Precis != "" {
				h.Open = ""
			}
		} else {
			unsee(userid, h)
			if h.Open == "open" && h.Precis == "unspecified horror" {
				h.Precis = ""
			}
		}
		// collapse very long posts behind a summary
		if len(h.Noise) > 6000 && h.Open == "open" {
			if h.Precis == "" {
				h.Precis = "really freaking long"
			}
			h.Open = ""
		}

		// replace :emu: shortcodes with img tags for local emus;
		// any matched donk is marked for removal either way
		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d.XID] = true
					if d.Local {
						return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
					}
				}
			}
			return e
		}
		h.Precis = re_emus.ReplaceAllStringFunc(h.Precis, emuxifier)
		h.Noise = re_emus.ReplaceAllStringFunc(h.Noise, emuxifier)

		// in-place filter: keep only donks not rendered inline
		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i].XID] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]

		// the filtered text is trusted HTML from here on
		h.HTPrecis = template.HTML(h.Precis)
		h.HTML = template.HTML(h.Noise)
	}
}
2019-10-22 06:19:31 +02:00
// replaceimgsand returns an htfilter Imager callback that rewrites
// <img> nodes: known local attachments (donks) are re-emitted as
// <img> tags (absolute URL when absolute is true) and their XIDs
// recorded in zap; unknown images degrade to escaped text with a link.
func replaceimgsand(zap map[string]bool, absolute bool) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		//title := GetAttr(node, "title")
		// remote custom emojis render as just their alt text
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		d := finddonk(src)
		if d != nil {
			zap[d.XID] = true
			base := ""
			if absolute {
				base = "https://" + serverName
			}
			return string(templates.Sprintf(`<img alt="%s" title="%s" src="%s/d/%s">`, alt, alt, base, d.XID))
		}
		return string(templates.Sprintf(`&lt;img alt="%s" src="<a href="%s">%s<a>"&gt;`, alt, src, src))
	}
}
// inlineimgsfor returns an htfilter Imager callback that saves every
// <img> it encounters as a donk attached to honk and strips the tag
// from the rendered output.
func inlineimgsfor(honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			honk.Donks = append(honk.Donks, d)
		}
		log.Printf("inline img with src: %s", src)
		return ""
	}
}
2019-10-22 06:19:31 +02:00
// imaginate runs honk's noise through the HTML filter purely for the
// side effect of collecting inline images as donks; the filtered
// output itself is discarded.
func imaginate(honk *Honk) {
	var htf htfilter.Filter
	htf.Imager = inlineimgsfor(honk)
	htf.BaseURL, _ = url.Parse(honk.XID)
	htf.String(honk.Noise)
}
// translate converts a honk's markdown-ish noise into HTML in place.
// A leading "DZ:" (danger zone) line becomes the precis/summary.
// When redoimages is true, donk images referenced in the text are
// rendered inline (and removed from the attachment list), meme
// directives are stripped, and mentions/hashtags are linkified.
// Honks already in "html" format are left untouched.
func translate(honk *Honk, redoimages bool) {
	if honk.Format == "html" {
		return
	}
	noise := honk.Noise
	if strings.HasPrefix(noise, "DZ:") {
		// first line is the content warning / summary
		idx := strings.Index(noise, "\n")
		if idx == -1 {
			honk.Precis = noise
			noise = ""
		} else {
			honk.Precis = noise[:idx]
			noise = noise[idx+1:]
		}
	}
	honk.Precis = markitzero(strings.TrimSpace(honk.Precis))
	noise = strings.TrimSpace(noise)
	noise = markitzero(noise)
	honk.Noise = noise
	honk.Onts = oneofakind(ontologies(honk.Noise))
	if redoimages {
		// zap records donks rendered inline so they can be dropped
		// from the attachment list below
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, true)
			htf.SpanClasses = allowedclasses
			p, _ := htf.String(honk.Precis)
			n, _ := htf.String(honk.Noise)
			honk.Precis = string(p)
			honk.Noise = string(n)
		}
		// in-place filter of zapped donks
		j := 0
		for i := 0; i < len(honk.Donks); i++ {
			if !zap[honk.Donks[i].XID] {
				honk.Donks[j] = honk.Donks[i]
				j++
			}
		}
		honk.Donks = honk.Donks[:j]
		honk.Noise = re_memes.ReplaceAllString(honk.Noise, "")
		honk.Noise = ontologize(mentionize(honk.Noise))
		honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
	}
}
// xcelerate encodes a byte slice as a short textual id by mapping the
// low six bits of each byte onto a fixed 64-character alphabet.
// The input slice is rewritten in place.
func xcelerate(b []byte) string {
	const alphabet = "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	for idx := range b {
		b[idx] = alphabet[b[idx]&63]
	}
	return string(b)
}
// shortxid derives a compact, stable identifier from an xid by
// hashing it with SHA-512/256 and text-encoding the first 20 bytes.
func shortxid(xid string) string {
	hasher := sha512.New512_256()
	io.WriteString(hasher, xid)
	digest := hasher.Sum(nil)
	return xcelerate(digest[:20])
}
2019-04-13 19:58:42 +02:00
func xfiltrate() string {
2019-06-17 01:18:33 +02:00
var b [18]byte
rand.Read(b[:])
return xcelerate(b[:])
2019-04-13 19:58:42 +02:00
}
// re_hashes matches a #hashtag at the start of the text or preceded
// by a space or '>' (the end of an HTML tag).
var re_hashes = regexp.MustCompile(`(?:^| |>)#[[:alnum:]]*[[:alpha:]][[:alnum:]_-]*`)

// ontologies extracts the hashtags from s, with the preceding
// delimiter character stripped off.
func ontologies(s string) []string {
	found := re_hashes.FindAllString(s, -1)
	// filter in place, reusing the backing array
	keep := found[:0]
	for _, tag := range found {
		if tag[0] == '&' {
			continue
		}
		if tag[0] != '#' {
			// drop the captured delimiter
			tag = tag[1:]
		}
		keep = append(keep, tag)
	}
	return keep
}
2019-04-13 19:58:42 +02:00
// Mention is a reference to another user: who is the textual
// @user@host form, where is the resolved actor URL.
type Mention struct {
	who   string
	where string
}

// re_mentions matches @user@host style mentions.
var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)

// re_urltions matches mentions given directly by URL: @https://...
var re_urltions = regexp.MustCompile(`@https://\S+`)
2019-04-13 19:58:42 +02:00
func grapevine(s string) []string {
var mentions []string
2019-04-19 18:35:31 +02:00
m := re_mentions.FindAllString(s, -1)
2019-04-13 19:58:42 +02:00
for i := range m {
where := gofish(m[i])
if where != "" {
mentions = append(mentions, where)
}
}
2019-04-19 18:35:31 +02:00
m = re_urltions.FindAllString(s, -1)
for i := range m {
mentions = append(mentions, m[i][1:])
}
2019-04-13 19:58:42 +02:00
return mentions
}
func bunchofgrapes(s string) []Mention {
m := re_mentions.FindAllString(s, -1)
var mentions []Mention
for i := range m {
where := gofish(m[i])
if where != "" {
mentions = append(mentions, Mention{who: m[i], where: where})
}
}
2019-04-19 18:35:31 +02:00
m = re_urltions.FindAllString(s, -1)
for i := range m {
mentions = append(mentions, Mention{who: m[i][1:], where: m[i][1:]})
}
2019-04-13 19:58:42 +02:00
return mentions
}
// Emu is a custom emoji: ID is its image URL, Name its :shortcode:
// form including the colons.
type Emu struct {
	ID   string
	Name string
}

// re_emus matches :shortcode: style custom emoji references.
var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)
2019-04-13 19:58:42 +02:00
// herdofemus finds all distinct :emu: shortcodes in noise that have a
// corresponding image file under emus/ and returns them with their
// public URLs.
func herdofemus(noise string) []Emu {
	m := re_emus.FindAllString(noise, -1)
	m = oneofakind(m)
	var emus []Emu
	for _, e := range m {
		// strip the surrounding colons to get the file name
		fname := e[1 : len(e)-1]
		_, err := os.Stat("emus/" + fname + ".png")
		if err != nil {
			// no such emu on this server
			continue
		}
		url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
		emus = append(emus, Emu{ID: url, Name: e})
	}
	return emus
}
// re_memes matches a "meme: name" directive up to end of line.
var re_memes = regexp.MustCompile("meme: ?([^\n]+)")

// memetize replaces every "meme: name" directive in the honk's noise
// with an attachment (donk) for the named file under memes/, sniffing
// the content type from the file itself.  Directives naming missing
// files (or failing to save) are left in the text unchanged.
func memetize(honk *Honk) {
	repl := func(x string) string {
		// x is "meme:" + optional space + name
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		// sniff the content type from the first 512 bytes
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()
		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		fileid, err := savefile("", name, name, url, ct, false, nil)
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		d := &Donk{
			FileID: fileid,
			XID:    "",
			Name:   name,
			Media:  ct,
			URL:    url,
			Local:  false,
		}
		honk.Donks = append(honk.Donks, d)
		// successful directives are removed from the text
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}
// re_quickmention matches a bare @name (no host part) standing alone
// between spaces/newlines or at either end of the text.
var re_quickmention = regexp.MustCompile("(^|[ \n])@[[:alnum:]]+([ \n]|$)")

// quickrename expands bare @shortname mentions in s into full
// @user@host handles using the honkers known to userid.  It repeats
// until a fixed point because adjacent matches can overlap and a
// replacement may expose a new match.
func quickrename(s string, userid int64) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			// peel off the leading delimiter and the '@',
			// keeping both for reassembly
			prefix := ""
			if m[0] == ' ' || m[0] == '\n' {
				prefix = m[:1]
				m = m[1:]
			}
			prefix += "@"
			m = m[1:]
			// peel off the trailing delimiter, if any
			tail := ""
			if m[len(m)-1] == ' ' || m[len(m)-1] == '\n' {
				tail = m[len(m)-1:]
				m = m[:len(m)-1]
			}
			// m is now the bare short name
			xid := fullname(m, userid)
			if xid != "" {
				_, name := handles(xid)
				if name != "" {
					nonstop = true
					m = name
				}
			}
			return prefix + m + tail
		})
	}
	return s
}
// shortnames caches, per user, a map from honker XID to the short
// name that user assigned; invalidated when honkers change.
var shortnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.XID] = h.Name
	}
	return m, true
}, Invalidator: &honkerinvalidator})
2019-10-12 16:34:23 +02:00
// shortname reports the nickname userid assigned to the honker xid,
// or "" when none is known.
func shortname(userid int64, xid string) string {
	var names map[string]string
	if !shortnames.Get(userid, &names) {
		return ""
	}
	return names[xid]
}
// fullnames caches, per user, the reverse map from assigned short
// name to honker XID; invalidated when honkers change.
var fullnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.Name] = h.XID
	}
	return m, true
}, Invalidator: &honkerinvalidator})
// fullname resolves a short honker name to its XID for userid, or ""
// when unknown.
func fullname(name string, userid int64) string {
	var names map[string]string
	if !fullnames.Get(userid, &names) {
		return ""
	}
	return names[name]
}
// mentionize rewrites textual mentions in s into h-card anchor spans:
// @user@host mentions are resolved via gofish (left unchanged when
// resolution fails), and @https:// mentions link to the URL directly.
func mentionize(s string) string {
	s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
		where := gofish(m)
		if where == "" {
			return m
		}
		// display text is just the @user part before the host
		who := m[0 : 1+strings.IndexByte(m[1:], '@')]
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(where), html.EscapeString(who))
	})
	s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(m[1:]), html.EscapeString(m))
	})
	return s
}
// ontologize links every #hashtag in s to this server's /o/ page for
// the lowercased tag, preserving the delimiter character before it.
func ontologize(s string) string {
	s = re_hashes.ReplaceAllStringFunc(s, func(o string) string {
		if o[0] == '&' {
			return o
		}
		// p holds the matched delimiter (space or '>'), if any
		p := ""
		h := o
		if h[0] != '#' {
			p = h[:1]
			h = h[1:]
		}
		return fmt.Sprintf(`%s<a href="https://%s/o/%s">%s</a>`, p, serverName,
			strings.ToLower(h[1:]), h)
	})
	return s
}
2019-04-13 19:58:42 +02:00
// re_unurl extracts a host and final path element from a URL — a
// last-ditch guess at a handle.
var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")

// re_urlhost extracts just the host part of an https URL.
var re_urlhost = regexp.MustCompile("https://([^/ ]+)")

// originate reports the host name of an https URL, or "" when u
// does not look like one.
func originate(u string) string {
	if match := re_urlhost.FindStringSubmatch(u); len(match) > 1 {
		return match[1]
	}
	return ""
}
// allhandles caches the short handle for an actor xid.  It tries the
// xonker table first, then investigates the actor over the network,
// and finally falls back to parsing the URL (or the xid itself).
var allhandles = cache.New(cache.Options{Filler: func(xid string) (string, bool) {
	var handle string
	row := stmtGetXonker.QueryRow(xid, "handle")
	err := row.Scan(&handle)
	if err != nil {
		log.Printf("need to get a handle: %s", xid)
		info, err := investigate(xid)
		if err != nil {
			// actor unreachable; guess from the URL shape
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = info.Name
		}
	}
	return handle, true
}})
// handles returns the display forms for an actor xid: the bare handle
// and handle@host.  When no better handle is known, the xid itself is
// returned for both forms.
func handles(xid string) (string, string) {
	if xid == "" {
		return "", ""
	}
	var handle string
	allhandles.Get(xid, &handle)
	if handle == xid {
		// lookup fell back to the xid itself
		return xid, xid
	}
	full := handle + "@" + originate(xid)
	return handle, full
}
// butnottooloud blanks out any follower-collection entries in the
// audience list, mutating aud in place.
func butnottooloud(aud []string) {
	for idx := range aud {
		if strings.HasSuffix(aud[idx], "/followers") {
			aud[idx] = ""
		}
	}
}
// loudandproud reports whether the audience includes the public
// collection.
func loudandproud(aud []string) bool {
	for _, member := range aud {
		if member == thewholeworld {
			return true
		}
	}
	return false
}
2019-08-24 02:35:58 +02:00
// firstclass reports whether the honk is addressed to the public
// collection in its primary (first) audience slot.
func firstclass(honk *Honk) bool {
	// guard: an empty audience is simply not first class, rather
	// than an index-out-of-range panic
	return len(honk.Audience) > 0 && honk.Audience[0] == thewholeworld
}
2019-04-13 19:58:42 +02:00
// oneofakind deduplicates a in place, preserving first-seen order and
// dropping empty strings.  The returned slice aliases a's storage.
func oneofakind(a []string) []string {
	seen := map[string]bool{"": true}
	out := a[:0]
	for _, s := range a {
		if seen[s] {
			continue
		}
		seen[s] = true
		out = append(out, s)
	}
	return out
}
// ziggies caches per-user signing key info (key name plus secret key)
// for outgoing HTTP signatures.
var ziggies = cache.New(cache.Options{Filler: func(userid int64) (*KeyInfo, bool) {
	var user *WhatAbout
	ok := somenumberedusers.Get(userid, &user)
	if !ok {
		return nil, false
	}
	ki := new(KeyInfo)
	ki.keyname = user.URL + "#key"
	ki.seckey = user.SecKey
	return ki, true
}})
func ziggy(userid int64) *KeyInfo {
var ki *KeyInfo
ziggies.Get(userid, &ki)
return ki
2019-04-13 19:58:42 +02:00
}
2019-10-30 08:31:16 +01:00
// zaggies caches remote actors' public keys by key name.  Unknown
// keys are fetched over the network and ingested; failures are cached
// as a nil key (still reported ok) so we don't hammer a dead peer.
var zaggies = cache.New(cache.Options{Filler: func(keyname string) (*rsa.PublicKey, bool) {
	var data string
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	err := row.Scan(&data)
	if err != nil {
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return nil, true
		}
		allinjest(originate(keyname), j)
		// ingestion should have stored the key; query again
		row = stmtGetXonker.QueryRow(keyname, "pubkey")
		err = row.Scan(&data)
		if err != nil {
			log.Printf("key not found after ingesting")
			return nil, true
		}
	}
	_, key, err := httpsig.DecodeKey(data)
	if err != nil {
		log.Printf("error decoding %s pubkey: %s", keyname, err)
		return nil, true
	}
	return key, true
}, Limit: 512})
2019-10-30 08:31:16 +01:00
// zaggy returns the cached public key for keyname (nil when lookup
// or decoding failed).
func zaggy(keyname string) *rsa.PublicKey {
	var pubkey *rsa.PublicKey
	zaggies.Get(keyname, &pubkey)
	return pubkey
}
// savingthrow evicts a remembered public key so it gets refetched:
// the database row is deleted and the in-memory cache entry cleared.
// NOTE(review): `when` is 30 minutes in the past — presumably the
// delete statement only removes rows older than that, sparing
// recently refreshed keys; confirm against stmtDeleteXonker's SQL.
func savingthrow(keyname string) {
	when := time.Now().UTC().Add(-30 * time.Minute).Format(dbtimeformat)
	stmtDeleteXonker.Exec(keyname, "pubkey", when)
	zaggies.Clear(keyname)
}
func keymatch(keyname string, actor string) string {
hash := strings.IndexByte(keyname, '#')
if hash == -1 {
hash = len(keyname)
}
owner := keyname[0:hash]
if owner == actor {
return originate(actor)
}
return ""
2019-04-13 19:58:42 +02:00
}