honk/fun.go

605 lines
13 KiB
Go
Raw Normal View History

2019-04-13 19:58:42 +02:00
//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
package main
import (
"crypto/rand"
"crypto/rsa"
"fmt"
"html/template"
"log"
"net/http"
2019-05-31 06:24:18 +02:00
"os"
2019-04-13 19:58:42 +02:00
"regexp"
"strings"
"sync"
2019-05-18 01:37:43 +02:00
2019-09-19 06:50:26 +02:00
"golang.org/x/net/html"
2019-05-18 01:37:43 +02:00
"humungus.tedunangst.com/r/webs/htfilter"
"humungus.tedunangst.com/r/webs/httpsig"
2019-04-13 19:58:42 +02:00
)
// allowedclasses is the set of span classes allowed through the html
// filter (wired up as filt.SpanClasses in reverbolate). Declared as a
// composite literal rather than populated in init().
var allowedclasses = map[string]bool{
	"kw": true,
	"bi": true,
	"st": true,
	"nm": true,
	"tp": true,
	"op": true,
	"cm": true,
	"al": true,
	"dl": true,
}
2019-07-10 20:36:14 +02:00
// reverbolate massages a page of honks into displayable form for the
// given viewer (userid == -1 means an unauthenticated visitor): it
// fixes up the verb and style, renders noise and precis to filtered
// HTML, substitutes emus, resolves handles, and drops donks that were
// consumed as inline emus.
func reverbolate(userid int64, honks []*Honk) {
	filt := htfilter.New()
	filt.Imager = replaceimg
	filt.SpanClasses = allowedclasses
	for _, h := range honks {
		// past-tense the verb; "tonk" (reply) gets friendlier wording
		h.What += "ed"
		if h.What == "tonked" {
			h.What = "honked back"
			h.Style = "subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		translate(h)
		if h.Whofore == 2 || h.Whofore == 3 {
			// locally authored: decorate mentions/tags, except on bonks
			h.URL = h.XID
			if h.What != "bonked" {
				h.Noise = re_memes.ReplaceAllString(h.Noise, "")
				h.Noise = mentionize(h.Noise)
				h.Noise = ontologize(h.Noise)
			}
			h.Username, h.Handle = handles(h.Honker)
		} else {
			_, h.Handle = handles(h.Honker)
			h.Username = h.Handle
			// keep the displayed name short
			if len(h.Username) > 20 {
				h.Username = h.Username[:20] + ".."
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		// donks replaced inline as emus get collected here and removed below
		zap := make(map[*Donk]bool)
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"
		if userid == -1 {
			// anonymous viewers see posts with a precis collapsed
			if h.Precis != "" {
				h.Open = ""
			}
		} else {
			unsee(userid, h)
			if h.Open == "open" && h.Precis == "unspecified horror" {
				h.Precis = ""
			}
		}
		// collapse very long posts behind a summary
		if len(h.Noise) > 6000 && h.Open == "open" {
			if h.Precis == "" {
				h.Precis = "really freaking long"
			}
			h.Open = ""
		}
		h.HTPrecis, _ = filt.String(h.Precis)
		h.HTML, _ = filt.String(h.Noise)
		// replace :shortcode: text with img tags for matching local donks
		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d] = true
					if d.Local {
						return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
					}
				}
			}
			return e
		}
		h.HTPrecis = template.HTML(re_emus.ReplaceAllStringFunc(string(h.HTPrecis), emuxifier))
		h.HTML = template.HTML(re_emus.ReplaceAllStringFunc(string(h.HTML), emuxifier))
		// compact the donk list in place, dropping the zapped ones
		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i]] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]
	}
}
2019-09-19 06:50:26 +02:00
func replaceimg(node *html.Node) string {
src := htfilter.GetAttr(node, "src")
alt := htfilter.GetAttr(node, "alt")
//title := GetAttr(node, "title")
if htfilter.HasClass(node, "Emoji") && alt != "" {
return alt
}
alt = html.EscapeString(alt)
src = html.EscapeString(src)
d := finddonk(src)
if d != nil {
src = fmt.Sprintf("https://%s/d/%s", serverName, d.XID)
return fmt.Sprintf(`<img alt="%s" title="%s" src="%s">`, alt, alt, src)
}
2019-09-19 06:50:26 +02:00
return fmt.Sprintf(`&lt;img alt="%s" src="<a href="%s">%s<a>"&gt;`, alt, src, src)
}
// inlineimgs is an htfilter Imager hook that keeps images inline:
// emoji images collapse to alt text, remote images are saved as local
// donks, and an img tag pointing at the (possibly relocated) source is
// emitted.
func inlineimgs(node *html.Node) string {
	src := htfilter.GetAttr(node, "src")
	alt := htfilter.GetAttr(node, "alt")
	//title := GetAttr(node, "title")
	if htfilter.HasClass(node, "Emoji") && alt != "" {
		return alt
	}
	alt = html.EscapeString(alt)
	src = html.EscapeString(src)
	// save a local copy of off-site images
	if !strings.HasPrefix(src, "https://"+serverName+"/") {
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			src = fmt.Sprintf("https://%s/d/%s", serverName, d.XID)
		}
	}
	log.Printf("inline img with src: %s", src)
	// fix: the src attribute was missing its closing quote (src="%s>)
	return fmt.Sprintf(`<img alt="%s" title="%s" src="%s">`, alt, alt, src)
}
// translate converts a honk's composed text into rendered noise:
// a leading "DZ:" line is split off as the precis, quick mentions are
// expanded, markitzero renders the markup, and ontologies (hashtags)
// are extracted. Honks already in html format are left alone.
func translate(honk *Honk) {
	if honk.Format == "html" {
		return
	}
	text := honk.Noise
	if strings.HasPrefix(text, "DZ:") {
		// danger zone: first line becomes the precis
		if nl := strings.IndexByte(text, '\n'); nl >= 0 {
			honk.Precis = text[:nl]
			text = text[nl+1:]
		} else {
			honk.Precis = text
			text = ""
		}
	}
	honk.Precis = strings.TrimSpace(honk.Precis)
	text = strings.TrimSpace(text)
	text = quickrename(text, honk.UserID)
	text = markitzero(text)
	honk.Noise = text
	honk.Onts = oneofakind(ontologies(honk.Noise))
}
// shortxid returns the final path component of an xid url, or the
// whole string when it contains no slash.
func shortxid(xid string) string {
	if idx := strings.LastIndexByte(xid, '/'); idx >= 0 {
		return xid[idx+1:]
	}
	return xid
}
2019-04-13 19:58:42 +02:00
// xfiltrate generates an 18 character random identifier drawn from a
// 64 symbol alphabet of consonants and digits.
func xfiltrate() string {
	const letters = "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	var scratch [18]byte
	rand.Read(scratch[:])
	for i := range scratch {
		// 64 letters, so the low 6 bits index the alphabet uniformly
		scratch[i] = letters[scratch[i]&63]
	}
	return string(scratch[:])
}
2019-09-17 15:58:18 +02:00
var re_hashes = regexp.MustCompile(`(?:^| )#[[:alnum:]][[:alnum:]_-]*`)
2019-06-16 00:24:18 +02:00
func ontologies(s string) []string {
m := re_hashes.FindAllString(s, -1)
j := 0
for _, h := range m {
if h[0] == '&' {
continue
}
2019-06-16 00:30:59 +02:00
if h[0] != '#' {
h = h[1:]
2019-06-16 00:30:59 +02:00
}
m[j] = h
j++
2019-06-16 00:30:59 +02:00
}
return m[:j]
2019-06-16 00:24:18 +02:00
}
2019-04-13 19:58:42 +02:00
// Mention is a reference to another user found in honk text: who is
// the mention text as written, where is the resolved location url.
type Mention struct {
	who   string
	where string
}

// re_mentions matches @user@host style mentions.
var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)

// re_urltions matches @https://... direct url mentions.
var re_urltions = regexp.MustCompile(`@https://\S+`)
2019-04-13 19:58:42 +02:00
func grapevine(s string) []string {
var mentions []string
2019-04-19 18:35:31 +02:00
m := re_mentions.FindAllString(s, -1)
2019-04-13 19:58:42 +02:00
for i := range m {
where := gofish(m[i])
if where != "" {
mentions = append(mentions, where)
}
}
2019-04-19 18:35:31 +02:00
m = re_urltions.FindAllString(s, -1)
for i := range m {
mentions = append(mentions, m[i][1:])
}
2019-04-13 19:58:42 +02:00
return mentions
}
func bunchofgrapes(s string) []Mention {
m := re_mentions.FindAllString(s, -1)
var mentions []Mention
for i := range m {
where := gofish(m[i])
if where != "" {
mentions = append(mentions, Mention{who: m[i], where: where})
}
}
2019-04-19 18:35:31 +02:00
m = re_urltions.FindAllString(s, -1)
for i := range m {
mentions = append(mentions, Mention{who: m[i][1:], where: m[i][1:]})
}
2019-04-13 19:58:42 +02:00
return mentions
}
type Emu struct {
ID string
Name string
}
2019-07-10 05:04:19 +02:00
var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)
2019-04-13 19:58:42 +02:00
func herdofemus(noise string) []Emu {
m := re_emus.FindAllString(noise, -1)
m = oneofakind(m)
var emus []Emu
for _, e := range m {
fname := e[1 : len(e)-1]
2019-07-08 02:07:16 +02:00
_, err := os.Stat("emus/" + fname + ".png")
if err != nil {
continue
}
2019-04-13 19:58:42 +02:00
url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
emus = append(emus, Emu{ID: url, Name: e})
}
return emus
}
2019-05-31 06:24:18 +02:00
// re_memes matches "meme: name" directives in composed honk text.
var re_memes = regexp.MustCompile("meme: ?([[:alnum:]_.-]+)")

// memetize replaces each "meme: name" directive in the honk's noise
// with an attached donk built from the named file in the memes
// directory. Directives naming a missing file, or whose file cannot
// be saved, are left in the text unchanged.
func memetize(honk *Honk) {
	repl := func(x string) string {
		// x is "meme:name" or "meme: name"; skip the fixed prefix
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		// sniff the content type from the first 512 bytes
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()
		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		fileid, err := savefile("", name, name, url, ct, false, nil)
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		var d Donk
		d.FileID = fileid
		d.XID = ""
		d.Name = name
		d.Media = ct
		d.URL = url
		d.Local = false
		honk.Donks = append(honk.Donks, &d)
		// the directive text itself is removed
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}
2019-09-10 20:30:52 +02:00
// re_quickmention matches a bare @name (no host part) at the start of
// the string or after a space, followed by a space.
var re_quickmention = regexp.MustCompile("(^| )@[[:alnum:]]+ ")

// quickrename expands bare @name mentions in s into full handles via
// the user's saved honkers. It repeats until a pass performs no
// successful expansion, since adjacent matches can overlap the
// surrounding spaces and need another pass.
func quickrename(s string, userid int64) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			log.Printf("m: %s", m)
			prefix := ""
			if m[0] == ' ' {
				prefix = " "
				m = m[1:]
			}
			prefix += "@"
			// strip the leading @ and the trailing space
			m = m[1:]
			m = m[:len(m)-1]
			row := stmtOneHonker.QueryRow(m, userid)
			var xid string
			err := row.Scan(&xid)
			if err == nil {
				_, name := handles(xid)
				if name != "" {
					// expanded: run another pass afterwards
					nonstop = true
					m = name
				}
			}
			return prefix + m + " "
		})
	}
	return s
}
func mentionize(s string) string {
2019-04-13 19:58:42 +02:00
s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
2019-04-16 05:48:01 +02:00
where := gofish(m)
if where == "" {
return m
}
2019-04-17 02:33:01 +02:00
who := m[0 : 1+strings.IndexByte(m[1:], '@')]
return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
html.EscapeString(where), html.EscapeString(who))
2019-04-13 19:58:42 +02:00
})
2019-04-19 18:35:31 +02:00
s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
html.EscapeString(m[1:]), html.EscapeString(m))
})
2019-04-13 19:58:42 +02:00
return s
}
func ontologize(s string) string {
s = re_hashes.ReplaceAllStringFunc(s, func(o string) string {
if o[0] == '&' {
return o
}
p := ""
h := o
if h[0] != '#' {
p = h[:1]
h = h[1:]
}
2019-09-02 00:04:30 +02:00
return fmt.Sprintf(`%s<a class="mention u-url" href="https://%s/o/%s">%s</a>`, p, serverName,
2019-08-28 04:25:02 +02:00
strings.ToLower(h[1:]), h)
})
return s
}
2019-04-13 19:58:42 +02:00
var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")
2019-09-22 00:20:50 +02:00
var re_urlhost = regexp.MustCompile("https://([^/ ]+)")
2019-04-13 19:58:42 +02:00
func originate(u string) string {
m := re_urlhost.FindStringSubmatch(u)
if len(m) > 1 {
return m[1]
}
return ""
}
2019-08-16 06:11:30 +02:00
// allhandles caches xid -> short handle lookups; guarded by handlelock.
var allhandles = make(map[string]string)
var handlelock sync.Mutex

// handles maps an actor xid to (handle, handle@host), consulting the
// allhandles cache first. An empty xid yields ("", ""). When no better
// handle is known, the xid itself is returned for both values.
func handles(xid string) (string, string) {
	if xid == "" {
		return "", ""
	}
	handlelock.Lock()
	handle := allhandles[xid]
	handlelock.Unlock()
	if handle == "" {
		// cache miss. the lock is not held across findhandle (which may
		// hit the database or network), so concurrent misses can
		// duplicate work but the map stays consistent.
		handle = findhandle(xid)
		handlelock.Lock()
		allhandles[xid] = handle
		handlelock.Unlock()
	}
	if handle == xid {
		return xid, xid
	}
	return handle, handle + "@" + originate(xid)
}
// findhandle determines the short handle for an actor xid: first from
// the xonker table, then by fetching the actor via investigate, and
// finally by guessing from the url's last path component. Any freshly
// determined handle is saved back to the xonker table.
func findhandle(xid string) string {
	row := stmtGetXonker.QueryRow(xid, "handle")
	var handle string
	err := row.Scan(&handle)
	if err != nil {
		p, _ := investigate(xid)
		if p == nil {
			// couldn't fetch the actor; fall back to parsing the url
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = p.Handle
		}
		_, err = stmtSaveXonker.Exec(xid, handle, "handle")
		if err != nil {
			log.Printf("error saving handle: %s", err)
		}
	}
	return handle
}
// handleprelock serializes prehandle calls.
var handleprelock sync.Mutex

// prehandle warms the handle cache for xid. Calls are serialized by
// handleprelock so concurrent prefetches proceed one at a time.
func prehandle(xid string) {
	handleprelock.Lock()
	defer handleprelock.Unlock()
	handles(xid)
}
2019-04-13 19:58:42 +02:00
// prepend returns a new slice with s placed before the elements of x.
func prepend(s string, x []string) []string {
	out := make([]string, 0, len(x)+1)
	out = append(out, s)
	return append(out, x...)
}
// pleroma leaks followers addressed posts to followers
// butnottooloud blanks out, in place, any audience entries that are
// ".../followers" collections.
func butnottooloud(aud []string) {
	for i := range aud {
		if strings.HasSuffix(aud[i], "/followers") {
			aud[i] = ""
		}
	}
}
// keepitquiet reports whether the audience excludes the public
// collection, i.e. the honk should not be shown publicly.
func keepitquiet(aud []string) bool {
	for _, addr := range aud {
		if addr == thewholeworld {
			return false
		}
	}
	return true
}
2019-08-24 02:35:58 +02:00
// firstclass reports whether the honk's primary (first) audience
// entry is the public collection. A honk with an empty audience is
// not first class; previously this indexed Audience[0] unguarded and
// would panic on an empty slice.
func firstclass(honk *Honk) bool {
	return len(honk.Audience) > 0 && honk.Audience[0] == thewholeworld
}
2019-04-13 19:58:42 +02:00
// oneofakind returns the distinct non-empty strings of a, preserving
// first-occurrence order. Uses a set for O(n) work instead of the
// earlier O(n^2) scan, and no longer mutates the input slice.
func oneofakind(a []string) []string {
	var x []string
	seen := make(map[string]bool, len(a))
	for _, s := range a {
		if s != "" && !seen[s] {
			seen[s] = true
			x = append(x, s)
		}
	}
	return x
}
// signing key caches: ziggies holds local users' private keys by
// username, zaggies holds remote public keys by keyname url. Both are
// guarded by ziggylock.
var ziggies = make(map[string]*rsa.PrivateKey)
var zaggies = make(map[string]*rsa.PublicKey)
var ziggylock sync.Mutex

// ziggy returns the http-signature keyname and private key for a
// local user, loading the key from the database and caching it on
// first use. On decode failure it returns ("", nil).
func ziggy(username string) (keyname string, key *rsa.PrivateKey) {
	ziggylock.Lock()
	key = ziggies[username]
	ziggylock.Unlock()
	if key == nil {
		db := opendatabase()
		row := db.QueryRow("select seckey from users where username = ?", username)
		var data string
		// NOTE(review): scan error ignored; an empty data then fails in
		// DecodeKey below and is logged there
		row.Scan(&data)
		var err error
		key, _, err = httpsig.DecodeKey(data)
		if err != nil {
			log.Printf("error decoding %s seckey: %s", username, err)
			return
		}
		ziggylock.Lock()
		ziggies[username] = key
		ziggylock.Unlock()
	}
	keyname = fmt.Sprintf("https://%s/%s/%s#key", serverName, userSep, username)
	return
}
// zaggy returns the public key for keyname, trying in order: the
// in-memory zaggies cache, the xonker table, and finally fetching the
// key document from the remote server. Keys fetched remotely are
// saved to the table and cached. Returns nil when the key cannot be
// obtained; used as the key lookup callback for httpsig verification.
func zaggy(keyname string) (key *rsa.PublicKey) {
	ziggylock.Lock()
	key = zaggies[keyname]
	ziggylock.Unlock()
	if key != nil {
		return
	}
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	var data string
	err := row.Scan(&data)
	if err != nil {
		// not in the table; fetch the key document
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return
		}
		// the key may be nested under "publicKey" or be the top-level
		// object itself
		keyobj, ok := j.GetMap("publicKey")
		if ok {
			j = keyobj
		}
		data, ok = j.GetString("publicKeyPem")
		if !ok {
			log.Printf("error finding %s pubkey", keyname)
			return
		}
		// require an owner field to be present, though its value is
		// not checked here
		_, ok = j.GetString("owner")
		if !ok {
			log.Printf("error finding %s pubkey owner", keyname)
			return
		}
		_, key, err = httpsig.DecodeKey(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
			return
		}
		_, err = stmtSaveXonker.Exec(keyname, data, "pubkey")
		if err != nil {
			log.Printf("error saving key: %s", err)
		}
	} else {
		// found in the table; just decode it
		_, key, err = httpsig.DecodeKey(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
			return
		}
	}
	ziggylock.Lock()
	zaggies[keyname] = key
	ziggylock.Unlock()
	return
}
// makeitworksomehowwithoutregardforkeycontinuity retries a failed
// signature verification after discarding the stored and cached key
// for keyname, forcing zaggy to refetch it from the remote server
// (handles servers that rotate keys).
func makeitworksomehowwithoutregardforkeycontinuity(keyname string, r *http.Request, payload []byte) (string, error) {
	_, err := stmtDeleteXonker.Exec(keyname, "pubkey")
	if err != nil {
		log.Printf("error deleting key: %s", err)
	}
	ziggylock.Lock()
	delete(zaggies, keyname)
	ziggylock.Unlock()
	return httpsig.VerifyRequest(r, payload, zaggy)
}
// keymatch checks that keyname (ignoring any #fragment) identifies
// actor, returning the actor's origin host on match and "" otherwise.
func keymatch(keyname string, actor string) string {
	owner := keyname
	if idx := strings.IndexByte(keyname, '#'); idx != -1 {
		owner = keyname[:idx]
	}
	if owner != actor {
		return ""
	}
	return originate(actor)
}