//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha512"
	"fmt"
	"html/template"
	"io"
	"log"
	"net/http"
	"os"
	"regexp"
	"strings"
	"sync"

	"golang.org/x/net/html"
	"humungus.tedunangst.com/r/webs/cache"
	"humungus.tedunangst.com/r/webs/htfilter"
	"humungus.tedunangst.com/r/webs/httpsig"
	"humungus.tedunangst.com/r/webs/templates"
)

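// allowedclasses is the set of span class names the HTML filter will pass
// through (the classes used for code highlighting).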
var allowedclasses = make(map[string]bool)

func init() {
	allowedclasses["kw"] = true
	allowedclasses["bi"] = true
	allowedclasses["st"] = true
	allowedclasses["nm"] = true
	allowedclasses["tp"] = true
	allowedclasses["op"] = true
	allowedclasses["cm"] = true
	allowedclasses["al"] = true
	allowedclasses["dl"] = true
}

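// reverbolate prepares honks for display: it adjusts verbs and styles,
// rewrites mentions, hashtags, and emus into HTML, filters the precis and
// noise, and drops donks that are now shown inline.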
func reverbolate(userid int64, honks []*Honk) {
	for _, h := range honks {
		h.What += "ed"
		if h.What == "tonked" {
			h.What = "honked back"
			h.Style = "subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		translate(h, false)
		if h.Whofore == 2 || h.Whofore == 3 {
			h.URL = h.XID
			if h.What != "bonked" {
				h.Noise = re_memes.ReplaceAllString(h.Noise, "")
				h.Noise = mentionize(h.Noise)
				h.Noise = ontologize(h.Noise)
			}
			h.Username, h.Handle = handles(h.Honker)
		} else {
			_, h.Handle = handles(h.Honker)
			short := shortname(userid, h.Honker)
			if short != "" {
				h.Username = short
			} else {
				h.Username = h.Handle
				if len(h.Username) > 20 {
					h.Username = h.Username[:20] + ".."
				}
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"

		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, false)
			htf.SpanClasses = allowedclasses
			p, _ := htf.String(h.Precis)
			n, _ := htf.String(h.Noise)
			h.Precis = string(p)
			h.Noise = string(n)
		}

		if userid == -1 {
			if h.Precis != "" {
				h.Open = ""
			}
		} else {
			unsee(userid, h)
			if h.Open == "open" && h.Precis == "unspecified horror" {
				h.Precis = ""
			}
		}
		if len(h.Noise) > 6000 && h.Open == "open" {
			if h.Precis == "" {
				h.Precis = "really freaking long"
			}
			h.Open = ""
		}

		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d.XID] = true
					if d.Local {
						return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
					}
				}
			}
			return e
		}
		h.Precis = re_emus.ReplaceAllStringFunc(h.Precis, emuxifier)
		h.Noise = re_emus.ReplaceAllStringFunc(h.Noise, emuxifier)

		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i].XID] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]

		h.HTPrecis = template.HTML(h.Precis)
		h.HTML = template.HTML(h.Noise)
	}
}

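// replaceimgsand returns an htfilter Imager that rewrites img tags: emoji
// images become their alt text, images already saved as donks are recorded
// in zap and re-pointed at the local /d/ path (absolute URLs if requested),
// and anything else is reduced to a link.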
func replaceimgsand(zap map[string]bool, absolute bool) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		//title := GetAttr(node, "title")
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		d := finddonk(src)
		if d != nil {
			zap[d.XID] = true
			base := ""
			if absolute {
				base = "https://" + serverName
			}
			return string(templates.Sprintf(`<img alt="%s" title="%s" src="%s/d/%s">`, alt, alt, base, d.XID))
		}
		return string(templates.Sprintf(`<img alt="%s" src="<a href="%s">%s</a>">`, alt, src, src))
	}
}

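// inlineimgsfor returns an htfilter Imager that saves each inline image as a
// donk attached to the honk and removes the tag from the text.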
func inlineimgsfor(honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			honk.Donks = append(honk.Donks, d)
		}
		log.Printf("inline img with src: %s", src)
		return ""
	}
}

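// imaginate runs a honk's noise through the HTML filter so that any inline
// images are captured as donks.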
func imaginate(honk *Honk) {
	var htf htfilter.Filter
	htf.Imager = inlineimgsfor(honk)
	htf.String(honk.Noise)
}

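// translate renders a honk's noise as HTML: it splits a leading DZ: line off
// into the precis, expands quick @mentions, runs the text through markitzero,
// collects its ontologies, and optionally rewrites image tags, pruning donks
// that are now shown inline.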
func translate(honk *Honk, redoimages bool) {
	if honk.Format == "html" {
		return
	}
	noise := honk.Noise
	if strings.HasPrefix(noise, "DZ:") {
		idx := strings.Index(noise, "\n")
		if idx == -1 {
			honk.Precis = noise
			noise = ""
		} else {
			honk.Precis = noise[:idx]
			noise = noise[idx+1:]
		}
	}
	honk.Precis = strings.TrimSpace(honk.Precis)

	noise = strings.TrimSpace(noise)
	noise = quickrename(noise, honk.UserID)
	noise = markitzero(noise)
	honk.Noise = noise
	honk.Onts = oneofakind(ontologies(honk.Noise))

	if redoimages {
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, true)
			htf.SpanClasses = allowedclasses
			p, _ := htf.String(honk.Precis)
			n, _ := htf.String(honk.Noise)
			honk.Precis = string(p)
			honk.Noise = string(n)
		}
		j := 0
		for i := 0; i < len(honk.Donks); i++ {
			if !zap[honk.Donks[i].XID] {
				honk.Donks[j] = honk.Donks[i]
				j++
			}
		}
		honk.Donks = honk.Donks[:j]
	}
}

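// xcelerate maps each byte onto a 64 character alphabet, producing a short
// printable identifier.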
func xcelerate(b []byte) string {
	letters := "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	for i, c := range b {
		b[i] = letters[c&63]
	}
	s := string(b)
	return s
}

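// shortxid derives a short, stable identifier from an XID by hashing it and
// encoding the first 20 bytes of the digest.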
func shortxid(xid string) string {
	h := sha512.New512_256()
	io.WriteString(h, xid)
	return xcelerate(h.Sum(nil)[:20])
}

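// xfiltrate generates a fresh random identifier.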
func xfiltrate() string {
	var b [18]byte
	rand.Read(b[:])
	return xcelerate(b[:])
}

var re_hashes = regexp.MustCompile(`(?:^| )#[[:alnum:]][[:alnum:]_-]*`)

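// ontologies pulls the hashtags out of a string, trimming the leading space
// that the regexp may capture.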
func ontologies(s string) []string {
	m := re_hashes.FindAllString(s, -1)
	j := 0
	for _, h := range m {
		if h[0] == '&' {
			continue
		}
		if h[0] != '#' {
			h = h[1:]
		}
		m[j] = h
		j++
	}
	return m[:j]
}

type Mention struct {
	who   string
	where string
}

var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)
var re_urltions = regexp.MustCompile(`@https://\S+`)

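// grapevine returns the locations of everyone mentioned in a string, whether
// as @user@host or as a bare @https:// URL.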
func grapevine(s string) []string {
	var mentions []string
	m := re_mentions.FindAllString(s, -1)
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, where)
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, m[i][1:])
	}
	return mentions
}

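// bunchofgrapes is like grapevine, but keeps track of who was mentioned along
// with where they live.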
func bunchofgrapes(s string) []Mention {
	m := re_mentions.FindAllString(s, -1)
	var mentions []Mention
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, Mention{who: m[i], where: where})
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, Mention{who: m[i][1:], where: m[i][1:]})
	}
	return mentions
}

type Emu struct {
	ID   string
	Name string
}

var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)

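// herdofemus finds the :emu: shortcodes in some noise that have a matching
// image on disk and returns them along with their URLs.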
func herdofemus(noise string) []Emu {
	m := re_emus.FindAllString(noise, -1)
	m = oneofakind(m)
	var emus []Emu
	for _, e := range m {
		fname := e[1 : len(e)-1]
		_, err := os.Stat("emus/" + fname + ".png")
		if err != nil {
			continue
		}
		url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
		emus = append(emus, Emu{ID: url, Name: e})
	}
	return emus
}

var re_memes = regexp.MustCompile("meme: ?([[:alnum:]_.-]+)")

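// memetize replaces "meme: name" directives in a honk with attachments built
// from the files in the memes directory.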
func memetize(honk *Honk) {
	repl := func(x string) string {
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()

		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		fileid, err := savefile("", name, name, url, ct, false, nil)
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		d := &Donk{
			FileID: fileid,
			XID:    "",
			Name:   name,
			Media:  ct,
			URL:    url,
			Local:  false,
		}
		honk.Donks = append(honk.Donks, d)
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}

var re_quickmention = regexp.MustCompile("(^| )@[[:alnum:]]+( |$)")

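// quickrename expands short @name mentions into the full @user@host handles
// saved for this user's honkers, repeating until nothing more expands.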
func quickrename(s string, userid int64) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			prefix := ""
			if m[0] == ' ' {
				prefix = " "
				m = m[1:]
			}
			prefix += "@"
			m = m[1:]
			if m[len(m)-1] == ' ' {
				m = m[:len(m)-1]
			}

			xid := fullname(m, userid)

			if xid != "" {
				_, name := handles(xid)
				if name != "" {
					nonstop = true
					m = name
				}
			}
			return prefix + m + " "
		})
	}
	return s
}

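// shortnames caches each user's mapping from honker XID to the local name
// they gave it; fullnames caches the reverse mapping.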
var shortnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.XID] = h.Name
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func shortname(userid int64, xid string) string {
	var m map[string]string
	ok := shortnames.Get(userid, &m)
	if ok {
		return m[xid]
	}
	return ""
}

var fullnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.Name] = h.XID
	}
	return m, true
}, Invalidator: &honkerinvalidator})

func fullname(name string, userid int64) string {
	var m map[string]string
	ok := fullnames.Get(userid, &m)
	if ok {
		return m[name]
	}
	return ""
}

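// mentionize wraps @user@host and @https:// mentions in h-card links to the
// mentioned actor.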
func mentionize(s string) string {
	s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
		where := gofish(m)
		if where == "" {
			return m
		}
		who := m[0 : 1+strings.IndexByte(m[1:], '@')]
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(where), html.EscapeString(who))
	})
	s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(m[1:]), html.EscapeString(m))
	})
	return s
}

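// ontologize turns #hashtags into links to this server's /o/ ontology pages.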
func ontologize(s string) string {
	s = re_hashes.ReplaceAllStringFunc(s, func(o string) string {
		if o[0] == '&' {
			return o
		}
		p := ""
		h := o
		if h[0] != '#' {
			p = h[:1]
			h = h[1:]
		}
		return fmt.Sprintf(`%s<a class="mention u-url" href="https://%s/o/%s">%s</a>`, p, serverName,
			strings.ToLower(h[1:]), h)
	})
	return s
}

var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")
var re_urlhost = regexp.MustCompile("https://([^/ ]+)")

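// originate extracts the host name from an https URL.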
func originate(u string) string {
	m := re_urlhost.FindStringSubmatch(u)
	if len(m) > 1 {
		return m[1]
	}
	return ""
}

var allhandles = make(map[string]string)
var handlelock sync.Mutex

// handle, handle@host
func handles(xid string) (string, string) {
	if xid == "" {
		return "", ""
	}
	handlelock.Lock()
	handle := allhandles[xid]
	handlelock.Unlock()
	if handle == "" {
		handle = findhandle(xid)
		handlelock.Lock()
		allhandles[xid] = handle
		handlelock.Unlock()
	}
	if handle == xid {
		return xid, xid
	}
	return handle, handle + "@" + originate(xid)
}

func findhandle(xid string) string {
	row := stmtGetXonker.QueryRow(xid, "handle")
	var handle string
	err := row.Scan(&handle)
	if err != nil {
		info, _ := investigate(xid)
		if info == nil {
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = info.Name
		}
		_, err = stmtSaveXonker.Exec(xid, handle, "handle")
		if err != nil {
			log.Printf("error saving handle: %s", err)
		}
	}
	return handle
}

var handleprelock sync.Mutex

func prehandle(xid string) {
	handleprelock.Lock()
	defer handleprelock.Unlock()
	handles(xid)
}

func prepend(s string, x []string) []string {
	return append([]string{s}, x...)
}

// pleroma leaks followers addressed posts to followers
func butnottooloud(aud []string) {
	for i, a := range aud {
		if strings.HasSuffix(a, "/followers") {
			aud[i] = ""
		}
	}
}

func keepitquiet(aud []string) bool {
	for _, a := range aud {
		if a == thewholeworld {
			return false
		}
	}
	return true
}

func firstclass(honk *Honk) bool {
	return honk.Audience[0] == thewholeworld
}

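// oneofakind deduplicates a slice of strings in place, also dropping empties.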
func oneofakind(a []string) []string {
	seen := make(map[string]bool)
	seen[""] = true
	j := 0
	for _, s := range a {
		if !seen[s] {
			seen[s] = true
			a[j] = s
			j++
		}
	}
	return a[:j]
}

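// ziggies caches, per local user, the key info used to sign outgoing
// requests.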
var ziggies = cache.New(cache.Options{Filler: func(userid int64) (*KeyInfo, bool) {
	var user *WhatAbout
	ok := somenumberedusers.Get(userid, &user)
	if !ok {
		return nil, false
	}
	ki := new(KeyInfo)
	ki.keyname = user.URL + "#key"
	ki.seckey = user.SecKey
	return ki, true
}})

func ziggy(userid int64) *KeyInfo {
	var ki *KeyInfo
	ziggies.Get(userid, &ki)
	return ki
}

var zaggies = make(map[string]*rsa.PublicKey)
var zaggylock sync.Mutex

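// zaggy looks up the public key for a signature keyname, checking memory,
// then the database, then fetching it from the web and saving it.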
func zaggy(keyname string) (key *rsa.PublicKey) {
	zaggylock.Lock()
	key = zaggies[keyname]
	zaggylock.Unlock()
	if key != nil {
		return
	}
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	var data string
	err := row.Scan(&data)
	if err != nil {
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return
		}
		keyobj, ok := j.GetMap("publicKey")
		if ok {
			j = keyobj
		}
		data, ok = j.GetString("publicKeyPem")
		if !ok {
			log.Printf("error finding %s pubkey", keyname)
			return
		}
		_, ok = j.GetString("owner")
		if !ok {
			log.Printf("error finding %s pubkey owner", keyname)
			return
		}
		_, key, err = httpsig.DecodeKey(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
			return
		}
		_, err = stmtSaveXonker.Exec(keyname, data, "pubkey")
		if err != nil {
			log.Printf("error saving key: %s", err)
		}
	} else {
		_, key, err = httpsig.DecodeKey(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
			return
		}
	}
	zaggylock.Lock()
	zaggies[keyname] = key
	zaggylock.Unlock()
	return
}

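// makeitworksomehowwithoutregardforkeycontinuity drops any cached copy of a
// key and verifies the request again, for actors that change their keys.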
func makeitworksomehowwithoutregardforkeycontinuity(keyname string, r *http.Request, payload []byte) (string, error) {
	_, err := stmtDeleteXonker.Exec(keyname, "pubkey")
	if err != nil {
		log.Printf("error deleting key: %s", err)
	}
	zaggylock.Lock()
	delete(zaggies, keyname)
	zaggylock.Unlock()
	return httpsig.VerifyRequest(r, payload, zaggy)
}

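// keymatch checks that a signature's keyname belongs to the claimed actor
// and, if so, returns the actor's origin.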
func keymatch(keyname string, actor string) string {
	hash := strings.IndexByte(keyname, '#')
	if hash == -1 {
		hash = len(keyname)
	}
	owner := keyname[0:hash]
	if owner == actor {
		return originate(actor)
	}
	return ""
}