2019-04-13 19:58:42 +02:00
|
|
|
//
|
|
|
|
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
|
|
|
|
//
|
|
|
|
// Permission to use, copy, modify, and distribute this software for any
|
|
|
|
// purpose with or without fee is hereby granted, provided that the above
|
|
|
|
// copyright notice and this permission notice appear in all copies.
|
|
|
|
//
|
|
|
|
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
|
|
|
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
|
|
|
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
|
|
|
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
|
|
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
|
|
|
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
|
|
|
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
|
|
|
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"crypto/rand"
|
|
|
|
"crypto/rsa"
|
2019-10-22 19:06:43 +02:00
|
|
|
"crypto/sha512"
|
2019-04-13 19:58:42 +02:00
|
|
|
"fmt"
|
|
|
|
"html/template"
|
2019-10-22 19:06:43 +02:00
|
|
|
"io"
|
2019-04-13 19:58:42 +02:00
|
|
|
"log"
|
2019-04-20 22:12:41 +02:00
|
|
|
"net/http"
|
2019-11-11 05:49:09 +01:00
|
|
|
"net/url"
|
2019-05-31 06:24:18 +02:00
|
|
|
"os"
|
2019-04-13 19:58:42 +02:00
|
|
|
"regexp"
|
|
|
|
"strings"
|
2019-05-18 01:37:43 +02:00
|
|
|
|
2019-09-19 06:50:26 +02:00
|
|
|
"golang.org/x/net/html"
|
2019-10-13 01:25:23 +02:00
|
|
|
"humungus.tedunangst.com/r/webs/cache"
|
2019-05-18 01:37:43 +02:00
|
|
|
"humungus.tedunangst.com/r/webs/htfilter"
|
2019-07-29 01:44:27 +02:00
|
|
|
"humungus.tedunangst.com/r/webs/httpsig"
|
2019-10-22 05:12:43 +02:00
|
|
|
"humungus.tedunangst.com/r/webs/templates"
|
2019-04-13 19:58:42 +02:00
|
|
|
)
|
|
|
|
|
2019-10-10 06:40:29 +02:00
|
|
|
// allowedclasses is the set of span class names allowed to survive HTML
// filtering (code-highlighting classes); populated once in init.
var allowedclasses = make(map[string]bool)
|
|
|
|
|
|
|
|
func init() {
|
|
|
|
allowedclasses["kw"] = true
|
|
|
|
allowedclasses["bi"] = true
|
|
|
|
allowedclasses["st"] = true
|
|
|
|
allowedclasses["nm"] = true
|
|
|
|
allowedclasses["tp"] = true
|
|
|
|
allowedclasses["op"] = true
|
|
|
|
allowedclasses["cm"] = true
|
|
|
|
allowedclasses["al"] = true
|
|
|
|
allowedclasses["dl"] = true
|
|
|
|
}
|
|
|
|
|
2019-07-10 20:36:14 +02:00
|
|
|
// reverbolate prepares a batch of honks for display to the given user
// (userid == -1 means an anonymous/public viewer). It rewrites the verb,
// resolves handles and display names, filters the HTML, expands custom
// emus, prunes consumed attachments, and fills the HTML-typed fields.
func reverbolate(userid int64, honks []*Honk) {
	for _, h := range honks {
		// past-tense the verb: "honk" -> "honked", etc.
		h.What += "ed"
		if h.What == "tonked" {
			h.What = "honked back"
			h.Style += " subtle"
		}
		if !h.Public {
			h.Style += " limited"
		}
		translate(h, false)
		// Whofore 2 and 3 appear to mark the user's own honks; others
		// are incoming. TODO confirm against the rest of the project.
		if h.Whofore == 2 || h.Whofore == 3 {
			h.URL = h.XID
			if h.What != "bonked" {
				h.Noise = re_memes.ReplaceAllString(h.Noise, "")
				h.Noise = mentionize(h.Noise)
				h.Noise = ontologize(h.Noise)
			}
			h.Username, h.Handle = handles(h.Honker)
		} else {
			_, h.Handle = handles(h.Honker)
			// prefer the user's assigned nickname for this honker
			short := shortname(userid, h.Honker)
			if short != "" {
				h.Username = short
			} else {
				h.Username = h.Handle
				// truncate long handles for display
				if len(h.Username) > 20 {
					h.Username = h.Username[:20] + ".."
				}
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		if h.Oonker != "" {
			_, h.Oondle = handles(h.Oonker)
		}
		h.Precis = demoji(h.Precis)
		h.Noise = demoji(h.Noise)
		h.Open = "open"

		// zap collects XIDs of donks consumed inline (images, emus) so
		// they can be dropped from the attachment list below.
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, false)
			htf.SpanClasses = allowedclasses
			htf.BaseURL, _ = url.Parse(h.XID)
			p, _ := htf.String(h.Precis)
			n, _ := htf.String(h.Noise)
			h.Precis = string(p)
			h.Noise = string(n)
		}

		if userid == -1 {
			// anonymous viewers get content warnings collapsed
			if h.Precis != "" {
				h.Open = ""
			}
		} else {
			unsee(userid, h)
			// drop the placeholder summary once the honk is unsee'd open
			if h.Open == "open" && h.Precis == "unspecified horror" {
				h.Precis = ""
			}
		}
		// collapse very long posts behind a summary
		if len(h.Noise) > 6000 && h.Open == "open" {
			if h.Precis == "" {
				h.Precis = "really freaking long"
			}
			h.Open = ""
		}

		// expand :shortcode: references using this honk's emu donks;
		// each consumed emu is zapped from the attachment list.
		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d.XID] = true
					if d.Local {
						return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
					}
				}
			}
			return e
		}
		h.Precis = re_emus.ReplaceAllStringFunc(h.Precis, emuxifier)
		h.Noise = re_emus.ReplaceAllStringFunc(h.Noise, emuxifier)

		// in-place filter: keep only donks not consumed inline
		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i].XID] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]

		// content is already filtered above, so it is safe to mark as HTML
		h.HTPrecis = template.HTML(h.Precis)
		h.HTML = template.HTML(h.Noise)
	}
}
|
|
|
|
|
2019-10-22 06:19:31 +02:00
|
|
|
// replaceimgsand returns an htfilter Imager callback that rewrites <img>
// nodes. Emoji images (class "Emoji" with alt text) collapse to their alt
// text. Images matching a locally saved donk are pointed at the local /d/
// copy (absolute prepends this server's https origin) and the donk's XID
// is recorded in zap so the caller can drop it from the attachment list.
// Any other image is replaced with a link to its original source.
func replaceimgsand(zap map[string]bool, absolute bool) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		//title := GetAttr(node, "title")
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		d := finddonk(src)
		if d != nil {
			zap[d.XID] = true
			base := ""
			if absolute {
				base = "https://" + serverName
			}
			return string(templates.Sprintf(`<img alt="%s" title="%s" src="%s/d/%s">`, alt, alt, base, d.XID))
		}
		return string(templates.Sprintf(`<img alt="%s" src="<a href="%s">%s<a>">`, alt, src, src))
	}
}
|
|
|
|
|
2019-10-22 05:12:43 +02:00
|
|
|
// inlineimgsfor returns an htfilter Imager callback that saves each
// referenced image as a donk attached to honk and removes the <img>
// from the text (the callback returns an empty replacement).
func inlineimgsfor(honk *Honk) func(node *html.Node) string {
	return func(node *html.Node) string {
		src := htfilter.GetAttr(node, "src")
		alt := htfilter.GetAttr(node, "alt")
		d := savedonk(src, "image", alt, "image", true)
		if d != nil {
			honk.Donks = append(honk.Donks, d)
		}
		log.Printf("inline img with src: %s", src)
		return ""
	}
}
|
|
|
|
|
2019-10-22 06:19:31 +02:00
|
|
|
func imaginate(honk *Honk) {
|
2019-10-22 08:02:36 +02:00
|
|
|
var htf htfilter.Filter
|
|
|
|
htf.Imager = inlineimgsfor(honk)
|
2019-11-11 05:49:09 +01:00
|
|
|
htf.BaseURL, _ = url.Parse(honk.XID)
|
2019-10-22 08:02:36 +02:00
|
|
|
htf.String(honk.Noise)
|
2019-10-22 06:19:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// translate converts a honk's markdown-ish noise into HTML. A leading
// "DZ:" line is split off as the precis (danger zone/content warning).
// Ontologies are extracted from the rendered noise. When redoimages is
// set, inline images matching saved donks are rewritten to local copies,
// consumed donks are pruned, memes are stripped, and mentions/hashtags
// are linkified. Honks already in HTML format are left untouched.
func translate(honk *Honk, redoimages bool) {
	if honk.Format == "html" {
		return
	}
	noise := honk.Noise
	if strings.HasPrefix(noise, "DZ:") {
		// first line becomes the precis; the rest stays as the body
		idx := strings.Index(noise, "\n")
		if idx == -1 {
			honk.Precis = noise
			noise = ""
		} else {
			honk.Precis = noise[:idx]
			noise = noise[idx+1:]
		}
	}
	honk.Precis = markitzero(strings.TrimSpace(honk.Precis))

	noise = strings.TrimSpace(noise)
	noise = markitzero(noise)
	honk.Noise = noise
	honk.Onts = oneofakind(ontologies(honk.Noise))

	if redoimages {
		// zap collects XIDs of donks consumed inline by the image rewriter
		zap := make(map[string]bool)
		{
			var htf htfilter.Filter
			htf.Imager = replaceimgsand(zap, true)
			htf.SpanClasses = allowedclasses
			p, _ := htf.String(honk.Precis)
			n, _ := htf.String(honk.Noise)
			honk.Precis = string(p)
			honk.Noise = string(n)
		}
		// in-place filter: drop donks that now appear inline
		j := 0
		for i := 0; i < len(honk.Donks); i++ {
			if !zap[honk.Donks[i].XID] {
				honk.Donks[j] = honk.Donks[i]
				j++
			}
		}
		honk.Donks = honk.Donks[:j]

		honk.Noise = re_memes.ReplaceAllString(honk.Noise, "")
		honk.Noise = ontologize(mentionize(honk.Noise))
		// tag every generated link with microformat mention classes
		honk.Noise = strings.Replace(honk.Noise, "<a href=", "<a class=\"mention u-url\" href=", -1)
	}
}
|
|
|
|
|
2019-10-22 19:06:43 +02:00
|
|
|
// xcelerate maps each byte of b (mod 64) onto a 64-character alphabet of
// consonants and digits, producing a compact URL-safe identifier.
// Note: b is modified in place.
func xcelerate(b []byte) string {
	const letters = "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	for i := range b {
		b[i] = letters[b[i]&63]
	}
	return string(b)
}
|
|
|
|
|
|
|
|
func shortxid(xid string) string {
|
|
|
|
h := sha512.New512_256()
|
|
|
|
io.WriteString(h, xid)
|
|
|
|
return xcelerate(h.Sum(nil)[:20])
|
2019-05-20 16:48:02 +02:00
|
|
|
}
|
|
|
|
|
2019-04-13 19:58:42 +02:00
|
|
|
func xfiltrate() string {
|
2019-06-17 01:18:33 +02:00
|
|
|
var b [18]byte
|
|
|
|
rand.Read(b[:])
|
2019-10-22 19:06:43 +02:00
|
|
|
return xcelerate(b[:])
|
2019-04-13 19:58:42 +02:00
|
|
|
}
|
|
|
|
|
2019-11-13 01:03:48 +01:00
|
|
|
// re_hashes matches #hashtags at the start of text or after a space or
// '>', requiring at least one letter so pure-number tags are skipped.
var re_hashes = regexp.MustCompile(`(?:^| |>)#[[:alnum:]]*[[:alpha:]][[:alnum:]_-]*`)
|
2019-06-16 00:24:18 +02:00
|
|
|
|
|
|
|
func ontologies(s string) []string {
|
|
|
|
m := re_hashes.FindAllString(s, -1)
|
2019-07-03 22:06:31 +02:00
|
|
|
j := 0
|
|
|
|
for _, h := range m {
|
|
|
|
if h[0] == '&' {
|
|
|
|
continue
|
|
|
|
}
|
2019-06-16 00:30:59 +02:00
|
|
|
if h[0] != '#' {
|
2019-07-03 22:06:31 +02:00
|
|
|
h = h[1:]
|
2019-06-16 00:30:59 +02:00
|
|
|
}
|
2019-07-03 22:06:31 +02:00
|
|
|
m[j] = h
|
|
|
|
j++
|
2019-06-16 00:30:59 +02:00
|
|
|
}
|
2019-07-03 22:06:31 +02:00
|
|
|
return m[:j]
|
2019-06-16 00:24:18 +02:00
|
|
|
}
|
|
|
|
|
2019-04-13 19:58:42 +02:00
|
|
|
// Mention is one reference to another actor found in a honk's text.
type Mention struct {
	who   string // the literal @handle text as written
	where string // the resolved actor URL
}
|
|
|
|
|
2019-06-25 22:44:48 +02:00
|
|
|
// re_mentions matches fediverse-style @user@example.com mentions.
var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)

// re_urltions matches mentions written directly as @https://... URLs.
var re_urltions = regexp.MustCompile(`@https://\S+`)
|
2019-04-13 19:58:42 +02:00
|
|
|
|
|
|
|
func grapevine(s string) []string {
|
|
|
|
var mentions []string
|
2019-04-19 18:35:31 +02:00
|
|
|
m := re_mentions.FindAllString(s, -1)
|
2019-04-13 19:58:42 +02:00
|
|
|
for i := range m {
|
|
|
|
where := gofish(m[i])
|
|
|
|
if where != "" {
|
|
|
|
mentions = append(mentions, where)
|
|
|
|
}
|
|
|
|
}
|
2019-04-19 18:35:31 +02:00
|
|
|
m = re_urltions.FindAllString(s, -1)
|
|
|
|
for i := range m {
|
|
|
|
mentions = append(mentions, m[i][1:])
|
|
|
|
}
|
2019-04-13 19:58:42 +02:00
|
|
|
return mentions
|
|
|
|
}
|
|
|
|
|
|
|
|
func bunchofgrapes(s string) []Mention {
|
|
|
|
m := re_mentions.FindAllString(s, -1)
|
|
|
|
var mentions []Mention
|
|
|
|
for i := range m {
|
|
|
|
where := gofish(m[i])
|
|
|
|
if where != "" {
|
|
|
|
mentions = append(mentions, Mention{who: m[i], where: where})
|
|
|
|
}
|
|
|
|
}
|
2019-04-19 18:35:31 +02:00
|
|
|
m = re_urltions.FindAllString(s, -1)
|
|
|
|
for i := range m {
|
|
|
|
mentions = append(mentions, Mention{who: m[i][1:], where: m[i][1:]})
|
|
|
|
}
|
2019-04-13 19:58:42 +02:00
|
|
|
return mentions
|
|
|
|
}
|
|
|
|
|
|
|
|
// Emu is a custom emoji available on this server.
type Emu struct {
	ID   string // URL of the emoji image
	Name string // the :shortcode: text it replaces
}
|
|
|
|
|
2019-07-10 05:04:19 +02:00
|
|
|
// re_emus matches :shortcode: style custom emoji references.
var re_emus = regexp.MustCompile(`:[[:alnum:]_-]+:`)
|
2019-04-13 19:58:42 +02:00
|
|
|
|
|
|
|
func herdofemus(noise string) []Emu {
|
|
|
|
m := re_emus.FindAllString(noise, -1)
|
|
|
|
m = oneofakind(m)
|
|
|
|
var emus []Emu
|
|
|
|
for _, e := range m {
|
|
|
|
fname := e[1 : len(e)-1]
|
2019-07-08 02:07:16 +02:00
|
|
|
_, err := os.Stat("emus/" + fname + ".png")
|
|
|
|
if err != nil {
|
|
|
|
continue
|
|
|
|
}
|
2019-04-13 19:58:42 +02:00
|
|
|
url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
|
|
|
|
emus = append(emus, Emu{ID: url, Name: e})
|
|
|
|
}
|
|
|
|
return emus
|
|
|
|
}
|
|
|
|
|
2019-05-31 06:24:18 +02:00
|
|
|
// re_memes matches "meme: filename" directives naming a stored meme file.
var re_memes = regexp.MustCompile("meme: ?([[:alnum:]_.-]+)")
|
|
|
|
|
2019-07-01 01:04:37 +02:00
|
|
|
// memetize replaces each "meme: name" directive in the honk's noise with
// an attachment (donk) referencing the stored meme file, removing the
// directive from the text. Directives naming unknown files, or whose
// save fails, are left in the text unchanged.
func memetize(honk *Honk) {
	repl := func(x string) string {
		// x is "meme:name" or "meme: name"; skip the fixed prefix
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		// sniff the content type from the first 512 bytes
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()

		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		fileid, err := savefile("", name, name, url, ct, false, nil)
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		d := &Donk{
			FileID: fileid,
			XID:    "",
			Name:   name,
			Media:  ct,
			URL:    url,
			Local:  false,
		}
		honk.Donks = append(honk.Donks, d)
		// directive consumed; remove it from the text
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}
|
|
|
|
|
2019-11-09 22:33:34 +01:00
|
|
|
// re_quickmention matches a bare @name (no host part) standing alone
// between whitespace/line boundaries; quickrename expands these.
var re_quickmention = regexp.MustCompile("(^|[ \n])@[[:alnum:]]+([ \n]|$)")
|
2019-09-10 20:30:52 +02:00
|
|
|
|
|
|
|
// quickrename expands bare @name mentions in s into full @name@host
// handles using the user's saved honker nicknames (via fullname), then
// handles. It repeats until a full pass makes no replacement, since the
// regexp matches cannot overlap and one pass may leave adjacent
// candidates unprocessed.
func quickrename(s string, userid int64) string {
	nonstop := true
	for nonstop {
		nonstop = false
		s = re_quickmention.ReplaceAllStringFunc(s, func(m string) string {
			// peel off the leading boundary character, if captured
			prefix := ""
			if m[0] == ' ' || m[0] == '\n' {
				prefix = m[:1]
				m = m[1:]
			}
			prefix += "@"
			m = m[1:]
			// peel off the trailing boundary character, if captured
			tail := ""
			if m[len(m)-1] == ' ' || m[len(m)-1] == '\n' {
				tail = m[len(m)-1:]
				m = m[:len(m)-1]
			}

			// m is now the bare name; look up the honker it refers to
			xid := fullname(m, userid)

			if xid != "" {
				_, name := handles(xid)
				if name != "" {
					// a replacement happened; run another pass
					nonstop = true
					m = name
				}
			}
			return prefix + m + tail
		})
	}
	return s
}
|
|
|
|
|
2019-10-13 01:25:23 +02:00
|
|
|
// shortnames caches, per user, the map from honker XID to the short
// local nickname that user assigned; invalidated when honkers change.
var shortnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.XID] = h.Name
	}
	return m, true
}, Invalidator: &honkerinvalidator})
|
2019-10-12 16:34:23 +02:00
|
|
|
|
|
|
|
func shortname(userid int64, xid string) string {
|
|
|
|
var m map[string]string
|
|
|
|
ok := shortnames.Get(userid, &m)
|
|
|
|
if ok {
|
|
|
|
return m[xid]
|
|
|
|
}
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
|
2019-10-16 05:27:30 +02:00
|
|
|
// fullnames caches, per user, the reverse of shortnames: the map from a
// honker's local nickname to its XID; invalidated when honkers change.
var fullnames = cache.New(cache.Options{Filler: func(userid int64) (map[string]string, bool) {
	honkers := gethonkers(userid)
	m := make(map[string]string)
	for _, h := range honkers {
		m[h.Name] = h.XID
	}
	return m, true
}, Invalidator: &honkerinvalidator})
|
|
|
|
|
|
|
|
func fullname(name string, userid int64) string {
|
|
|
|
var m map[string]string
|
|
|
|
ok := fullnames.Get(userid, &m)
|
|
|
|
if ok {
|
|
|
|
return m[name]
|
|
|
|
}
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
|
2019-04-20 05:23:22 +02:00
|
|
|
func mentionize(s string) string {
|
2019-04-13 19:58:42 +02:00
|
|
|
s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
|
2019-04-16 05:48:01 +02:00
|
|
|
where := gofish(m)
|
|
|
|
if where == "" {
|
|
|
|
return m
|
|
|
|
}
|
2019-04-17 02:33:01 +02:00
|
|
|
who := m[0 : 1+strings.IndexByte(m[1:], '@')]
|
2019-04-16 22:10:51 +02:00
|
|
|
return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
|
|
|
|
html.EscapeString(where), html.EscapeString(who))
|
2019-04-13 19:58:42 +02:00
|
|
|
})
|
2019-04-19 18:35:31 +02:00
|
|
|
s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
|
|
|
|
return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
|
|
|
|
html.EscapeString(m[1:]), html.EscapeString(m))
|
|
|
|
})
|
2019-04-13 19:58:42 +02:00
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
2019-08-25 05:03:05 +02:00
|
|
|
func ontologize(s string) string {
|
|
|
|
s = re_hashes.ReplaceAllStringFunc(s, func(o string) string {
|
|
|
|
if o[0] == '&' {
|
|
|
|
return o
|
|
|
|
}
|
|
|
|
p := ""
|
|
|
|
h := o
|
|
|
|
if h[0] != '#' {
|
|
|
|
p = h[:1]
|
|
|
|
h = h[1:]
|
|
|
|
}
|
2019-10-28 21:05:18 +01:00
|
|
|
return fmt.Sprintf(`%s<a href="https://%s/o/%s">%s</a>`, p, serverName,
|
2019-08-28 04:25:02 +02:00
|
|
|
strings.ToLower(h[1:]), h)
|
2019-08-25 05:03:05 +02:00
|
|
|
})
|
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
2019-04-13 19:58:42 +02:00
|
|
|
// re_unurl captures the host and the final path component of an https URL.
var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")

// re_urlhost captures just the host portion of an https URL.
var re_urlhost = regexp.MustCompile("https://([^/ ]+)")
|
2019-04-13 19:58:42 +02:00
|
|
|
|
2019-05-21 19:56:15 +02:00
|
|
|
func originate(u string) string {
|
2019-06-14 22:32:10 +02:00
|
|
|
m := re_urlhost.FindStringSubmatch(u)
|
|
|
|
if len(m) > 1 {
|
2019-05-21 19:56:15 +02:00
|
|
|
return m[1]
|
|
|
|
}
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
|
2019-10-29 19:18:13 +01:00
|
|
|
// allhandles caches the short handle for an actor XID. On a cache miss
// it consults the local database; failing that it investigates the
// remote actor; as a last resort it falls back to the final path
// component of the XID, or the XID itself.
var allhandles = cache.New(cache.Options{Filler: func(xid string) (string, bool) {
	row := stmtGetXonker.QueryRow(xid, "handle")
	var handle string
	err := row.Scan(&handle)
	if err != nil {
		log.Printf("need to get a handle: %s", xid)
		info, err := investigate(xid)
		if err != nil {
			// remote lookup failed; guess from the URL shape
			m := re_unurl.FindStringSubmatch(xid)
			if len(m) > 2 {
				handle = m[2]
			} else {
				handle = xid
			}
		} else {
			handle = info.Name
		}
	}
	return handle, true
}})
|
2019-08-19 00:32:44 +02:00
|
|
|
|
2019-10-29 19:18:13 +01:00
|
|
|
// handle, handle@host
|
|
|
|
func handles(xid string) (string, string) {
|
|
|
|
if xid == "" {
|
|
|
|
return "", ""
|
|
|
|
}
|
|
|
|
var handle string
|
|
|
|
allhandles.Get(xid, &handle)
|
|
|
|
if handle == xid {
|
|
|
|
return xid, xid
|
|
|
|
}
|
|
|
|
return handle, handle + "@" + originate(xid)
|
2019-08-19 00:32:44 +02:00
|
|
|
}
|
|
|
|
|
2019-05-03 20:09:08 +02:00
|
|
|
// butnottooloud blanks out, in place, any audience entry addressed to a
// followers collection, quieting delivery to followers.
func butnottooloud(aud []string) {
	for i := range aud {
		if strings.HasSuffix(aud[i], "/followers") {
			aud[i] = ""
		}
	}
}
|
|
|
|
|
2019-10-30 08:39:56 +01:00
|
|
|
func loudandproud(aud []string) bool {
|
2019-05-28 08:36:47 +02:00
|
|
|
for _, a := range aud {
|
|
|
|
if a == thewholeworld {
|
2019-10-30 08:39:56 +01:00
|
|
|
return true
|
2019-05-28 08:36:47 +02:00
|
|
|
}
|
|
|
|
}
|
2019-10-30 08:39:56 +01:00
|
|
|
return false
|
2019-05-28 08:36:47 +02:00
|
|
|
}
|
|
|
|
|
2019-08-24 02:35:58 +02:00
|
|
|
func firstclass(honk *Honk) bool {
|
|
|
|
return honk.Audience[0] == thewholeworld
|
|
|
|
}
|
|
|
|
|
2019-04-13 19:58:42 +02:00
|
|
|
// oneofakind removes duplicate and empty strings from a in place,
// preserving first-occurrence order, and returns the shortened slice
// (which shares a's backing array).
func oneofakind(a []string) []string {
	seen := map[string]bool{"": true}
	out := a[:0]
	for _, s := range a {
		if seen[s] {
			continue
		}
		seen[s] = true
		out = append(out, s)
	}
	return out
}
|
|
|
|
|
2019-10-26 00:46:27 +02:00
|
|
|
// ziggies caches per-user signing key info (key name derived from the
// user's URL, plus the secret key) for signing outgoing requests.
// Unknown user ids are not cached (filler returns false).
var ziggies = cache.New(cache.Options{Filler: func(userid int64) (*KeyInfo, bool) {
	var user *WhatAbout
	ok := somenumberedusers.Get(userid, &user)
	if !ok {
		return nil, false
	}
	ki := new(KeyInfo)
	ki.keyname = user.URL + "#key"
	ki.seckey = user.SecKey
	return ki, true
}})
|
|
|
|
|
|
|
|
func ziggy(userid int64) *KeyInfo {
|
|
|
|
var ki *KeyInfo
|
|
|
|
ziggies.Get(userid, &ki)
|
|
|
|
return ki
|
2019-04-13 19:58:42 +02:00
|
|
|
}
|
|
|
|
|
2019-10-30 08:31:16 +01:00
|
|
|
// zaggies caches remote actors' public keys by key name, up to 512
// entries. On a database miss it fetches the actor document, ingests it,
// and retries the lookup. Failures cache nil (with ok=true) so repeated
// requests do not hammer the network.
var zaggies = cache.New(cache.Options{Filler: func(keyname string) (*rsa.PublicKey, bool) {
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	var data string
	err := row.Scan(&data)
	if err != nil {
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return nil, true
		}
		// ingest the fetched actor, then retry the local lookup
		allinjest(originate(keyname), j)
		row = stmtGetXonker.QueryRow(keyname, "pubkey")
		err = row.Scan(&data)
	}
	if err == nil {
		_, key, err := httpsig.DecodeKey(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
		}
		// key is nil when decoding failed; cached anyway
		return key, true
	}
	return nil, true
}, Limit: 512})
|
2019-10-30 08:31:16 +01:00
|
|
|
|
|
|
|
func zaggy(keyname string) *rsa.PublicKey {
|
|
|
|
var key *rsa.PublicKey
|
|
|
|
zaggies.Get(keyname, &key)
|
|
|
|
return key
|
2019-04-20 22:12:41 +02:00
|
|
|
}
|
|
|
|
|
2019-05-21 19:56:15 +02:00
|
|
|
func keymatch(keyname string, actor string) string {
|
2019-04-23 17:02:43 +02:00
|
|
|
hash := strings.IndexByte(keyname, '#')
|
|
|
|
if hash == -1 {
|
|
|
|
hash = len(keyname)
|
|
|
|
}
|
|
|
|
owner := keyname[0:hash]
|
|
|
|
if owner == actor {
|
2019-05-21 19:56:15 +02:00
|
|
|
return originate(actor)
|
2019-04-23 17:02:43 +02:00
|
|
|
}
|
2019-05-21 19:56:15 +02:00
|
|
|
return ""
|
2019-04-13 19:58:42 +02:00
|
|
|
}
|