//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"fmt"
	"html"
	"html/template"
	"log"
	"net/http"
	"os"
	"regexp"
	"strings"
	"sync"

	"humungus.tedunangst.com/r/webs/htfilter"
)

// reverbolate prepares honks for display: it fixes up the verb tense of What,
// fills in URL, Username, and Handle, runs the noise through the HTML filter,
// swaps custom emoji shortcodes for <img> tags, and drops the emoji donks so
// they are not shown again as attachments.
func reverbolate(honks []*Honk) {
	filt := htfilter.New()
	for _, h := range honks {
		h.What += "ed"
		if h.Whofore == 2 || h.Whofore == 3 {
			h.URL = h.XID
			h.Noise = mentionize(h.Noise)
			h.Username, h.Handle = honkerhandle(h.Honker)
		} else {
			_, h.Handle = honkerhandle(h.Honker)
			h.Username = h.Handle
			if len(h.Username) > 20 {
				h.Username = h.Username[:20] + ".."
			}
			if h.URL == "" {
				h.URL = h.XID
			}
		}
		zap := make(map[*Donk]bool)
		h.Noise = unpucker(h.Noise)
		precis := h.Precis
		if strings.HasPrefix(h.Noise, "<p>"+precis) {
			precis = ""
		}
		if precis != "" {
			if strings.IndexByte(precis, ':') == -1 {
				precis = "summary: " + precis
			}
			precis = "<p>" + precis + "<p>"
		}
		h.HTML, _ = filt.String(precis + h.Noise)
		emuxifier := func(e string) string {
			for _, d := range h.Donks {
				if d.Name == e {
					zap[d] = true
					return fmt.Sprintf(`<img class="emu" title="%s" src="/d/%s">`, d.Name, d.XID)
				}
			}
			return e
		}
		h.HTML = template.HTML(re_emus.ReplaceAllStringFunc(string(h.HTML), emuxifier))
		j := 0
		for i := 0; i < len(h.Donks); i++ {
			if !zap[h.Donks[i]] {
				h.Donks[j] = h.Donks[i]
				j++
			}
		}
		h.Donks = h.Donks[:j]
	}
}

// osmosis filters a timeline, dropping any honk whose precis or noise matches
// one of the user's zord (filtered word) patterns.
func osmosis(honks []*Honk, userid int64) []*Honk {
	zords := getzords(userid)
	j := 0
outer:
	for _, h := range honks {
		for _, z := range zords {
			if z.MatchString(h.Precis) || z.MatchString(h.Noise) {
				continue outer
			}
		}
		honks[j] = h
		j++
	}
	return honks[0:j]
}

// shortxid returns the final path component of an xid URL.
func shortxid(xid string) string {
	idx := strings.LastIndexByte(xid, '/')
	if idx == -1 {
		return xid
	}
	return xid[idx+1:]
}

// xfiltrate generates a random 18 character identifier drawn from a 64
// character alphabet.
func xfiltrate() string {
	letters := "BCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz1234567891234567891234"
	var b [18]byte
	rand.Read(b[:])
	for i, c := range b {
		b[i] = letters[c&63]
	}
	s := string(b[:])
	return s
}
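
// Illustrative only (not part of the original file): a quick sketch of how
// shortxid and xfiltrate behave, assuming xids look like honk activity URLs.
func exampleShortxid() {
	// shortxid keeps only the trailing path component.
	fmt.Println(shortxid("https://example.com/u/alice/h/A1B2C3")) // A1B2C3
	// xfiltrate produces a fresh random 18 character identifier each call.
	fmt.Println(xfiltrate())
}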

var re_hashes = regexp.MustCompile(`(?:^|\W)#[[:alnum:]]+`)

// ontologies extracts the hashtags from a post, trimming the leading
// character that the regexp captures before the '#'.
func ontologies(s string) []string {
	m := re_hashes.FindAllString(s, -1)
	for i, h := range m {
		if h[0] != '#' {
			m[i] = h[1:]
		}
	}
	return m
}
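
// Illustrative only (not part of the original file): ontologies pulls hashtags
// out of a post, keeping the leading '#'.
func exampleOntologies() {
	tags := ontologies("planting season #garden and #SolanumTuberosum")
	fmt.Println(tags) // [#garden #SolanumTuberosum]
}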

type Mention struct {
	who   string
	where string
}

var re_mentions = regexp.MustCompile(`@[[:alnum:]._-]+@[[:alnum:].-]*[[:alnum:]]`)
var re_urltions = regexp.MustCompile(`@https://\S+`)

// grapevine returns the resolved locations of everyone mentioned in a post.
func grapevine(s string) []string {
	var mentions []string
	m := re_mentions.FindAllString(s, -1)
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, where)
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, m[i][1:])
	}
	return mentions
}

// bunchofgrapes is like grapevine, but pairs each mention with its location.
func bunchofgrapes(s string) []Mention {
	m := re_mentions.FindAllString(s, -1)
	var mentions []Mention
	for i := range m {
		where := gofish(m[i])
		if where != "" {
			mentions = append(mentions, Mention{who: m[i], where: where})
		}
	}
	m = re_urltions.FindAllString(s, -1)
	for i := range m {
		mentions = append(mentions, Mention{who: m[i][1:], where: m[i][1:]})
	}
	return mentions
}
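
// Illustrative only (not part of the original file): the mention regexp picks
// out fediverse style @user@host addresses; grapevine and bunchofgrapes then
// resolve each one with gofish, so their results depend on what gofish finds.
func exampleMentions() {
	m := re_mentions.FindAllString("cc @alice@example.com and @bob@masto.example", -1)
	fmt.Println(m) // [@alice@example.com @bob@masto.example]
}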

type Emu struct {
	ID   string
	Name string
}

var re_link = regexp.MustCompile(`@?https?://[^\s"]+[\w/)]`)
var re_emus = regexp.MustCompile(`:[[:alnum:]_]+:`)

// herdofemus collects the custom emoji (:shortcode:) used in a post, mapping
// each one to its image URL on this server.
func herdofemus(noise string) []Emu {
	m := re_emus.FindAllString(noise, -1)
	m = oneofakind(m)
	var emus []Emu
	for _, e := range m {
		fname := e[1 : len(e)-1]
		url := fmt.Sprintf("https://%s/emu/%s.png", serverName, fname)
		emus = append(emus, Emu{ID: url, Name: e})
	}
	return emus
}
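
// Illustrative only (not part of the original file): herdofemus maps each
// :shortcode: in a post to its image URL, so the exact URLs depend on
// serverName.
func exampleHerdofemus() {
	emus := herdofemus("time to make some noise :cowbell: :cowbell:")
	for _, e := range emus {
		fmt.Printf("%s -> %s\n", e.Name, e.ID) // :cowbell: -> https://<serverName>/emu/cowbell.png
	}
}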

var re_memes = regexp.MustCompile("meme: ?([[:alnum:]_.-]+)")

// memetize looks for "meme: filename" directives in a honk's noise, attaches
// the named file from the memes directory as a donk, and removes the
// directive from the text.
func memetize(honk *Honk) {
	repl := func(x string) string {
		name := x[5:]
		if name[0] == ' ' {
			name = name[1:]
		}
		fd, err := os.Open("memes/" + name)
		if err != nil {
			log.Printf("no meme for %s", name)
			return x
		}
		var peek [512]byte
		n, _ := fd.Read(peek[:])
		ct := http.DetectContentType(peek[:n])
		fd.Close()

		url := fmt.Sprintf("https://%s/meme/%s", serverName, name)
		res, err := stmtSaveFile.Exec("", name, url, ct, 0, "")
		if err != nil {
			log.Printf("error saving meme: %s", err)
			return x
		}
		var d Donk
		d.FileID, _ = res.LastInsertId()
		d.XID = ""
		d.Name = name
		d.Media = ct
		d.URL = url
		d.Local = false
		honk.Donks = append(honk.Donks, &d)
		log.Printf("replace with -")
		return ""
	}
	honk.Noise = re_memes.ReplaceAllStringFunc(honk.Noise, repl)
}

var re_bolder = regexp.MustCompile(`(^|\W)\*\*([\w\s,.!?'-]+)\*\*($|\W)`)
var re_italicer = regexp.MustCompile(`(^|\W)\*([\w\s,.!?'-]+)\*($|\W)`)
var re_bigcoder = regexp.MustCompile("```\n?((?s:.*?))\n?```\n?")
var re_coder = regexp.MustCompile("`([^`]*)`")
var re_quoter = regexp.MustCompile(`(?m:^> (.*)\n?)`)

// markitzero applies the mini markup language: **bold**, *italic*, `code`,
// ``` blocks, and > quotes. Code spans are set aside first and restored after
// the other substitutions so that their contents are left alone.
func markitzero(s string) string {
	// set aside the code blocks
	var bigcodes []string
	bigsaver := func(code string) string {
		bigcodes = append(bigcodes, code)
		return "``````"
	}
	s = re_bigcoder.ReplaceAllStringFunc(s, bigsaver)
	var lilcodes []string
	lilsaver := func(code string) string {
		lilcodes = append(lilcodes, code)
		return "`x`"
	}
	s = re_coder.ReplaceAllStringFunc(s, lilsaver)
	// apply the formatting
	s = re_bolder.ReplaceAllString(s, "$1<b>$2</b>$3")
	s = re_italicer.ReplaceAllString(s, "$1<i>$2</i>$3")
	s = re_quoter.ReplaceAllString(s, "<blockquote>$1</blockquote><p>")
	// put the code spans back, then render them
	lilun := func(s string) string {
		code := lilcodes[0]
		lilcodes = lilcodes[1:]
		return code
	}
	s = re_coder.ReplaceAllStringFunc(s, lilun)
	bigun := func(s string) string {
		code := bigcodes[0]
		bigcodes = bigcodes[1:]
		return code
	}
	s = re_bigcoder.ReplaceAllStringFunc(s, bigun)
	s = re_bigcoder.ReplaceAllString(s, "<pre><code>$1</code></pre><p>")
	s = re_coder.ReplaceAllString(s, "<code>$1</code>")
	return s
}
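
// Illustrative only (not part of the original file): a few inputs and the
// HTML that markitzero produces for them.
func exampleMarkitzero() {
	fmt.Println(markitzero("a **bold** move"))       // a <b>bold</b> move
	fmt.Println(markitzero("`code` stays *as is*"))  // <code>code</code> stays <i>as is</i>
	fmt.Println(markitzero("> famous last words\n")) // <blockquote>famous last words</blockquote><p>
}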

// obfusbreak converts a plain text honk into HTML: it escapes the text,
// linkifies URLs, applies the markitzero markup, and turns newlines into
// <br> tags.
func obfusbreak(s string) string {
	s = strings.TrimSpace(s)
	s = strings.Replace(s, "\r", "", -1)
	s = html.EscapeString(s)
	// dammit go
	s = strings.Replace(s, "&#39;", "'", -1)
	linkfn := func(url string) string {
		if url[0] == '@' {
			return url
		}
		addparen := false
		adddot := false
		if strings.HasSuffix(url, ")") && strings.IndexByte(url, '(') == -1 {
			url = url[:len(url)-1]
			addparen = true
		}
		if strings.HasSuffix(url, ".") {
			url = url[:len(url)-1]
			adddot = true
		}
		url = fmt.Sprintf(`<a class="mention" href="%s">%s</a>`, url, url)
		if adddot {
			url += "."
		}
		if addparen {
			url += ")"
		}
		return url
	}
	s = re_link.ReplaceAllStringFunc(s, linkfn)

	s = markitzero(s)

	s = strings.Replace(s, "\n", "<br>", -1)
	return s
}
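
// Illustrative only (not part of the original file): obfusbreak turning a
// plain text honk into HTML.
func exampleObfusbreak() {
	s := obfusbreak("look here\nhttps://example.com/page & enjoy")
	fmt.Println(s)
	// look here<br><a class="mention" href="https://example.com/page">https://example.com/page</a> &amp; enjoy
}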

// mentionize wraps @user@host and @https:// mentions in h-card spans linking
// to the resolved actor.
func mentionize(s string) string {
	s = re_mentions.ReplaceAllStringFunc(s, func(m string) string {
		where := gofish(m)
		if where == "" {
			return m
		}
		who := m[0 : 1+strings.IndexByte(m[1:], '@')]
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(where), html.EscapeString(who))
	})
	s = re_urltions.ReplaceAllStringFunc(s, func(m string) string {
		return fmt.Sprintf(`<span class="h-card"><a class="u-url mention" href="%s">%s</a></span>`,
			html.EscapeString(m[1:]), html.EscapeString(m))
	})
	return s
}

var re_unurl = regexp.MustCompile("https://([^/]+).*/([^/]+)")
var re_urlhost = regexp.MustCompile("https://([^/]+)")

// originate returns the host part of a URL.
func originate(u string) string {
	m := re_urlhost.FindStringSubmatch(u)
	if len(m) > 1 {
		return m[1]
	}
	return ""
}

// honkerhandle extracts the bare username and the user@host handle from an
// actor URL, falling back to the input itself if it doesn't look like a URL.
func honkerhandle(h string) (string, string) {
	m := re_unurl.FindStringSubmatch(h)
	if len(m) > 2 {
		return m[2], fmt.Sprintf("%s@%s", m[2], m[1])
	}
	return h, h
}
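
// Illustrative only (not part of the original file): how an actor URL breaks
// down into a host, a username, and a user@host handle.
func exampleHandles() {
	fmt.Println(originate("https://example.com/users/alice")) // example.com
	name, handle := honkerhandle("https://example.com/users/alice")
	fmt.Println(name, handle) // alice alice@example.com
	// strings without a URL shape pass through unchanged
	fmt.Println(honkerhandle("alice@example.com")) // alice@example.com alice@example.com
}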

func prepend(s string, x []string) []string {
	return append([]string{s}, x...)
}

// pleroma leaks followers addressed posts to followers
func butnottooloud(aud []string) {
	for i, a := range aud {
		if strings.HasSuffix(a, "/followers") {
			aud[i] = ""
		}
	}
}

// keepitquiet reports whether a honk avoids the public collection entirely.
func keepitquiet(aud []string) bool {
	for _, a := range aud {
		if a == thewholeworld {
			return false
		}
	}
	return true
}

// oneofakind removes duplicates and empty strings from a slice, keeping the
// first occurrence of each entry in order. The input slice is modified.
func oneofakind(a []string) []string {
	var x []string
	for n, s := range a {
		if s != "" {
			x = append(x, s)
			for i := n + 1; i < len(a); i++ {
				if a[i] == s {
					a[i] = ""
				}
			}
		}
	}
	return x
}
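
// Illustrative only (not part of the original file): oneofakind deduplicates
// while keeping the first occurrence of each entry in order.
func exampleOneofakind() {
	aud := []string{"https://a.example", "", "https://b.example", "https://a.example"}
	fmt.Println(oneofakind(aud)) // [https://a.example https://b.example]
}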

var ziggies = make(map[string]*rsa.PrivateKey)
var zaggies = make(map[string]*rsa.PublicKey)
var ziggylock sync.Mutex

// ziggy returns the key name and private key used to sign requests for a
// local user, loading it from the database and caching it on first use.
func ziggy(username string) (keyname string, key *rsa.PrivateKey) {
	ziggylock.Lock()
	key = ziggies[username]
	ziggylock.Unlock()
	if key == nil {
		db := opendatabase()
		row := db.QueryRow("select seckey from users where username = ?", username)
		var data string
		row.Scan(&data)
		var err error
		key, _, err = pez(data)
		if err != nil {
			log.Printf("error decoding %s seckey: %s", username, err)
			return
		}
		ziggylock.Lock()
		ziggies[username] = key
		ziggylock.Unlock()
	}
	keyname = fmt.Sprintf("https://%s/u/%s#key", serverName, username)
	return
}

// zaggy returns the public key for a remote keyname, fetching it over the web
// and caching it in memory and in the database when it isn't already known.
func zaggy(keyname string) (key *rsa.PublicKey) {
	ziggylock.Lock()
	key = zaggies[keyname]
	ziggylock.Unlock()
	if key != nil {
		return
	}
	row := stmtGetXonker.QueryRow(keyname, "pubkey")
	var data string
	err := row.Scan(&data)
	if err != nil {
		log.Printf("hitting the webs for missing pubkey: %s", keyname)
		j, err := GetJunk(keyname)
		if err != nil {
			log.Printf("error getting %s pubkey: %s", keyname, err)
			return
		}
		var ok bool
		data, ok = j.FindString([]string{"publicKey", "publicKeyPem"})
		if !ok {
			log.Printf("error finding %s pubkey", keyname)
			return
		}
		_, ok = j.FindString([]string{"publicKey", "owner"})
		if !ok {
			log.Printf("error finding %s pubkey owner", keyname)
			return
		}
		_, key, err = pez(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
			return
		}
		_, err = stmtSaveXonker.Exec(keyname, data, "pubkey")
		if err != nil {
			log.Printf("error saving key: %s", err)
		}
	} else {
		_, key, err = pez(data)
		if err != nil {
			log.Printf("error decoding %s pubkey: %s", keyname, err)
			return
		}
	}
	ziggylock.Lock()
	zaggies[keyname] = key
	ziggylock.Unlock()
	return
}

// makeitworksomehowwithoutregardforkeycontinuity drops the cached public key
// for keyname and reverifies the request, accepting whatever key the actor
// currently serves.
func makeitworksomehowwithoutregardforkeycontinuity(keyname string, r *http.Request, payload []byte) (string, error) {
	_, err := stmtDeleteXonker.Exec(keyname, "pubkey")
	if err != nil {
		log.Printf("error deleting key: %s", err)
	}
	ziggylock.Lock()
	delete(zaggies, keyname)
	ziggylock.Unlock()
	return zag(r, payload)
}

var thumbbiters map[int64]map[string]bool
var zordses map[int64][]*regexp.Regexp
var thumblock sync.Mutex

// bitethethumbs reloads the per-user block lists (thumbbiters) and filtered
// word patterns (zordses) from the database.
func bitethethumbs() {
	rows, err := stmtThumbBiters.Query()
	if err != nil {
		log.Printf("error getting thumbbiters: %s", err)
		return
	}
	defer rows.Close()

	thumblock.Lock()
	defer thumblock.Unlock()
	thumbbiters = make(map[int64]map[string]bool)
	zordses = make(map[int64][]*regexp.Regexp)
	for rows.Next() {
		var userid int64
		var name, wherefore string
		err = rows.Scan(&userid, &name, &wherefore)
		if err != nil {
			log.Printf("error scanning zonker: %s", err)
			continue
		}
		if wherefore == "zord" {
			zord := "\\b(?i:" + name + ")\\b"
			re, err := regexp.Compile(zord)
			if err != nil {
				log.Printf("error compiling zord: %s", err)
			} else {
				zordses[userid] = append(zordses[userid], re)
			}
			continue
		}
		m := thumbbiters[userid]
		if m == nil {
			m = make(map[string]bool)
			thumbbiters[userid] = m
		}
		m[name] = true
	}
}

// getzords returns the compiled zord patterns for a user.
func getzords(userid int64) []*regexp.Regexp {
	thumblock.Lock()
	defer thumblock.Unlock()
	return zordses[userid]
}

// thoudostbitethythumb reports whether any member of who, or its origin
// server, is on the user's block list.
func thoudostbitethythumb(userid int64, who []string, objid string) bool {
	thumblock.Lock()
	biters := thumbbiters[userid]
	thumblock.Unlock()
	for _, w := range who {
		if biters[w] {
			return true
		}
		where := originate(w)
		if where != "" {
			if biters[where] {
				return true
			}
		}
	}
	return false
}

// keymatch checks that the key's owner is the actor itself and, if so,
// returns the actor's origin.
func keymatch(keyname string, actor string) string {
	hash := strings.IndexByte(keyname, '#')
	if hash == -1 {
		hash = len(keyname)
	}
	owner := keyname[0:hash]
	if owner == actor {
		return originate(actor)
	}
	return ""
}
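
// Illustrative only (not part of the original file): keymatch accepts a key
// whose owner is the actor itself and hands back the actor's origin for
// further checks.
func exampleKeymatch() {
	actor := "https://example.com/u/alice"
	fmt.Println(keymatch(actor+"#key", actor)) // example.com
	fmt.Println(keymatch("https://elsewhere.example/u/mallory#key", actor)) // ""
}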