Compare commits

...

49 Commits

Author SHA1 Message Date
Dirk Nederveen 3b1bed8517 Update dockerfiles 2023-07-07 20:11:45 +02:00
Dirk Nederveen 5d36d0ecf9 Add docker files (Build: docker build) 2023-07-07 20:11:44 +02:00
Ted Unangst b5091cc5e1 note threaded display as change 2023-07-03 00:47:35 -04:00
Ted Unangst 8a14e67b32 add some more search capabilities 2023-07-03 00:45:35 -04:00
Ted Unangst 6b46a2a597 identify hashtag links as hashtag 2023-07-02 18:31:51 -04:00
Ted Unangst 63f6a949bc we don't care about dislikes 2023-07-02 14:47:00 -04:00
Ted Unangst 5d03fbac6a tune up quotes. save the fetched post. 2023-07-02 14:44:52 -04:00
Ted Unangst cb7c9d700b go vet claims we need a buffered chan for signal 2023-07-02 14:08:26 -04:00
Ted Unangst 0faedcb9c5 fix hashtag linking 2023-06-23 03:37:52 -04:00
Ted Unangst 29494d7ac2 too lazy to change the oonker mistake, add another test in threadsort 2023-06-21 19:50:17 -04:00
Ted Unangst 91492f866e can inline remnant of old marker code 2023-06-21 01:36:22 -04:00
Ted Unangst e79a05b983 newline for csp violations 2023-06-21 01:17:22 -04:00
Ted Unangst 9f878bffde don't report errors when the backend is expected to die 2023-06-21 01:08:13 -04:00
Ted Unangst e8ee70d0f0 get should return 200, but some people are weird 2023-06-21 01:07:45 -04:00
Ted Unangst 975de7188e helps if it compiles 2023-06-18 21:48:45 -04:00
Ted Unangst e3c4040aa3 use helper function 2023-06-18 21:47:26 -04:00
Ted Unangst 7d6556ee36 cache webfinger responses 2023-06-18 21:44:39 -04:00
Ted Unangst cd7518c0a5 sometimes a thread gets weird. don't double process trees. 2023-06-18 21:22:21 -04:00
Ted Unangst c5ab532e64 less aggressive rewrite filter rewriting 2023-06-17 20:26:38 -04:00
Ted Unangst 2f52375872 try to keep the thread flat for simple reply chains 2023-06-14 20:40:11 -04:00
Ted Unangst d86c38136f note that cleanup doesn't vacuum 2023-06-14 20:21:02 -04:00
Ted Unangst 20a6d2b962 try pulling self reply up in thread 2023-06-13 23:42:09 -04:00
Ted Unangst 7cc2e6f2da this does a better job on broken threads 2023-06-13 23:31:52 -04:00
Ted Unangst 17be6564ee simplify 2023-06-13 19:48:22 -04:00
Ted Unangst 796de02bb9 need to reorder this logic now, seems to work better 2023-06-13 19:39:59 -04:00
Ted Unangst 67496c1208 another try at flattening logic 2023-06-13 19:26:35 -04:00
Ted Unangst 6cb5152611 logic fail after refactor 2023-06-13 19:21:35 -04:00
Ted Unangst 23f9a3b8bf try to flatten self replies in threading 2023-06-13 19:18:10 -04:00
Ted Unangst 7feb254618 experiment with nested thread sorting 2023-06-13 16:29:27 -04:00
Ted Unangst 23e2c9276a unauthorized space invaders 2023-06-13 15:00:55 -04:00
Ted Unangst e20c344f11 an attempt at opengraph 2023-06-13 14:58:35 -04:00
Ted Unangst 712873418b for fatal error (no such host), advance the retry clock 2023-06-13 13:56:42 -04:00
Ted Unangst dccbec5c8e refine the redeliver timeout schedule 2023-06-13 13:42:15 -04:00
Ted Unangst 24cee3302a superdeliverator worth a mention 2023-06-12 17:35:24 -04:00
Ted Unangst fea5df77dd basic import from instagram 2023-06-12 17:31:13 -04:00
Ted Unangst e8cf29056e need to scan into pointer 2023-06-12 15:13:46 -04:00
Ted Unangst 3baff9e64e include userid in doover queries 2023-06-12 15:08:09 -04:00
Ted Unangst 4fe838f5ed some cleanup for the deliverator functions 2023-06-12 15:01:03 -04:00
Ted Unangst d75e1671f7 convert deliverater queue to per rcpt fifo 2023-06-12 14:40:28 -04:00
Ted Unangst 6e43340381 all messages matter again 2023-06-12 13:44:03 -04:00
Ted Unangst 3ff98a261e sometimes we need to go deeper to find the honk in the bonk 2023-06-11 01:40:34 -04:00
Ted Unangst 7a332ead41 oops, owner field doesn't exist. only check attributedto for collections. 2023-06-11 00:02:05 -04:00
Ted Unangst 846e9820e9 check for owner before attributedTo 2023-06-10 23:51:31 -04:00
Ted Unangst 2bea5e1be9 allow Group as an actor 2023-06-10 23:42:07 -04:00
Ted Unangst a78a345890 note changes 2023-06-10 23:34:11 -04:00
Ted Unangst d622d8781a better handling of Link attachment types 2023-06-10 23:33:27 -04:00
Ted Unangst d63e338089 only apply attachment limit to localized atts 2023-06-10 23:23:11 -04:00
Ted Unangst 3b036f4975 some fixups to grab "image" property as seen on lemmy 2023-06-10 23:19:31 -04:00
Ted Unangst 314e0bb805 record the csp violators 2023-05-27 22:47:55 -04:00
22 changed files with 593 additions and 182 deletions

.dockerignore Normal file (4 lines changed)
View File

@ -0,0 +1,4 @@
honk.db*
blob.db*
./honk
.git*

Dockerfile Normal file (20 lines changed)
View File

@ -0,0 +1,20 @@
FROM golang:1.19
RUN apt update && apt install -y libsqlite3-dev && rm -rf /var/cache/apt/*
RUN mkdir /honk-src
WORKDIR /honk-src
COPY go.mod go.sum /honk-src/
COPY . /honk-src/
RUN go get && go mod vendor
RUN make
RUN mv ./honk /bin/honk
CMD ["/bin/honk", "-datadir", "/honk-data"]
COPY docker-entrypoint.sh /honk-src/docker-entrypoint.sh
ENTRYPOINT ["/honk-src/docker-entrypoint.sh"]

View File

@ -25,7 +25,6 @@ import (
"io"
notrand "math/rand"
"net/http"
"net/url"
"os"
"regexp"
"strings"
@ -166,7 +165,11 @@ func junkGet(userid int64, url string, args junk.GetArgs) (junk.Junk, error) {
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
switch resp.StatusCode {
case 200:
case 201:
case 202:
default:
return nil, fmt.Errorf("http get status: %d", resp.StatusCode)
}
return junk.Read(resp.Body)
@ -508,36 +511,13 @@ func firstofmany(obj junk.Junk, key string) string {
}
var re_mast0link = regexp.MustCompile(`https://[[:alnum:].]+/users/[[:alnum:]]+/statuses/[[:digit:]]+`)
var re_masto1ink = regexp.MustCompile(`https://[[:alnum:].]+/@[[:alnum:]]+/[[:digit:]]+`)
var re_masto1ink = regexp.MustCompile(`https://([[:alnum:].]+)/@([[:alnum:]]+)/([[:digit:]]+)`)
var re_misslink = regexp.MustCompile(`https://[[:alnum:].]+/notes/[[:alnum:]]+`)
var re_honklink = regexp.MustCompile(`https://[[:alnum:].]+/u/[[:alnum:]]+/h/[[:alnum:]]+`)
var re_r0malink = regexp.MustCompile(`https://[[:alnum:].]+/objects/[[:alnum:]-]+`)
var re_roma1ink = regexp.MustCompile(`https://[[:alnum:].]+/notice/[[:alnum:]]+`)
var re_qtlinks = regexp.MustCompile(`>https://[^\s<]+<`)
func qutify(user *WhatAbout, content string) string {
// well this is gross
malcontent := strings.ReplaceAll(content, `</span><span class="ellipsis">`, "")
malcontent = strings.ReplaceAll(malcontent, `</span><span class="invisible">`, "")
mlinks := re_qtlinks.FindAllString(malcontent, -1)
for _, m := range mlinks {
m = m[1 : len(m)-1]
if re_mast0link.MatchString(m) || re_masto1ink.MatchString(m) ||
re_misslink.MatchString(m) ||
re_honklink.MatchString(m) ||
re_r0malink.MatchString(m) || re_roma1ink.MatchString(m) {
j, err := GetJunk(user.ID, m)
if err == nil {
q, ok := j.GetString("content")
if ok {
content = fmt.Sprintf("%s<blockquote>%s</blockquote>", content, q)
}
}
}
}
return content
}
func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
depth := 0
maxdepth := 10
@ -545,6 +525,44 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
goingup := 0
var xonkxonkfn func(item junk.Junk, origin string, isUpdate bool) *Honk
qutify := func(user *WhatAbout, content string) string {
if depth >= maxdepth {
ilog.Printf("in too deep")
return content
}
// well this is gross
malcontent := strings.ReplaceAll(content, `</span><span class="ellipsis">`, "")
malcontent = strings.ReplaceAll(malcontent, `</span><span class="invisible">`, "")
mlinks := re_qtlinks.FindAllString(malcontent, -1)
for _, m := range mlinks {
tryit := false
m = m[1 : len(m)-1]
if re_mast0link.MatchString(m) || re_misslink.MatchString(m) ||
re_honklink.MatchString(m) || re_r0malink.MatchString(m) ||
re_roma1ink.MatchString(m) {
tryit = true
} else if re_masto1ink.MatchString(m) {
m = re_masto1ink.ReplaceAllString(m, "https://$1/users/$2/statuses/$3")
tryit = true
}
if tryit {
if x := getxonk(user.ID, m); x != nil {
content = fmt.Sprintf("%s<blockquote>%s</blockquote>", content, x.Noise)
} else if j, err := GetJunk(user.ID, m); err == nil {
q, ok := j.GetString("content")
if ok {
content = fmt.Sprintf("%s<blockquote>%s</blockquote>", content, q)
}
prevdepth := depth
depth = maxdepth
xonkxonkfn(j, originate(m), false)
depth = prevdepth
}
}
}
return content
}
saveonemore := func(xid string) {
dlog.Printf("getting onemore: %s", xid)
if depth >= maxdepth {
@ -613,6 +631,14 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
case "Announce":
obj, ok = item.GetMap("object")
if ok {
what, ok := obj.GetString("type")
if ok && what == "Create" {
obj, ok = obj.GetMap("object")
if !ok {
ilog.Printf("lost object inside create %s", id)
return nil
}
}
xid, _ = obj.GetString("id")
} else {
xid, _ = item.GetString("object")
@ -620,12 +646,16 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
if !needbonkid(user, xid) {
return nil
}
dlog.Printf("getting bonk: %s", xid)
obj, err = GetJunkHardMode(user.ID, xid)
if err != nil {
ilog.Printf("error getting bonk: %s: %s", xid, err)
}
origin = originate(xid)
if ok && originate(id) == origin {
dlog.Printf("using object in announce for %s", xid)
} else {
dlog.Printf("getting bonk: %s", xid)
obj, err = GetJunkHardMode(user.ID, xid)
if err != nil {
ilog.Printf("error getting bonk: %s: %s", xid, err)
}
}
what = "bonk"
case "Update":
isUpdate = true
@ -689,7 +719,9 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
case "Audio":
fallthrough
case "Image":
preferorig = true
if what == "Image" {
preferorig = true
}
fallthrough
case "Video":
fallthrough
@ -837,6 +869,9 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
procatt := func(att junk.Junk) {
at, _ := att.GetString("type")
mt, _ := att.GetString("mediaType")
if mt == "" {
mt = "image"
}
u, ok := att.GetString("url")
if !ok {
u, ok = att.GetString("href")
@ -866,14 +901,24 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
desc = name
}
localize := false
if numatts > 4 {
ilog.Printf("excessive attachment: %s", at)
} else if at == "Document" || at == "Image" || (preferorig && at == "Link") {
if at == "Document" || at == "Image" {
mt = strings.ToLower(mt)
dlog.Printf("attachment: %s %s", mt, u)
if mt == "text/plain" || mt == "application/pdf" ||
strings.HasPrefix(mt, "image") {
localize = true
if numatts > 4 {
ilog.Printf("excessive attachment: %s", at)
} else {
localize = true
}
}
} else if at == "Link" {
if waspage {
xonk.Noise += fmt.Sprintf(`<p><a href="%s">%s</a>`, u, u)
return
}
if name == "" {
name = u
}
} else {
ilog.Printf("unknown attachment: %s", at)
@ -890,6 +935,9 @@ func xonksaver(user *WhatAbout, item junk.Junk, origin string) *Honk {
}
numatts++
}
if img, ok := obj.GetMap("image"); ok {
procatt(img)
}
if preferorig {
atts, _ := obj.GetArray("url")
for _, atti := range atts {
@ -1119,7 +1167,7 @@ func rubadubdub(user *WhatAbout, req junk.Junk) {
j["published"] = time.Now().UTC().Format(time.RFC3339)
j["object"] = req
deliverate(0, user.ID, actor, j.ToBytes(), true)
deliverate(user.ID, actor, j.ToBytes())
}
func itakeitallback(user *WhatAbout, xid string, owner string, folxid string) {
@ -1138,7 +1186,7 @@ func itakeitallback(user *WhatAbout, xid string, owner string, folxid string) {
j["object"] = f
j["published"] = time.Now().UTC().Format(time.RFC3339)
deliverate(0, user.ID, owner, j.ToBytes(), true)
deliverate(user.ID, owner, j.ToBytes())
}
func subsub(user *WhatAbout, xid string, owner string, folxid string) {
@ -1155,7 +1203,7 @@ func subsub(user *WhatAbout, xid string, owner string, folxid string) {
j["object"] = xid
j["published"] = time.Now().UTC().Format(time.RFC3339)
deliverate(0, user.ID, owner, j.ToBytes(), true)
deliverate(user.ID, owner, j.ToBytes())
}
func activatedonks(donks []*Donk) []junk.Junk {
@ -1465,7 +1513,7 @@ func sendchonk(user *WhatAbout, ch *Chonk) {
rcpts := make(map[string]bool)
rcpts[ch.Target] = true
for a := range rcpts {
go deliverate(0, user.ID, a, msg, true)
go deliverate(user.ID, a, msg)
}
}
@ -1504,25 +1552,13 @@ func honkworldwide(user *WhatAbout, honk *Honk) {
}
}
for a := range rcpts {
go deliverate(0, user.ID, a, msg, doesitmatter(honk.What))
go deliverate(user.ID, a, msg)
}
if honk.Public && len(honk.Onts) > 0 {
collectiveaction(honk)
}
}
func doesitmatter(what string) bool {
switch what {
case "ack":
return false
case "react":
return false
case "deack":
return false
}
return true
}
func collectiveaction(honk *Honk) {
user := getserveruser()
for _, ont := range honk.Onts {
@ -1549,7 +1585,7 @@ func collectiveaction(honk *Honk) {
}
msg := j.ToBytes()
for a := range rcpts {
go deliverate(0, user.ID, a, msg, false)
go deliverate(user.ID, a, msg)
}
}
}
@ -1584,12 +1620,7 @@ func junkuser(user *WhatAbout) junk.Junk {
a := junk.New()
a["type"] = "Image"
a["mediaType"] = "image/png"
if ava := user.Options.Avatar; ava != "" {
a["url"] = ava
} else {
u := fmt.Sprintf("https://%s/a?a=%s", serverName, url.QueryEscape(user.URL))
a["url"] = u
}
a["url"] = avatarURL(user)
j["icon"] = a
if ban := user.Options.Banner; ban != "" {
a := junk.New()
@ -1615,10 +1646,8 @@ var oldjonkers = cache.New(cache.Options{Filler: func(name string) ([]byte, bool
if err != nil {
return nil, false
}
var buf bytes.Buffer
j := junkuser(user)
j.Write(&buf)
return buf.Bytes(), true
return j.ToBytes(), true
}, Duration: 1 * time.Minute})
func asjonker(name string) ([]byte, bool) {
@ -1696,9 +1725,12 @@ func investigate(name string) (*SomeThing, error) {
func somethingabout(obj junk.Junk) (*SomeThing, error) {
info := new(SomeThing)
t, _ := obj.GetString("type")
isowned := false
switch t {
case "Person":
fallthrough
case "Group":
fallthrough
case "Organization":
fallthrough
case "Application":
@ -1706,6 +1738,7 @@ func somethingabout(obj junk.Junk) (*SomeThing, error) {
case "Service":
info.What = SomeActor
case "OrderedCollection":
isowned = true
fallthrough
case "Collection":
info.What = SomeCollection
@ -1717,7 +1750,9 @@ func somethingabout(obj junk.Junk) (*SomeThing, error) {
if info.Name == "" {
info.Name, _ = obj.GetString("name")
}
info.Owner, _ = obj.GetString("attributedTo")
if isowned {
info.Owner, _ = obj.GetString("attributedTo")
}
if info.Owner == "" {
info.Owner = info.XID
}
@ -1857,7 +1892,7 @@ func updateMe(username string) {
}
}
for a := range rcpts {
go deliverate(0, user.ID, a, msg, false)
go deliverate(user.ID, a, msg)
}
}
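
The reworked qutify above recognizes Mastodon /@user/id permalinks and rewrites them to the /users/.../statuses/ object form before fetching, using the capture groups added to re_masto1ink. A minimal sketch of that rewrite; the instance and user names are made up for illustration:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as re_masto1ink in the diff: capture host, user, and status id.
var reMasto = regexp.MustCompile(`https://([[:alnum:].]+)/@([[:alnum:]]+)/([[:digit:]]+)`)

func main() {
	// Hypothetical quoted link as it might appear inside post content.
	m := "https://example.social/@alice/110551234567890123"
	if reMasto.MatchString(m) {
		// Rewrite to the object URL that GetJunk can actually fetch.
		m = reMasto.ReplaceAllString(m, "https://$1/users/$2/statuses/$3")
	}
	fmt.Println(m) // https://example.social/users/alice/statuses/110551234567890123
}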

View File

@ -125,7 +125,7 @@ func adminscreen() {
}
defer restore()
go func() {
sig := make(chan os.Signal)
sig := make(chan os.Signal, 1)
signal.Notify(sig, os.Interrupt)
<-sig
restore()
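
The change above follows go vet's warning about signal.Notify: the package does a non-blocking send, so an unbuffered channel can miss a signal delivered before the receiver is ready. A minimal sketch of the corrected pattern:

package main

import (
	"fmt"
	"os"
	"os/signal"
)

func main() {
	// Buffer of one: signal.Notify never blocks, and the signal is not lost
	// even if this goroutine has not reached the receive yet.
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, os.Interrupt)
	<-sig
	fmt.Println("interrupted, cleaning up")
}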

View File

@ -23,6 +23,7 @@ import (
"image"
"image/png"
"net/http"
"net/url"
"regexp"
"strconv"
"strings"
@ -103,6 +104,13 @@ func genAvatar(name string) []byte {
return buf.Bytes()
}
func avatarURL(user *WhatAbout) string {
if ava := user.Options.Avatar; ava != "" {
return ava
}
return fmt.Sprintf("https://%s/a?a=%s", serverName, url.QueryEscape(user.URL))
}
func showflag(writer http.ResponseWriter, req *http.Request) {
code := mux.Vars(req)["code"]
colors := strings.Split(code, ",")

View File

@ -23,7 +23,10 @@ import (
"net/rpc"
"os"
"os/exec"
"os/signal"
"strings"
"sync"
"syscall"
"humungus.tedunangst.com/r/webs/gate"
"humungus.tedunangst.com/r/webs/image"
@ -112,6 +115,7 @@ func orphancheck() {
func backendServer() {
dlog.Printf("backend server running")
go orphancheck()
signal.Ignore(syscall.SIGINT)
shrinker := new(Shrinker)
srv := rpc.NewServer()
err := srv.Register(shrinker)
@ -152,9 +156,23 @@ func runBackendServer() {
if err != nil {
elog.Panicf("can't exec backend: %s", err)
}
workinprogress++
var mtx sync.Mutex
go func() {
<-endoftheworld
mtx.Lock()
defer mtx.Unlock()
w.Close()
w = nil
readyalready <- true
}()
go func() {
proc.Wait()
elog.Printf("lost the backend: %s", err)
w.Close()
mtx.Lock()
defer mtx.Unlock()
if w != nil {
elog.Printf("lost the backend: %s", err)
w.Close()
}
}()
}
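
The shutdown handling above avoids reporting errors when the backend is expected to die: the clean path closes the pipe and nils the handle under a mutex, so the waiter goroutine can tell an expected exit from a crash. A stripped-down sketch of that pattern, with illustrative names:

package main

import (
	"fmt"
	"sync"
	"time"
)

type handle struct{ name string }

func main() {
	var mtx sync.Mutex
	w := &handle{"backend pipe"}

	done := make(chan struct{})
	// Waiter: fires when the child process exits, expected or not.
	go func() {
		time.Sleep(50 * time.Millisecond)
		mtx.Lock()
		defer mtx.Unlock()
		if w != nil {
			fmt.Println("lost the backend:", w.name) // only report unexpected death
		}
		close(done)
	}()

	// Clean shutdown path: clear the handle first so the waiter stays quiet.
	mtx.Lock()
	w = nil
	mtx.Unlock()

	<-done
}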

View File

@ -303,6 +303,26 @@ func gethonksbysearch(userid int64, q string, wanted int64) []*Honk {
if t == "" {
continue
}
if t == "@me" {
queries = append(queries, "whofore = 1")
continue
}
if t == "@self" {
queries = append(queries, "(whofore = 2 or whofore = 3)")
continue
}
if strings.HasPrefix(t, "before:") {
before := t[7:]
queries = append(queries, "dt < ?")
params = append(params, before)
continue
}
if strings.HasPrefix(t, "after:") {
after := t[6:]
queries = append(queries, "dt > ?")
params = append(params, after)
continue
}
if strings.HasPrefix(t, "site:") {
site := t[5:]
site = "%" + site + "%"
@ -1106,6 +1126,7 @@ var stmtSaveMeta, stmtDeleteAllMeta, stmtDeleteOneMeta, stmtDeleteSomeMeta, stmt
var stmtHonksISaved, stmtGetFilters, stmtSaveFilter, stmtDeleteFilter *sql.Stmt
var stmtGetTracks *sql.Stmt
var stmtSaveChonk, stmtLoadChonks, stmtGetChatters *sql.Stmt
var stmtDeliquentCheck, stmtDeliquentUpdate *sql.Stmt
func preparetodie(db *sql.DB, s string) *sql.Stmt {
stmt, err := db.Prepare(s)
@ -1192,4 +1213,6 @@ func prepareStatements(db *sql.DB) {
stmtSaveChonk = preparetodie(db, "insert into chonks (userid, xid, who, target, dt, noise, format) values (?, ?, ?, ?, ?, ?, ?)")
stmtLoadChonks = preparetodie(db, "select chonkid, userid, xid, who, target, dt, noise, format from chonks where userid = ? and dt > ? order by chonkid asc")
stmtGetChatters = preparetodie(db, "select distinct(target) from chonks where userid = ?")
stmtDeliquentCheck = preparetodie(db, "select dooverid, msg from doovers where userid = ? and rcpt = ?")
stmtDeliquentUpdate = preparetodie(db, "update doovers set msg = ? where dooverid = ?")
}
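
The new search keywords above each translate into a WHERE clause: @me and @self select on whofore, while before:, after:, and site: become parameterized comparisons. A small sketch of that mapping; the column used for site: is an assumption, since the diff only shows how the parameter is shaped:

package main

import (
	"fmt"
	"strings"
)

// clauseFor mirrors the token handling added to gethonksbysearch.
func clauseFor(t string) (clause, param string, ok bool) {
	switch {
	case t == "@me":
		return "whofore = 1", "", true
	case t == "@self":
		return "(whofore = 2 or whofore = 3)", "", true
	case strings.HasPrefix(t, "before:"):
		return "dt < ?", t[len("before:"):], true
	case strings.HasPrefix(t, "after:"):
		return "dt > ?", t[len("after:"):], true
	case strings.HasPrefix(t, "site:"):
		// Column name assumed for illustration; the parameter shaping matches the diff.
		return "xid like ?", "%" + t[len("site:"):] + "%", true
	}
	return "", "", false
}

func main() {
	for _, t := range []string{"@me", "before:2023-07-01", "site:example.com"} {
		c, p, _ := clauseFor(t)
		fmt.Printf("%-22s -> %s  %q\n", t, c, p)
	}
}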

View File

@ -16,39 +16,42 @@
package main
import (
"fmt"
"bytes"
"database/sql"
notrand "math/rand"
"strings"
"sync"
"time"
"humungus.tedunangst.com/r/webs/gate"
)
type Doover struct {
ID int64
When time.Time
ID int64
When time.Time
Userid int64
Tries int64
Rcpt string
Msgs [][]byte
}
func sayitagain(goarounds int64, userid int64, rcpt string, msg []byte) {
func sayitagain(doover Doover) {
doover.Tries += 1
var drift time.Duration
switch goarounds {
case 1:
drift = 5 * time.Minute
case 2:
drift = 1 * time.Hour
case 3:
drift = 4 * time.Hour
case 4:
drift = 12 * time.Hour
case 5:
drift = 24 * time.Hour
default:
ilog.Printf("he's dead jim: %s", rcpt)
clearoutbound(rcpt)
if doover.Tries <= 3 { // 5, 10, 15 minutes
drift = time.Duration(doover.Tries*5) * time.Minute
} else if doover.Tries <= 6 { // 1, 2, 3 hours
drift = time.Duration(doover.Tries-3) * time.Hour
} else if doover.Tries <= 9 { // 12, 12, 12 hours
drift = time.Duration(12) * time.Hour
} else {
ilog.Printf("he's dead jim: %s", doover.Rcpt)
return
}
drift += time.Duration(notrand.Int63n(int64(drift / 10)))
when := time.Now().Add(drift)
_, err := stmtAddDoover.Exec(when.UTC().Format(dbtimeformat), goarounds, userid, rcpt, msg)
data := bytes.Join(doover.Msgs, []byte{0})
_, err := stmtAddDoover.Exec(when.UTC().Format(dbtimeformat), doover.Tries, doover.Userid, doover.Rcpt, data)
if err != nil {
elog.Printf("error saving doover: %s", err)
}
@ -58,30 +61,66 @@ func sayitagain(goarounds int64, userid int64, rcpt string, msg []byte) {
}
}
func clearoutbound(rcpt string) {
hostname := originate(rcpt)
if hostname == "" {
func lethaldose(err error) int64 {
str := err.Error()
if strings.Contains(str, "no such host") {
return 8
}
return 0
}
var dqmtx sync.Mutex
func delinquent(userid int64, rcpt string, msg []byte) bool {
dqmtx.Lock()
defer dqmtx.Unlock()
row := stmtDeliquentCheck.QueryRow(userid, rcpt)
var dooverid int64
var data []byte
err := row.Scan(&dooverid, &data)
if err == sql.ErrNoRows {
return false
}
if err != nil {
elog.Printf("error scanning deliquent check: %s", err)
return true
}
data = append(data, 0)
data = append(data, msg...)
_, err = stmtDeliquentUpdate.Exec(data, dooverid)
if err != nil {
elog.Printf("error updating deliquent: %s", err)
return true
}
return true
}
func deliverate(userid int64, rcpt string, msg []byte) {
if delinquent(userid, rcpt, msg) {
return
}
xid := fmt.Sprintf("%%https://%s/%%", hostname)
ilog.Printf("clearing outbound for %s", xid)
db := opendatabase()
db.Exec("delete from doovers where rcpt like ?", xid)
var d Doover
d.Userid = userid
d.Tries = 0
d.Rcpt = rcpt
d.Msgs = append(d.Msgs, msg)
deliveration(d)
}
var garage = gate.NewLimiter(40)
func deliverate(goarounds int64, userid int64, rcpt string, msg []byte, prio bool) {
func deliveration(doover Doover) {
garage.Start()
defer garage.Finish()
var ki *KeyInfo
ok := ziggies.Get(userid, &ki)
ok := ziggies.Get(doover.Userid, &ki)
if !ok {
elog.Printf("lost key for delivery")
return
}
var inbox string
rcpt := doover.Rcpt
// already did the box indirection
if rcpt[0] == '%' {
inbox = rcpt[1:]
@ -90,18 +129,25 @@ func deliverate(goarounds int64, userid int64, rcpt string, msg []byte, prio boo
ok := boxofboxes.Get(rcpt, &box)
if !ok {
ilog.Printf("failed getting inbox for %s", rcpt)
sayitagain(goarounds+1, userid, rcpt, msg)
sayitagain(doover)
return
}
inbox = box.In
}
err := PostMsg(ki.keyname, ki.seckey, inbox, msg)
if err != nil {
ilog.Printf("failed to post json to %s: %s", inbox, err)
if prio {
sayitagain(goarounds+1, userid, rcpt, msg)
for i, msg := range doover.Msgs {
if i > 0 {
time.Sleep(2 * time.Second)
}
err := PostMsg(ki.keyname, ki.seckey, inbox, msg)
if err != nil {
ilog.Printf("failed to post json to %s: %s", inbox, err)
if t := lethaldose(err); t > doover.Tries {
doover.Tries = t
}
doover.Msgs = doover.Msgs[i:]
sayitagain(doover)
return
}
return
}
}
@ -130,6 +176,23 @@ func getdoovers() []Doover {
return doovers
}
func extractdoover(d *Doover) error {
dqmtx.Lock()
defer dqmtx.Unlock()
row := stmtLoadDoover.QueryRow(d.ID)
var data []byte
err := row.Scan(&d.Tries, &d.Userid, &d.Rcpt, &data)
if err != nil {
return err
}
_, err = stmtZapDoover.Exec(d.ID)
if err != nil {
return err
}
d.Msgs = bytes.Split(data, []byte{0})
return nil
}
func redeliverator() {
sleeper := time.NewTimer(5 * time.Second)
for {
@ -148,22 +211,13 @@ func redeliverator() {
nexttime := now.Add(24 * time.Hour)
for _, d := range doovers {
if d.When.Before(now) {
var goarounds, userid int64
var rcpt string
var msg []byte
row := stmtLoadDoover.QueryRow(d.ID)
err := row.Scan(&goarounds, &userid, &rcpt, &msg)
err := extractdoover(&d)
if err != nil {
elog.Printf("error scanning doover: %s", err)
elog.Printf("error extracting doover: %s", err)
continue
}
_, err = stmtZapDoover.Exec(d.ID)
if err != nil {
elog.Printf("error deleting doover: %s", err)
continue
}
ilog.Printf("redeliverating %s try %d", rcpt, goarounds)
deliverate(goarounds, userid, rcpt, msg, true)
ilog.Printf("redeliverating %s try %d", d.Rcpt, d.Tries)
deliveration(d)
} else if d.When.Before(nexttime) {
nexttime = d.When
}
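
The rewritten sayitagain above replaces the fixed switch with a schedule computed from the try counter: tries 1 through 3 wait 5, 10, and 15 minutes, tries 4 through 6 wait 1 to 3 hours, tries 7 through 9 wait 12 hours each, and anything beyond that is dropped, with up to roughly 10% random jitter added. A small sketch of that calculation:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// drift mirrors the schedule in sayitagain: minutes, then hours, then half days.
func drift(tries int64) (time.Duration, bool) {
	switch {
	case tries <= 3: // 5, 10, 15 minutes
		return time.Duration(tries*5) * time.Minute, true
	case tries <= 6: // 1, 2, 3 hours
		return time.Duration(tries-3) * time.Hour, true
	case tries <= 9: // 12 hours each
		return 12 * time.Hour, true
	}
	return 0, false // give up ("he's dead jim")
}

func main() {
	for t := int64(1); t <= 10; t++ {
		d, ok := drift(t)
		if !ok {
			fmt.Printf("try %2d: give up\n", t)
			continue
		}
		d += time.Duration(rand.Int63n(int64(d / 10))) // same jitter shape as the diff
		fmt.Printf("try %2d: retry in ~%v\n", t, d.Round(time.Minute))
	}
}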

docker-entrypoint.sh Executable file (15 lines changed)
View File

@ -0,0 +1,15 @@
#!/bin/sh
if [ ! -r "/honk-data/honk.db" ]; then
set -u
(
echo "$HONK_USERNAME"
echo "$HONK_PASSWORD"
echo "${HONK_LISTEN_ADDR:-"0.0.0.0:8080"}"
echo "${HONK_SERVER_HOSTNAME}"
) | honk -datadir "/honk-data" init
set +u
fi
exec $*

View File

@ -2,6 +2,16 @@ changelog
=== next
+ New threaded display order.
+ Improved search.
+ Tuned up superdeliverator.
+ Import from instagram.
+ improve handling of some Page and Link objects
+ search can now load external posts
=== 0.9.91 One More Time

View File

@ -143,10 +143,18 @@ section of the manual for details of honk composition.
Find old honks.
It's basic substring match with a few extensions.
The following keywords are supported:
.Bl -tag -width honker
.It site
.Bl -tag -width honker:
.It @me
Honks mentioning the user.
.It @self
Honks by the user.
.It before:
Honks posted before YYYY-MM-DD.
.It after:
As above.
.It site:
Substring match on the post domain name.
.It honker
.It honker:
Exact match, either AP actor or honker nickname.
.It -
Negate term.

View File

@ -154,6 +154,10 @@ The
command exists to purge old external data, by default 30 days.
This removes unreferenced, unsaved posts and attachments.
It does not remove any original content.
This will not immediately reduce the size of the database, but frees space
for future use.
A vacuum may be performed manually if necessary, but will require more time
and additional disk space.
.Pp
Backups may be performed by running
.Ic backup dirname .
@ -190,7 +194,7 @@ and templates are reloaded every request.
Data may be imported and converted from other services using the
.Ic import
command.
Currently supports Mastodon and Twitter exported data.
Currently supports Mastodon, Twitter, and Instagram exported data.
Posts are imported and backdated to appear as old honks.
The Mastodon following list is imported, but must be refollowed.
.Pp
@ -201,6 +205,9 @@ To prepare a Twitter data archive, extract the twitter-longhash.zip file.
After unzipping the data archive, navigate to the tweet_media directory
and unzip any zip files contained within.
.Dl ./honk import username twitter source-directory
.Pp
To prepare an Instagram data archive, extract the igusername.zip file.
.Dl ./honk import username instagram source-directory
.Ss Advanced Options
Advanced configuration values may be set by running the
.Ic setconfig Ar key value

fun.go (8 lines changed)
View File

@ -211,7 +211,8 @@ func replaceimgsand(zap map[string]bool, absolute bool) func(node *html.Node) st
func translatechonk(ch *Chonk) {
noise := ch.Noise
if ch.Format == "markdown" {
noise = markitzero(noise)
var marker mz.Marker
noise = marker.Mark(noise)
}
var htf htfilter.Filter
htf.SpanClasses = allowedclasses
@ -300,7 +301,8 @@ func precipitate(honk *Honk) {
honk.Precis = noise[:idx]
noise = noise[idx+1:]
}
honk.Precis = markitzero(strings.TrimSpace(honk.Precis))
var marker mz.Marker
honk.Precis = marker.Mark(strings.TrimSpace(honk.Precis))
honk.Noise = noise
}
}
@ -547,7 +549,7 @@ func attoreplacer(m string) string {
}
func ontoreplacer(h string) string {
return fmt.Sprintf(`<a href="https://%s/o/%s">%s</a>`, serverName,
return fmt.Sprintf(`<a class="mention hashtag" href="https://%s/o/%s">%s</a>`, serverName,
strings.ToLower(h[1:]), h)
}
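
The calls above inline the removed markitzero wrapper, declaring an mz.Marker directly where it is used. A minimal sketch of that direct usage; the input string is only an example:

package main

import (
	"fmt"

	"humungus.tedunangst.com/r/webs/mz"
)

func main() {
	var marker mz.Marker
	// Mark performs the same conversion the old markitzero helper wrapped.
	fmt.Println(marker.Mark("a *little* markdown"))
}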

go.mod (2 lines changed)
View File

@ -9,5 +9,5 @@ require (
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4
humungus.tedunangst.com/r/go-sqlite3 v1.1.3
humungus.tedunangst.com/r/webs v0.6.61
humungus.tedunangst.com/r/webs v0.6.62
)

go.sum (4 lines changed)
View File

@ -25,5 +25,5 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
humungus.tedunangst.com/r/go-sqlite3 v1.1.3 h1:G2N4wzDS0NbuvrZtQJhh4F+3X+s7BF8b9ga8k38geUI=
humungus.tedunangst.com/r/go-sqlite3 v1.1.3/go.mod h1:FtEEmQM7U2Ey1TuEEOyY1BmphTZnmiEjPsNLEAkpf/M=
humungus.tedunangst.com/r/webs v0.6.61 h1:Sgy0Htb8Y0jmmLPp73nYDuD4NebeUsgXftP+wB86wSg=
humungus.tedunangst.com/r/webs v0.6.61/go.mod h1:03R0N9BcT49HB4TDd1YmarpbiPvPzVDm74Mk4h1hYPc=
humungus.tedunangst.com/r/webs v0.6.62 h1:T/T0a2xWw1cYKTMqKXwP4GStRPUfOWYytN9zCMMlqpA=
humungus.tedunangst.com/r/webs v0.6.62/go.mod h1:03R0N9BcT49HB4TDd1YmarpbiPvPzVDm74Mk4h1hYPc=

View File

@ -20,6 +20,7 @@ import (
"regexp"
"sort"
"time"
"unicode"
"humungus.tedunangst.com/r/webs/cache"
)
@ -109,8 +110,8 @@ func filtcachefiller(userid int64) (afiltermap, bool) {
}
}
if t := filt.Text; t != "" && t != "." {
wordfront := t[0] != '#'
wordtail := true
wordfront := unicode.IsLetter(rune(t[0]))
wordtail := unicode.IsLetter(rune(t[len(t)-1]))
t = "(?i:" + t + ")"
if wordfront {
t = "\\b" + t
@ -125,8 +126,8 @@ func filtcachefiller(userid int64) (afiltermap, bool) {
}
}
if t := filt.Rewrite; t != "" {
wordfront := t[0] != '#'
wordtail := true
wordfront := unicode.IsLetter(rune(t[0]))
wordtail := unicode.IsLetter(rune(t[len(t)-1]))
t = "(?i:" + t + ")"
if wordfront {
t = "\\b" + t

View File

@ -35,6 +35,8 @@ func importMain(username, flavor, source string) {
importMastodon(username, source)
case "twitter":
importTwitter(username, source)
case "instagram":
importInstagram(username, source)
default:
elog.Fatal("unknown source flavor")
}
@ -445,3 +447,79 @@ func importTwitter(username, source string) {
log.Printf("honk saved %v -> %v", xid, err)
}
}
func importInstagram(username, source string) {
user, err := butwhatabout(username)
if err != nil {
elog.Fatal(err)
}
type Gram struct {
Media []struct {
URI string
Creation int64 `json:"creation_timestamp"`
Title string
}
}
var grams []*Gram
fd, err := os.Open(source + "/content/posts_1.json")
if err != nil {
elog.Fatal(err)
}
dec := json.NewDecoder(fd)
err = dec.Decode(&grams)
if err != nil {
elog.Fatalf("error parsing json: %s", err)
}
fd.Close()
log.Printf("importing %d grams", len(grams))
sort.Slice(grams, func(i, j int) bool {
return grams[i].Media[0].Creation < grams[j].Media[0].Creation
})
for _, g0 := range grams {
g := g0.Media[0]
xid := fmt.Sprintf("%s/%s/%s", user.URL, honkSep, xfiltrate())
what := "honk"
noise := g.Title
convoy := "data:,acoustichonkytonk-" + xfiltrate()
date := time.Unix(g.Creation, 0)
audience := []string{thewholeworld}
honk := Honk{
UserID: user.ID,
Username: user.Name,
What: what,
Honker: user.URL,
XID: xid,
Date: date,
Format: "markdown",
Audience: audience,
Convoy: convoy,
Public: true,
Whofore: 2,
}
{
u := xfiltrate()
fname := fmt.Sprintf("%s/%s", source, g.URI)
data, err := ioutil.ReadFile(fname)
if err != nil {
elog.Printf("error reading media: %s", fname)
continue
}
newurl := fmt.Sprintf("https://%s/d/%s", serverName, u)
fileid, err := savefile(u, u, newurl, "image/jpg", true, data)
if err != nil {
elog.Printf("error saving media: %s", fname)
continue
}
donk := &Donk{
FileID: fileid,
}
honk.Donks = append(honk.Donks, donk)
}
honk.Noise = noise
err := savehonk(&honk)
log.Printf("honk saved %v -> %v", xid, err)
}
}
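
importInstagram above decodes content/posts_1.json into the Gram struct, keyed on the creation_timestamp field. A minimal sketch of that decoding step; the sample JSON is illustrative, not taken from a real export:

package main

import (
	"encoding/json"
	"fmt"
)

// Same shape as the Gram struct used by importInstagram.
type Gram struct {
	Media []struct {
		URI      string
		Creation int64 `json:"creation_timestamp"`
		Title    string
	}
}

func main() {
	sample := `[{"media":[{"uri":"media/posts/202306/photo.jpg","creation_timestamp":1686600000,"title":"beach day"}]}]`
	var grams []*Gram
	if err := json.Unmarshal([]byte(sample), &grams); err != nil {
		panic(err)
	}
	g := grams[0].Media[0]
	fmt.Println(g.URI, g.Creation, g.Title)
}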

View File

@ -1,25 +0,0 @@
//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
package main
import (
"humungus.tedunangst.com/r/webs/mz"
)
func markitzero(s string) string {
var marker mz.Marker
return marker.Mark(s)
}

View File

@ -90,7 +90,7 @@ func initdb() {
os.Remove(dbname)
os.Exit(1)
}()
c := make(chan os.Signal)
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt)
go func() {
<-c
@ -209,7 +209,7 @@ func adduser() {
defer func() {
os.Exit(1)
}()
c := make(chan os.Signal)
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt)
go func() {
<-c
@ -263,7 +263,7 @@ func chpass(username string) {
defer func() {
os.Exit(1)
}()
c := make(chan os.Signal)
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt)
go func() {
<-c

View File

@ -7,6 +7,7 @@
<link href="/local.css{{ .LocalStyleParam }}" rel="stylesheet">
{{ end }}
{{ .APAltLink }}
{{ .Honkology }}
<link href="/icon.png" rel="icon">
<meta name="theme-color" content="#305">
<meta name="viewport" content="width=device-width">

View File

@ -175,7 +175,6 @@ input[type=file] {
.glow {
box-shadow: 0px 0px 16px var(--hl);
}
.honk {
margin: auto;
background: var(--bg-dark);
@ -188,6 +187,35 @@ input[type=file] {
overflow: hidden;
}
.level1 {
margin-left: 0.5em;
}
.level1::before {
position: absolute;
content: ">";
}
.level2 {
margin-left: 1.0em;
}
.level2::before {
position: absolute;
content: ">>";
}
.level3 {
margin-left: 1.5em;
}
.level3::before {
position: absolute;
content: ">>>";
}
.level4 {
margin-left: 2.0em;
}
.level4::before {
position: absolute;
content: ">>>>";
}
.chat {
border-bottom: 0.5px solid var(--fg-subtle);
padding-left: 1em;

web.go (170 lines changed)
View File

@ -364,7 +364,7 @@ func inbox(w http.ResponseWriter, r *http.Request) {
}
what, _ := j.GetString("type")
obj, _ := j.GetString("object")
if what == "Like" || (what == "EmojiReact" && originate(obj) != serverName) {
if what == "Like" || what == "Dislike" || (what == "EmojiReact" && originate(obj) != serverName) {
return
}
who, _ := j.GetString("actor")
@ -779,7 +779,8 @@ func showconvoy(w http.ResponseWriter, r *http.Request) {
templinfo["TopHID"] = honks[0].ID
}
honks = osmosis(honks, u.UserID, false)
reversehonks(honks)
//reversehonks(honks)
honks = threadsort(honks)
templinfo["PageName"] = "convoy"
templinfo["PageArg"] = c
templinfo["ServerMessage"] = "honks in convoy: " + c
@ -1017,6 +1018,106 @@ func trackback(xid string, r *http.Request) {
}
}
func sameperson(h1, h2 *Honk) bool {
n1, n2 := h1.Honker, h2.Honker
if h1.Oonker != "" {
n1 = h1.Oonker
}
if h2.Oonker != "" {
n2 = h2.Oonker
}
return n1 == n2
}
func threadsort(honks []*Honk) []*Honk {
sort.Slice(honks, func(i, j int) bool {
return honks[i].Date.Before(honks[j].Date)
})
honkx := make(map[string]*Honk)
kids := make(map[string][]*Honk)
for _, h := range honks {
honkx[h.XID] = h
rid := h.RID
kids[rid] = append(kids[rid], h)
}
done := make(map[*Honk]bool)
var thread []*Honk
var nextlevel func(p *Honk)
level := 0
nextlevel = func(p *Honk) {
levelup := level < 4
if pp := honkx[p.RID]; p.RID == "" || (pp != nil && sameperson(p, pp)) {
levelup = false
}
if level > 0 && len(kids[p.RID]) == 1 {
if pp := honkx[p.RID]; pp != nil && len(kids[pp.RID]) == 1 {
levelup = false
}
}
if levelup {
level++
}
p.Style += fmt.Sprintf(" level%d", level)
childs := kids[p.XID]
sort.SliceStable(childs, func(i, j int) bool {
return sameperson(childs[i], p) && !sameperson(childs[j], p)
})
for _, h := range childs {
if !done[h] {
done[h] = true
thread = append(thread, h)
nextlevel(h)
}
}
if levelup {
level--
}
}
for _, h := range honks {
if !done[h] && h.RID == "" {
done[h] = true
thread = append(thread, h)
nextlevel(h)
}
}
for _, h := range honks {
if !done[h] {
done[h] = true
thread = append(thread, h)
nextlevel(h)
}
}
return thread
}
func honkology(honk *Honk) template.HTML {
var user *WhatAbout
ok := somenumberedusers.Get(honk.UserID, &user)
if !ok {
return ""
}
title := fmt.Sprintf("%s: %s", user.Display, honk.Precis)
imgurl := avatarURL(user)
for _, d := range honk.Donks {
if d.Local && strings.HasPrefix(d.Media, "image") {
imgurl = d.URL
break
}
}
short := honk.Noise
if len(short) > 160 {
short = short[0:160] + "..."
}
return templates.Sprintf(
`<meta property="og:title" content="%s" />
<meta property="og:type" content="article" />
<meta property="article:author" content="%s" />
<meta property="og:url" content="%s" />
<meta property="og:image" content="%s" />
<meta property="og:description" content="%s" />`,
title, user.URL, honk.XID, imgurl, short)
}
func showonehonk(w http.ResponseWriter, r *http.Request) {
name := mux.Vars(r)["name"]
user, err := butwhatabout(name)
@ -1064,19 +1165,24 @@ func showonehonk(w http.ResponseWriter, r *http.Request) {
honkpage(w, u, honks, templinfo)
return
}
templinfo := getInfo(r)
rawhonks := gethonksbyconvoy(honk.UserID, honk.Convoy, 0)
reversehonks(rawhonks)
//reversehonks(rawhonks)
rawhonks = threadsort(rawhonks)
var honks []*Honk
for _, h := range rawhonks {
if h.XID == xid && len(honks) != 0 {
h.Style += " glow"
if h.XID == xid {
templinfo["Honkology"] = honkology(h)
if len(honks) != 0 {
h.Style += " glow"
}
}
if h.Public && (h.Whofore == 2 || h.IsAcked()) {
honks = append(honks, h)
}
}
templinfo := getInfo(r)
templinfo["ServerMessage"] = "one honk maybe more"
templinfo["HonkCSRF"] = login.GetCSRF("honkhonk", r)
templinfo["APAltLink"] = templates.Sprintf("<link href='%s' rel='alternate' type='application/activity+json'>", xid)
@ -2076,15 +2182,10 @@ func dochpass(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, "/account", http.StatusSeeOther)
}
func fingerlicker(w http.ResponseWriter, r *http.Request) {
orig := r.FormValue("resource")
dlog.Printf("finger lick: %s", orig)
var oldfingers = cache.New(cache.Options{Filler: func(orig string) ([]byte, bool) {
if strings.HasPrefix(orig, "acct:") {
orig = orig[5:]
}
name := orig
idx := strings.LastIndexByte(name, '/')
if idx != -1 {
@ -2105,12 +2206,7 @@ func fingerlicker(w http.ResponseWriter, r *http.Request) {
}
user, err := butwhatabout(name)
if err != nil {
http.NotFound(w, r)
return
}
if stealthmode(user.ID, r) {
http.NotFound(w, r)
return
return nil, false
}
j := junk.New()
@ -2121,9 +2217,22 @@ func fingerlicker(w http.ResponseWriter, r *http.Request) {
l["type"] = `application/activity+json`
l["href"] = user.URL
j["links"] = []junk.Junk{l}
return j.ToBytes(), true
}})
w.Header().Set("Content-Type", "application/jrd+json")
j.Write(w)
func fingerlicker(w http.ResponseWriter, r *http.Request) {
orig := r.FormValue("resource")
dlog.Printf("finger lick: %s", orig)
var j []byte
ok := oldfingers.Get(orig, &j)
if ok {
w.Header().Set("Content-Type", "application/jrd+json")
w.Write(j)
} else {
http.NotFound(w, r)
}
}
func somedays() string {
@ -2295,6 +2404,8 @@ func webhydra(w http.ResponseWriter, r *http.Request) {
c := r.FormValue("c")
honks = gethonksbyconvoy(userid, c, wanted)
honks = osmosis(honks, userid, false)
honks = threadsort(honks)
reversehonks(honks)
hydra.Srvmsg = templates.Sprintf("honks in convoy: %s", c)
case "honker":
xid := r.FormValue("xid")
@ -2422,7 +2533,7 @@ func apihandler(w http.ResponseWriter, r *http.Request) {
rcpts := boxuprcpts(user, r.Form["rcpt"], public)
msg := []byte(r.FormValue("msg"))
for rcpt := range rcpts {
go deliverate(0, userid, rcpt, msg, true)
go deliverate(userid, rcpt, msg)
}
case "gethonkers":
j := junk.New()
@ -2440,13 +2551,24 @@ func apihandler(w http.ResponseWriter, r *http.Request) {
}
}
func fiveoh(w http.ResponseWriter, r *http.Request) {
fd, err := os.OpenFile("violations.json", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
if err != nil {
elog.Printf("error opening violations! %s", err)
return
}
defer fd.Close()
io.Copy(fd, r.Body)
fd.WriteString("\n")
}
var endoftheworld = make(chan bool)
var readyalready = make(chan bool)
var workinprogress = 0
func enditall() {
sig := make(chan os.Signal)
signal.Notify(sig, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT)
sig := make(chan os.Signal, 1)
signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT)
<-sig
ilog.Printf("stopping...")
for i := 0; i < workinprogress; i++ {
@ -2594,6 +2716,8 @@ func serve() {
posters.HandleFunc("/server/inbox", serverinbox)
posters.HandleFunc("/inbox", serverinbox)
posters.HandleFunc("/csp-violation", fiveoh)
getters.HandleFunc("/style.css", serveviewasset)
getters.HandleFunc("/honkpage.js", serveviewasset)
getters.HandleFunc("/misc.js", serveviewasset)