honk/hoot.go

//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

package main

import (
"fmt"
2019-07-18 05:11:48 +02:00
"io"
2019-06-05 09:52:56 +02:00
"log"
"net/http"
2019-07-18 05:11:48 +02:00
"os"
2019-06-05 09:52:56 +02:00
"regexp"
"strings"
"github.com/andybalholm/cascadia"
"golang.org/x/net/html"
"humungus.tedunangst.com/r/webs/htfilter"
)
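
// css selectors and regexps for picking apart a twitter page, and for
// spotting "hoot:" references in posted text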
var tweetsel = cascadia.MustCompile("div.tweet-text")
var linksel = cascadia.MustCompile("td.timestamp a")
var replyingto = cascadia.MustCompile(".ReplyingToContextBelowAuthor")
var authorregex = regexp.MustCompile("twitter.com/([^/]+)")

var re_hoots = regexp.MustCompile(`hoot: ?https://\S+`)
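
// hootextractor parses a fetched twitter page and returns the url followed by
// one quoted line per tweet, keeping only tweets by the thread's author and
// skipping any text already present in seen.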
func hootextractor(r io.Reader, url string, seen map[string]bool) string {
	root, err := html.Parse(r)
	if err != nil {
		log.Printf("error parsing hoot: %s", err)
		return url
	}
	divs := tweetsel.MatchAll(root)
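
	// normalize the url back to twitter.com and note which author it names;
	// only tweets from that author are kept below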
	url = strings.Replace(url, "mobile.twitter.com", "twitter.com", -1)

	var wanted string
	wantmatch := authorregex.FindStringSubmatch(url)
	if len(wantmatch) == 2 {
		wanted = wantmatch[1]
	}

	var buf strings.Builder
	fmt.Fprintf(&buf, "%s\n", url)

	// drop images entirely; only the tweet text is wanted
	var htf htfilter.Filter
	htf.Imager = func(node *html.Node) string {
		return ""
	}
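
	// walk every tweet on the page, quoting the ones we keep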
	for i, div := range divs {
		twp := div.Parent.Parent.Parent
		link := url

		// find this tweet's permalink; the first tweet may fall back
		// to the url we were given
		alink := linksel.MatchFirst(twp)
		if alink == nil {
			if i != 0 {
				log.Printf("missing link")
				continue
			}
		} else {
			link = "https://twitter.com" + htfilter.GetAttr(alink, "href")
		}

		// skip tweets marked as replying to someone
		replto := replyingto.MatchFirst(twp)
		if replto != nil {
			continue
		}

		// the author comes from the permalink; ignore tweets by anyone
		// other than the wanted author
		authormatch := authorregex.FindStringSubmatch(link)
		if len(authormatch) < 2 {
			log.Printf("no author?: %s", link)
			continue
		}
		author := authormatch[1]
		if wanted == "" {
			wanted = author
		}
		if author != wanted {
			continue
		}

		// collapse the text to one line and turn pic.twitter.com
		// references into full urls
		text := htf.NodeText(div)
		text = strings.Replace(text, "\n", " ", -1)
		text = strings.Replace(text, "pic.twitter.com", "https://pic.twitter.com", -1)

		// don't quote the same tweet twice
		if seen[text] {
			continue
		}
		fmt.Fprintf(&buf, "> @%s: %s\n", author, text)
		seen[text] = true
	}

	return buf.String()
}
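
// hooterize expands every "hoot: url" reference in the given text into the
// text of the linked twitter thread.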
func hooterize(noise string) string {
	seen := make(map[string]bool)

	// fetch one hoot reference and return the formatted thread, or the
	// original text if anything goes wrong
	hootfetcher := func(hoot string) string {
		// strip the "hoot:" prefix and optional space
		url := hoot[5:]
		if url[0] == ' ' {
			url = url[1:]
		}
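
		// always fetch the mobile version of the page, whichever form
		// the link used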
		url = strings.Replace(url, "mobile.twitter.com", "twitter.com", -1)
		url = strings.Replace(url, "twitter.com", "mobile.twitter.com", -1)

		log.Printf("hooterizing %s", url)
		req, err := http.NewRequest("GET", url, nil)
		if err != nil {
			log.Printf("error: %s", err)
			return hoot
		}
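		// a simple user agent, so twitter serves the static html version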
req.Header.Set("User-Agent", "OpenBSD ftp")
req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
resp, err := http.DefaultClient.Do(req)
if err != nil {
log.Printf("error: %s", err)
return hoot
2019-06-05 09:52:56 +02:00
}
2019-10-31 04:05:27 +01:00
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Printf("error getting %s: %d", url, resp.StatusCode)
return hoot
}
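
		// save a copy of the fetched page to lasthoot.html for inspection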
		ld, _ := os.Create("lasthoot.html")
		r := io.TeeReader(resp.Body, ld)
		return hootextractor(r, url, seen)
	}
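
	// replace each "hoot: url" match in the text with the fetched thread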
	return re_hoots.ReplaceAllStringFunc(noise, hootfetcher)
}