//
// Copyright (c) 2019 Ted Unangst <tedu@tedunangst.com>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"regexp"
	"strings"

	"github.com/andybalholm/cascadia"
	"golang.org/x/net/html"
	"humungus.tedunangst.com/r/webs/htfilter"
)
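
// Selectors for finding tweet text, permalinks, reply context, and photos
// in Twitter's rendered HTML, and regexps for locating "hoot:" directives
// in a post, pulling the author name out of a tweet url, and stripping
// pic.twitter.com leftovers from tweet text.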
var tweetsel = cascadia.MustCompile("div[data-testid=tweetText]")
var linksel = cascadia.MustCompile("a time")
var replyingto = cascadia.MustCompile(".ReplyingToContextBelowAuthor")
var imgsel = cascadia.MustCompile("div[data-testid=tweetPhoto] img")
var authorregex = regexp.MustCompile("twitter.com/([^/]+)")

var re_hoots = regexp.MustCompile(`hoot: ?https://\S+`)
var re_removepics = regexp.MustCompile(`pic\.twitter\.com/[[:alnum:]]+`)

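// hootextractor parses the HTML of a tweet page and renders the tweets as
// quoted plain text, one "> @author: text" line per tweet, keeping only
// tweets by the author of the linked tweet. The seen map tracks already
// quoted text (consulted only by the legacy extraction path below). On
// parse failure the url is returned unchanged.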
func hootextractor(r io.Reader, url string, seen map[string]bool) string {
	root, err := html.Parse(r)
	if err != nil {
		elog.Printf("error parsing hoot: %s", err)
		return url
	}

	url = strings.Replace(url, "mobile.twitter.com", "twitter.com", -1)
	wantmatch := authorregex.FindStringSubmatch(url)
	// the author whose tweets we want, taken from the url if present
	var wanted string
	if len(wantmatch) == 2 {
		wanted = wantmatch[1]
	}

	var htf htfilter.Filter
	// inline emoji images become their alt text; other images are kept
	// as simple img tags
	htf.Imager = func(node *html.Node) string {
		alt := htfilter.GetAttr(node, "alt")
		if htfilter.HasClass(node, "Emoji") && alt != "" {
			return alt
		}
		return fmt.Sprintf(" <img src='%s' alt='%s'>", htfilter.GetAttr(node, "src"), alt)
	}

	var buf strings.Builder
	fmt.Fprintf(&buf, "%s\n", url)

	divs := tweetsel.MatchAll(root)
	for i, div := range divs {
		{
			// current Twitter markup: climb from the tweet text div
			// to the enclosing tweet and find its permalink
			twp := div.Parent.Parent.Parent.Parent.Parent
			link := url
			alink := linksel.MatchFirst(twp)
			if alink == nil {
				if i != 0 {
					dlog.Printf("missing link")
					continue
				}
			} else {
				alink = alink.Parent
				link = "https://twitter.com" + htfilter.GetAttr(alink, "href")
			}
			authormatch := authorregex.FindStringSubmatch(link)
			if len(authormatch) < 2 {
				dlog.Printf("no author?: %s", link)
				continue
			}
			author := authormatch[1]
			if wanted == "" {
				wanted = author
			}
			// only quote tweets from the thread's author
			if author != wanted {
				continue
			}
			// move photos into the tweet text div so they survive filtering
			for _, img := range imgsel.MatchAll(twp) {
				img.Parent.RemoveChild(img)
				div.AppendChild(img)
			}
			text := htf.NodeText(div)
			text = strings.Replace(text, "\n", " ", -1)
			fmt.Fprintf(&buf, "> @%s: %s\n", author, text)
			continue
		}

		// What follows appears to be the older extraction logic for
		// Twitter's previous markup. Every path through the block above
		// ends in continue, so this code is now unreachable and kept
		// only for reference.
		twp := div.Parent.Parent.Parent.Parent.Parent
		link := url
		alink := linksel.MatchFirst(twp)
		if alink == nil {
			if i != 0 {
				dlog.Printf("missing link")
				continue
			}
		} else {
			link = "https://twitter.com" + htfilter.GetAttr(alink, "href")
		}
		// skip replies to other people
		replto := replyingto.MatchFirst(twp)
		if replto != nil {
			continue
		}
		authormatch := authorregex.FindStringSubmatch(link)
		if len(authormatch) < 2 {
			dlog.Printf("no author?: %s", link)
			continue
		}
		author := authormatch[1]
		if wanted == "" {
			wanted = author
		}
		if author != wanted {
			continue
		}
		for _, img := range imgsel.MatchAll(twp) {
			img.Parent.RemoveChild(img)
			div.AppendChild(img)
		}
		text := htf.NodeText(div)
		text = strings.Replace(text, "\n", " ", -1)
		text = re_removepics.ReplaceAllString(text, "")

		if seen[text] {
			continue
		}

		fmt.Fprintf(&buf, "> @%s: %s\n", author, text)
		seen[text] = true
	}
	return buf.String()
}

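// hooterize replaces every "hoot: https://..." directive in a post with
// the text of the referenced tweet thread. For example (the url and
// author here are purely illustrative), a post containing
//
//	hoot: https://twitter.com/tedu/status/123456789
//
// would, on a successful fetch, come back as something like
//
//	https://twitter.com/tedu/status/123456789
//	> @tedu: text of the first tweet
//	> @tedu: text of the next tweet
//
// If the fetch fails, the directive is left as is. The last response is
// also saved to lasthoot.html for debugging.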
func hooterize(noise string) string {
	seen := make(map[string]bool)

	hootfetcher := func(hoot string) string {
		// strip the "hoot:" prefix and optional space
		url := hoot[5:]
		if url[0] == ' ' {
			url = url[1:]
		}
		url = strings.Replace(url, "mobile.twitter.com", "twitter.com", -1)
		dlog.Printf("hooterizing %s", url)
		req, err := http.NewRequest("GET", url, nil)
		if err != nil {
			ilog.Printf("error: %s", err)
			return hoot
		}
		// identify as a simple bot that accepts html
		req.Header.Set("User-Agent", "Bot")
		req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
		req.Header.Set("Accept-Language", "en-US,en;q=0.9")
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			ilog.Printf("error: %s", err)
			return hoot
		}
		defer resp.Body.Close()
		if resp.StatusCode != 200 {
			ilog.Printf("error getting %s: %d", url, resp.StatusCode)
			return hoot
		}
		// tee the response into lasthoot.html for debugging
		ld, _ := os.Create("lasthoot.html")
		r := io.TeeReader(resp.Body, ld)
		return hootextractor(r, url, seen)
	}

	return re_hoots.ReplaceAllStringFunc(noise, hootfetcher)
}