dedupe the hoots
parent 7eee7d7600
commit 4e225e16f3
hoot.go | 71
@@ -33,37 +33,12 @@ var tweetsel = cascadia.MustCompile("p.tweet-text")
 var linksel = cascadia.MustCompile(".time a.tweet-timestamp")
 var authorregex = regexp.MustCompile("twitter.com/([^/]+)")
 
-func hootfetcher(hoot string) string {
-	url := hoot[5:]
-	if url[0] == ' ' {
-		url = url[1:]
-	}
-	url = strings.Replace(url, "mobile.twitter.com", "twitter.com", -1)
-	log.Printf("hooterizing %s", url)
-	req, err := http.NewRequest("GET", url, nil)
-	if err != nil {
-		log.Printf("error: %s", err)
-		return hoot
-	}
-	req.Header.Set("User-Agent", "OpenBSD ftp")
-	req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
-	req.Header.Set("Accept-Language", "en-US,en;q=0.9")
-	resp, err := http.DefaultClient.Do(req)
-	if err != nil {
-		log.Printf("error: %s", err)
-		return hoot
-	}
-	defer resp.Body.Close()
-	if resp.StatusCode != 200 {
-		log.Printf("error getting %s: %d", url, resp.StatusCode)
-		return hoot
-	}
-	ld, _ := os.Create("lasthoot.html")
-	r := io.TeeReader(resp.Body, ld)
-	return hootfixer(r, url)
-}
+var re_hoots = regexp.MustCompile(`hoot: ?https://\S+`)
+
+func hooterize(noise string) string {
+	seen := make(map[string]bool)
 
-func hootfixer(r io.Reader, url string) string {
+	hootfixer := func(r io.Reader, url string) string {
 	root, err := html.Parse(r)
 	if err != nil {
 		log.Printf("error parsing hoot: %s", err)
@@ -101,13 +76,45 @@ func hootfixer(r io.Reader, url string) string {
 		text = strings.Replace(text, "\n", " ", -1)
 		text = strings.Replace(text, "pic.twitter.com", "https://pic.twitter.com", -1)
 
+		if seen[text] {
+			continue
+		}
+
 		fmt.Fprintf(&buf, "> @%s: %s\n", author, text)
+		seen[text] = true
 	}
 	return buf.String()
 }
 
-var re_hoots = regexp.MustCompile(`hoot: ?https://\S+`)
+	hootfetcher := func(hoot string) string {
+		url := hoot[5:]
+		if url[0] == ' ' {
+			url = url[1:]
+		}
+		url = strings.Replace(url, "mobile.twitter.com", "twitter.com", -1)
+		log.Printf("hooterizing %s", url)
+		req, err := http.NewRequest("GET", url, nil)
+		if err != nil {
+			log.Printf("error: %s", err)
+			return hoot
+		}
+		req.Header.Set("User-Agent", "OpenBSD ftp")
+		req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
+		req.Header.Set("Accept-Language", "en-US,en;q=0.9")
+		resp, err := http.DefaultClient.Do(req)
+		if err != nil {
+			log.Printf("error: %s", err)
+			return hoot
+		}
+		defer resp.Body.Close()
+		if resp.StatusCode != 200 {
+			log.Printf("error getting %s: %d", url, resp.StatusCode)
+			return hoot
+		}
+		ld, _ := os.Create("lasthoot.html")
+		r := io.TeeReader(resp.Body, ld)
+		return hootfixer(r, url)
+	}
 
-func hooterize(noise string) string {
 	return re_hoots.ReplaceAllStringFunc(noise, hootfetcher)
 }
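Why the restructuring: the dedup state (seen) has to be visible from hootfixer, so hootfixer and hootfetcher become closures over a map created once per hooterize call, and repeated tweet text is skipped before it is appended to the output buffer. Below is a minimal standalone sketch of that seen-map pattern, assuming nothing beyond what the diff shows; the names dedupe, lines, and hoots are hypothetical and not part of hoot.go.

package main

import (
	"fmt"
	"strings"
)

// dedupe keeps the first occurrence of each string and drops later repeats,
// the same lookup-then-mark pattern hootfixer applies to tweet text.
func dedupe(lines []string) []string {
	seen := make(map[string]bool)
	var out []string
	for _, line := range lines {
		if seen[line] {
			continue // already emitted, skip the duplicate
		}
		out = append(out, line)
		seen[line] = true
	}
	return out
}

func main() {
	hoots := []string{
		"> @alice: hello",
		"> @alice: hello", // repeated text, dropped
		"> @bob: world",
	}
	fmt.Println(strings.Join(dedupe(hoots), "\n"))
}

Because the map is created inside hooterize rather than at package level, duplicates are only collapsed within a single message; each call starts with an empty seen map.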