basic import from instagram
parent e8cf29056e
commit fea5df77dd

changelog
@@ -2,6 +2,8 @@ changelog
 === next

++ Import from instagram.
+
 + improve handling of some Page and Link objects

 + search can now load external posts

honk.8
@@ -190,7 +190,7 @@ and templates are reloaded every request.
 Data may be imported and converted from other services using the
 .Ic import
 command.
-Currently supports Mastodon and Twitter exported data.
+Currently supports Mastodon, Twitter, and Instagram exported data.
 Posts are imported and backdated to appear as old honks.
 The Mastodon following list is imported, but must be refollowed.
 .Pp
@@ -201,6 +201,9 @@ To prepare a Twitter data archive, extract the twitter-longhash.zip file.
 After unzipping the data archive, navigate to the tweet_media directory
 and unzip any zip files contained within.
 .Dl ./honk import username twitter source-directory
+.Pp
+To prepare an Instagram data archive, extract the igusername.zip file.
+.Dl ./honk import username instagram source-directory
 .Ss Advanced Options
 Advanced configuration values may be set by running the
 .Ic setconfig Ar key value
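
Side note (not part of the commit): judging from the code below, the importer reads only content/posts_1.json inside the extracted archive and then loads each post's media from the relative path in its uri field. The extracted tree therefore looks roughly like this; apart from content/posts_1.json, the names shown are illustrative:

	igusername/
		content/
			posts_1.json
		media/
			posts/...   (image files at the relative paths given by each post's uri)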

import.go | 78 ++++++++++
@@ -35,6 +35,8 @@ func importMain(username, flavor, source string) {
 		importMastodon(username, source)
 	case "twitter":
 		importTwitter(username, source)
+	case "instagram":
+		importInstagram(username, source)
 	default:
 		elog.Fatal("unknown source flavor")
 	}
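
With this dispatch in place, the new flavor is run like the existing ones, per the man page above (directory name illustrative):

	./honk import username instagram igusername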

@@ -445,3 +447,79 @@ func importTwitter(username, source string) {
 		log.Printf("honk saved %v -> %v", xid, err)
 	}
 }
+
+func importInstagram(username, source string) {
+	user, err := butwhatabout(username)
+	if err != nil {
+		elog.Fatal(err)
+	}
+
+	type Gram struct {
+		Media []struct {
+			URI      string
+			Creation int64 `json:"creation_timestamp"`
+			Title    string
+		}
+	}
+
+	var grams []*Gram
+	fd, err := os.Open(source + "/content/posts_1.json")
+	if err != nil {
+		elog.Fatal(err)
+	}
+	dec := json.NewDecoder(fd)
+	err = dec.Decode(&grams)
+	if err != nil {
+		elog.Fatalf("error parsing json: %s", err)
+	}
+	fd.Close()
+	log.Printf("importing %d grams", len(grams))
+	sort.Slice(grams, func(i, j int) bool {
+		return grams[i].Media[0].Creation < grams[j].Media[0].Creation
+	})
+	for _, g0 := range grams {
+		g := g0.Media[0]
+		xid := fmt.Sprintf("%s/%s/%s", user.URL, honkSep, xfiltrate())
+		what := "honk"
+		noise := g.Title
+		convoy := "data:,acoustichonkytonk-" + xfiltrate()
+		date := time.Unix(g.Creation, 0)
+		audience := []string{thewholeworld}
+		honk := Honk{
+			UserID:   user.ID,
+			Username: user.Name,
+			What:     what,
+			Honker:   user.URL,
+			XID:      xid,
+			Date:     date,
+			Format:   "markdown",
+			Audience: audience,
+			Convoy:   convoy,
+			Public:   true,
+			Whofore:  2,
+		}
+		{
+			u := xfiltrate()
+			fname := fmt.Sprintf("%s/%s", source, g.URI)
+			data, err := ioutil.ReadFile(fname)
+			if err != nil {
+				elog.Printf("error reading media: %s", fname)
+				continue
+			}
+			newurl := fmt.Sprintf("https://%s/d/%s", serverName, u)
+
+			fileid, err := savefile(u, u, newurl, "image/jpg", true, data)
+			if err != nil {
+				elog.Printf("error saving media: %s", fname)
+				continue
+			}
+			donk := &Donk{
+				FileID: fileid,
+			}
+			honk.Donks = append(honk.Donks, donk)
+		}
+		honk.Noise = noise
+		err := savehonk(&honk)
+		log.Printf("honk saved %v -> %v", xid, err)
+	}
+}
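
To make the Gram decoding concrete, here is a small standalone sketch (not part of the commit) of the JSON shape importInstagram expects to find in posts_1.json; the sample values are invented for illustration:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"
)

// Gram mirrors the struct declared inside importInstagram above.
type Gram struct {
	Media []struct {
		URI      string
		Creation int64 `json:"creation_timestamp"`
		Title    string
	}
}

func main() {
	// Invented sample: a posts_1.json is an array of posts, each carrying
	// a media list with uri, creation_timestamp, and title fields.
	sample := `[
	  {"media": [{
	    "uri": "media/posts/201906/photo.jpg",
	    "creation_timestamp": 1561234567,
	    "title": "a caption"
	  }]}
	]`

	var grams []*Gram
	if err := json.NewDecoder(strings.NewReader(sample)).Decode(&grams); err != nil {
		panic(err)
	}
	// Like the importer, look only at the first media entry of each post.
	g := grams[0].Media[0]
	fmt.Println(g.URI, time.Unix(g.Creation, 0).UTC(), g.Title)
}

Note that the importer takes g0.Media[0] only, so under this scheme a carousel post would keep just its first image.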