basic import from instagram
Ted Unangst tedu@tedunangst.com
Mon, 12 Jun 2023 17:31:13 -0400
3 files changed, 84 insertions(+), 1 deletions(-)
M docs/changelog.txt → docs/changelog.txt
@@ -2,6 +2,8 @@ changelog
 
 === next
 
++ Import from instagram.
+
 + improve handling of some Page and Link objects
 
 + search can now load external posts
M docs/honk.8 → docs/honk.8
@@ -190,7 +190,7 @@ .Ss Import
 Data may be imported and converted from other services using the
 .Ic import
 command.
-Currently supports Mastodon and Twitter exported data.
+Currently supports Mastodon, Twitter, and Instagram exported data.
 Posts are imported and backdated to appear as old honks.
 The Mastodon following list is imported, but must be refollowed.
 .Pp
@@ -201,6 +201,9 @@ To prepare a Twitter data archive, extract the twitter-longhash.zip file.
 After unzipping the data archive, navigate to the tweet_media directory
 and unzip any zip files contained within.
 .Dl ./honk import username twitter source-directory
+.Pp
+To prepare an Instagram data archive, extract the igusername.zip file.
+.Dl ./honk import username instagram source-directory
 .Ss Advanced Options
 Advanced configuration values may be set by running the
 .Ic setconfig Ar key value
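For orientation, the new importer reads everything relative to the source directory given on the command line. Judging from the code below, roughly this layout is assumed after extracting the archive; only content/posts_1.json is fixed, the media paths and file names here are made up for illustration:

    source-directory/
        content/
            posts_1.json      (list of posts with uri, creation_timestamp, title)
        media/
            posts/
                202306/
                    photo.jpg (referenced by the uri field of a post)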
M import.go → import.go
@@ -35,6 +35,8 @@ case "mastodon":
importMastodon(username, source) case "twitter": importTwitter(username, source) + case "instagram": + importInstagram(username, source) default: elog.Fatal("unknown source flavor") }@@ -445,3 +447,79 @@ err := savehonk(&honk)
log.Printf("honk saved %v -> %v", xid, err) } } + +func importInstagram(username, source string) { + user, err := butwhatabout(username) + if err != nil { + elog.Fatal(err) + } + + type Gram struct { + Media []struct { + URI string + Creation int64 `json:"creation_timestamp"` + Title string + } + } + + var grams []*Gram + fd, err := os.Open(source + "/content/posts_1.json") + if err != nil { + elog.Fatal(err) + } + dec := json.NewDecoder(fd) + err = dec.Decode(&grams) + if err != nil { + elog.Fatalf("error parsing json: %s", err) + } + fd.Close() + log.Printf("importing %d grams", len(grams)) + sort.Slice(grams, func(i, j int) bool { + return grams[i].Media[0].Creation < grams[j].Media[0].Creation + }) + for _, g0 := range grams { + g := g0.Media[0] + xid := fmt.Sprintf("%s/%s/%s", user.URL, honkSep, xfiltrate()) + what := "honk" + noise := g.Title + convoy := "data:,acoustichonkytonk-" + xfiltrate() + date := time.Unix(g.Creation, 0) + audience := []string{thewholeworld} + honk := Honk{ + UserID: user.ID, + Username: user.Name, + What: what, + Honker: user.URL, + XID: xid, + Date: date, + Format: "markdown", + Audience: audience, + Convoy: convoy, + Public: true, + Whofore: 2, + } + { + u := xfiltrate() + fname := fmt.Sprintf("%s/%s", source, g.URI) + data, err := ioutil.ReadFile(fname) + if err != nil { + elog.Printf("error reading media: %s", fname) + continue + } + newurl := fmt.Sprintf("https://%s/d/%s", serverName, u) + + fileid, err := savefile(u, u, newurl, "image/jpg", true, data) + if err != nil { + elog.Printf("error saving media: %s", fname) + continue + } + donk := &Donk{ + FileID: fileid, + } + honk.Donks = append(honk.Donks, donk) + } + honk.Noise = noise + err := savehonk(&honk) + log.Printf("honk saved %v -> %v", xid, err) + } +}