# SPDX-FileCopyrightText: 2024 Tim Cocks
#
# SPDX-License-Identifier: MIT
"""
Bluesky_RPi_TFT_Scroller code.py
Infinitely scroll Bluesky posts on a 320x240 pixel TFT
"""
import json
import os

import requests
import webview

FEEDLINK_RETROCOMPUTING = "https://bsky.app/profile/did:plc:tbo4hkau3p2itkar2vsnb3gp/feed/aaabo5oe7bzok"

# Un-comment a single key inside FETCH_ARGS and set its value to the feed, list, or search
# that you want to scroll.
FETCH_ARGS = {
    # "feed_share_link": FEEDLINK_RETROCOMPUTING,
    # "feed_share_link": "https://bsky.app/profile/did:plc:463touruejpokvutnn5ikxb5/lists/3lbfdtahfzt2a",
    # "search_args": {"q": "Adafruit", "sort": "latest"}
    "search_args": {"q": "#circuitpython", "sort": "latest"}
}


def at_feed_uri_from_share_link(share_link):
    """
    Converts a share link into an AT URI for that resource.

    :param share_link: The share link to convert.
    :return str: The AT URI pointing at the resource.
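
    Example (hypothetical DID and record key, shown only to illustrate the
    string replacement performed below):
        "https://bsky.app/profile/did:plc:example/feed/aaaexample"
        -> "at://did:plc:example/app.bsky.feed.generator/aaaexample"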
    """
    at_feed_uri = share_link.replace("https://bsky.app/profile/", "at://")
    if "/feed/" in share_link:
        at_feed_uri = at_feed_uri.replace("/feed/", "/app.bsky.feed.generator/")
    if "/lists/" in share_link:
        at_feed_uri = at_feed_uri.replace("/lists/", "/app.bsky.graph.list/")
    return at_feed_uri


def fetch_data(feed_share_link=None, search_args=None):
    """
    Fetch posts from the Bluesky API and write them into the local cached
    data files. After the posts are written locally, iterate over them
    and download the relevant photos from them.

    Must pass either feed_share_link or search_args.

    :param feed_share_link: The link copied from the Bluesky front end to share the feed or list.
    :param search_args: A dictionary containing at minimum a ``q`` key with a string value of
        the hashtag or term to search for. See the bsky API docs for other supported keys.
    :return: None
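
    Example arguments (mirroring the options already listed in FETCH_ARGS above):
        feed_share_link=FEEDLINK_RETROCOMPUTING
        search_args={"q": "#circuitpython", "sort": "latest"}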
    """
    if feed_share_link is None and search_args is None:
        # If both inputs are None, just use retrocomputing feed.
        feed_share_link = FEEDLINK_RETROCOMPUTING

    # if a share link input was provided
    if feed_share_link is not None:
        FEED_AT = at_feed_uri_from_share_link(feed_share_link)
        # print(FEED_AT)

        # if it's a feed
        if "/app.bsky.feed.generator/" in FEED_AT:
            URL = f"https://public.api.bsky.app/xrpc/app.bsky.feed.getFeed?feed={FEED_AT}&limit=30"
            headers = {"Accept-Language": "en"}
            resp = requests.get(URL, headers=headers)

        # if it's a list
        elif "/app.bsky.graph.list/" in FEED_AT:
            URL = f"https://public.api.bsky.app/xrpc/app.bsky.feed.getListFeed?list={FEED_AT}&limit=30"
            headers = {"Accept-Language": "en"}
            resp = requests.get(URL, headers=headers)

        # raise error if it's an unknown type
        else:
            raise ValueError("Only 'app.bsky.feed.generator' and 'app.bsky.graph.list' URIs are supported.")

    # if a search input was provided
    if search_args is not None:
        URL = "https://public.api.bsky.app/xrpc/app.bsky.feed.searchPosts"
        headers = {"Accept-Language": "en"}
        resp = requests.get(URL, headers=headers, params=search_args)

    with open(".data/raw_data.json", "wb") as f:
        # write raw response to cache
        f.write(resp.content)

    # Process the post data into a smaller subset
    # containing just the bits we need for showing
    # on the TFT.
    resp_json = json.loads(resp.text)
    processed_posts = {"posts": []}
    fetched_posts = None
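    # getFeed and getListFeed responses nest the posts under a "feed" list,
    # while searchPosts returns a top-level "posts" list, so accept either key.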
    if "feed" in resp_json.keys():
        fetched_posts = resp_json["feed"]
    elif "posts" in resp_json.keys():
        fetched_posts = resp_json["posts"]

    for post in fetched_posts:
        cur_post = {}
        if "post" in post.keys():
            post = post["post"]
        cur_post["author"] = post["author"]["handle"]
        cur_post["text"] = post["record"]["text"]

        # image handling
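        # The embed may hold attached images, a thumbnail (e.g. for video),
        # or an external link card; each shape exposes its preview image
        # under a different key, so check them in turn.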
        if "embed" in post.keys():
            cid = post["cid"]
            if "images" in post["embed"].keys():
                cur_post["image_url"] = post["embed"]["images"][0]["thumb"]
            elif "thumbnail" in post["embed"].keys():
                cur_post["image_url"] = post["embed"]["thumbnail"]
            elif "external" in post["embed"].keys() and "thumb" in post["embed"]["external"].keys():
                cur_post["image_url"] = post["embed"]["external"]["thumb"]

            # if we actually have an image to show
            if "image_url" in cur_post.keys():
                # check if we already downloaded this image
                if f"{cid}.jpg" not in os.listdir("static/imgs/"):
                    print(f"downloading: {cur_post['image_url']}")

                    # download image and write to file
                    img_resp = requests.get(cur_post["image_url"])
                    with open(f"static/imgs/{cid}.jpg", "wb") as f:
                        f.write(img_resp.content)

                cur_post["image_file"] = f"{cid}.jpg"
        processed_posts["posts"].append(cur_post)

    # save the processed data to a file
    with open(".data/processed_data.json", "w", encoding="utf-8") as f:
        f.write(json.dumps(processed_posts))


def read_cached_data():
    """
    Load the cached processed data file and return
    the data from within it.

    :return: The posts data loaded from JSON
    """
    with open(".data/processed_data.json", "r") as f:
        return json.load(f)


class Api:
    """
    API object for interaction between python code here
    and JS code running inside the page.
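
    From the page, these methods should be reachable through the bridge that
    pywebview injects (typically window.pywebview.api); exactly how
    static/index.html calls them is assumed here, not shown in this file.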
    """

    def get_posts(self):
        """
        Fetch new posts data from the Bluesky API, cache it, and return it.

        :return: Processed data containing everything necessary to show
            posts on the TFT.
        """
        fetch_data(**FETCH_ARGS)
        return read_cached_data()

    def check_quit(self):
        """
        Allows the python code to correctly handle KeyboardInterrupt
        more quickly.
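        The page's JavaScript is expected to call this periodically (an
        assumption about static/index.html) so that Ctrl-C in the terminal
        is serviced promptly.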

        :return: None
        """
        pass


# create a webview and load the index.html page
webview.create_window("bsky posts", "static/index.html",
                      js_api=Api(), width=320, height=240)
webview.start()
# webview.start(debug=True)  # use this one to enable chromium dev tools to see console.log() output from the page.
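# Note: the relative paths used above (.data/ and static/imgs/) assume the
# script is launched from the project directory and that those folders exist.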