|
| 1 | +# SPDX-FileCopyrightText: 2024 Tim Cocks |
| 2 | +# |
| 3 | +# SPDX-License-Identifier: MIT |
| 4 | +""" |
| 5 | +Bluesky_RPi_TFT_Scroller code.py |
| 6 | +Infinitely scroll Bluesky posts on a 320x240 pixel TFT |
| 7 | +""" |
| 8 | +import json |
| 9 | +import os |
| 10 | +import sys |
| 11 | + |
| 12 | +import requests |
| 13 | +import webview |
| 14 | + |
# Share link for the Bluesky Retrocomputing feed; used by fetch_data() as the
# default when neither a feed link nor search args are supplied.
FEEDLINK_RETROCOMPUTING = (
    "https://bsky.app/profile/did:plc:tbo4hkau3p2itkar2vsnb3gp/feed/aaabo5oe7bzok"
)

# Un-comment a single key inside of FETCH_ARGS and set its value to the feed, list or search
# that you want to scroll. These kwargs are passed to fetch_data() by Api.get_posts().
FETCH_ARGS = {
    # "feed_share_link": FEEDLINK_RETROCOMPUTING,
    # "feed_share_link": "https://bsky.app/profile/did:plc:463touruejpokvutnn5ikxb5/lists/3lbfdtahfzt2a", # pylint: disable=line-too-long
    # "search_args": {"q": "Adafruit", "sort": "latest"}
    "search_args": {"q": "#circuitpython", "sort": "latest"}
}
| 27 | + |
| 28 | + |
def at_feed_uri_from_share_link(share_link):
    """
    Convert a Bluesky front-end share link into an AT URI for the
    same resource.

    :param share_link: The share link to convert.
    :return str: The AT URI pointing at the resource.
    """
    at_uri = share_link.replace("https://bsky.app/profile/", "at://")
    # Map the web path segment to the corresponding AT namespace segment.
    segment_map = (
        ("/feed/", "/app.bsky.feed.generator/"),
        ("/lists/", "/app.bsky.graph.list/"),
    )
    for web_segment, at_segment in segment_map:
        if web_segment in share_link:
            at_uri = at_uri.replace(web_segment, at_segment)
    return at_uri
| 42 | + |
| 43 | + |
def fetch_data(feed_share_link=None, search_args=None):
    """
    Fetch posts from Bluesky API and write them into the local cached
    data files. After posts are written locally iterates over them
    and downloads the relevant photos from them.

    Must pass either feed_share_link or search_args. If both are given,
    the search request is issued last and its response wins.

    :param feed_share_link: The link copied from Bluesky front end to share the feed or list.
    :param search_args: A dictionary containing at minimum a ``q`` key with string value of
        the hashtag or term to search for. See bsky API docs for other supported keys.
    :return: None
    :raises ValueError: If the share link is neither a feed nor a list link.
    """
    # pylint: disable=too-many-statements,too-many-branches
    if feed_share_link is None and search_args is None:
        # If both inputs are None, just use retrocomputing feed.
        feed_share_link = FEEDLINK_RETROCOMPUTING

    headers = {"Accept-Language": "en"}
    resp = None

    # if a share link input was provided
    if feed_share_link is not None:
        feed_at = at_feed_uri_from_share_link(feed_share_link)

        # if it's a feed
        if "/app.bsky.feed.generator/" in feed_at:
            url = ("https://public.api.bsky.app/xrpc/app.bsky.feed.getFeed?"
                   f"feed={feed_at}&limit=30")
            # timeout so a dead network can't hang the UI forever
            resp = requests.get(url, headers=headers, timeout=30)

        # if it's a list
        elif "/app.bsky.graph.list/" in feed_at:
            url = ("https://public.api.bsky.app/xrpc/app.bsky.feed.getListFeed?"
                   f"list={feed_at}&limit=30")
            resp = requests.get(url, headers=headers, timeout=30)

        # raise error if it's an unknown type
        else:
            raise ValueError(
                "Only 'app.bsky.feed.generator' and 'app.bsky.graph.list' URIs are supported."
            )

    # if a search input was provided (issued last, so it overrides a share link)
    if search_args is not None:
        url = "https://public.api.bsky.app/xrpc/app.bsky.feed.searchPosts"
        resp = requests.get(url, headers=headers, params=search_args, timeout=30)

    with open(".data/raw_data.json", "wb") as f:
        # write raw response to cache
        f.write(resp.content)

    # Process the post data into a smaller subset
    # containing just the bits we need for showing
    # on the TFT.
    resp_json = json.loads(resp.text)
    processed_posts = {"posts": []}
    # Feed/list responses nest posts under "feed"; search responses use
    # "posts". Default to an empty list so an unexpected payload (e.g. an
    # API error object) doesn't crash the loop below.
    fetched_posts = resp_json.get("feed", resp_json.get("posts", []))

    for post in fetched_posts:
        cur_post = {}
        if "post" in post:
            # feed/list entries wrap the actual post one level deeper
            post = post["post"]
        cur_post["author"] = post["author"]["handle"]
        cur_post["text"] = post["record"]["text"]

        # image handling: pick the first usable thumbnail variant
        if "embed" in post:
            cid = post["cid"]
            if "images" in post["embed"]:
                cur_post["image_url"] = post["embed"]["images"][0]["thumb"]
            elif "thumbnail" in post["embed"]:
                cur_post["image_url"] = post["embed"]["thumbnail"]
            elif (
                "external" in post["embed"]
                and "thumb" in post["embed"]["external"]
            ):
                cur_post["image_url"] = post["embed"]["external"]["thumb"]

            # if we actually have an image to show
            if "image_url" in cur_post:
                # check if we already downloaded this image (cached by post cid)
                if f"{cid}.jpg" not in os.listdir("static/imgs/"):
                    print(f"downloading: {cur_post['image_url']}")

                    # download image and write to file
                    img_resp = requests.get(cur_post["image_url"], timeout=30)
                    with open(f"static/imgs/{cid}.jpg", "wb") as f:
                        f.write(img_resp.content)

                cur_post["image_file"] = f"{cid}.jpg"
        processed_posts["posts"].append(cur_post)

    # save the processed data to a file
    with open(".data/processed_data.json", "w", encoding="utf-8") as f:
        f.write(json.dumps(processed_posts))
| 145 | + |
| 146 | + |
def read_cached_data():
    """
    Load the cached processed data file and return
    the data from within it.

    :return: The posts data loaded from JSON
    """
    # explicit encoding to match the utf-8 writer in fetch_data();
    # the platform default could mangle non-ASCII post text
    with open(".data/processed_data.json", "r", encoding="utf-8") as f:
        return json.load(f)
| 156 | + |
| 157 | + |
class Api:
    """
    API object for interaction between python code here
    and JS code running inside the page.
    """

    # pylint: disable=no-self-use
    def get_posts(self):
        """
        Fetch new posts data from Bluesky API, cache and return it.

        :return: Processed data containing everything necessary to show
            posts on the TFT.
        """
        fetch_data(**FETCH_ARGS)
        return read_cached_data()

    def check_quit(self):
        """
        Allows the python code to correctly handle KeyboardInterrupt
        more quickly.

        :return: None
        """
        # pylint: disable=unnecessary-pass
        pass

    def quit(self):
        """
        Close the webview window and exit the process. Intended to be
        called from the JS side of the page.

        :return: None
        """
        # `window` is the module-level webview window created below
        window.destroy()
        sys.exit(0)
| 187 | + |
| 188 | + |
# create a webview and load the index.html page; js_api exposes the Api
# instance's methods to the JavaScript running inside the page. The
# 320x240 size matches the target TFT display.
window = webview.create_window(
    "bsky posts", "static/index.html", js_api=Api(), width=320, height=240,
    x=0, y=0, frameless=True, fullscreen=True

)
# blocks here until the window is closed
webview.start()
# webview.start(debug=True) # use this one to enable chromium dev tools to see console.log() output from the page.
0 commit comments