social media crossposting tool. 3rd time's the charm
mastodon misskey crossposting bluesky

initial commit

+4084
+26
.dockerignore
··· 1 + # Python-generated files 2 + __pycache__/ 3 + *.py[oc] 4 + build/ 5 + dist/ 6 + wheels/ 7 + *.egg-info 8 + .pytest_cache/ 9 + .venv/ 10 + .mypy_cache/ 11 + .ruff_cache/ 12 + .coverage 13 + htmlcov/ 14 + 15 + # Random junk 16 + .env 17 + .env.* 18 + .DS_Store 19 + data/ 20 + testdata/ 21 + 22 + # IDE 23 + .idea/ 24 + .vscode/ 25 + *.swp 26 + *.swo
+26
.gitignore
··· 1 + # Python-generated files 2 + __pycache__/ 3 + *.py[oc] 4 + build/ 5 + dist/ 6 + wheels/ 7 + *.egg-info 8 + .pytest_cache/ 9 + .venv/ 10 + .mypy_cache/ 11 + .ruff_cache/ 12 + .coverage 13 + htmlcov/ 14 + 15 + # Random junk 16 + .env 17 + .env.* 18 + .DS_Store 19 + data/ 20 + testdata/ 21 + 22 + # IDE 23 + .idea/ 24 + .vscode/ 25 + *.swp 26 + *.swo
+1
.python-version
··· 1 + 3.12
+50
.tangled/workflows/build-images.yml
··· # CI workflow: build amd64/arm64 container images with kaniko and stitch
# them into a single multi-arch manifest with regctl.
# NOTE(review): assumes a GHCR_PAT credential is injected into the step
# environment by the CI runner — confirm it has write:packages scope.
when:
  - event: ["push", "manual"]
    branch: master

engine: nixery

dependencies:
  nixpkgs:
    - kaniko
    - regctl

environment:
  GHCR_USER: "zenfyrdev"

steps:
  # Write docker and regctl credential files so later steps can push images.
  # NOTE(review): `base64 -w0` is GNU coreutils syntax — verify the runner image.
  - name: create auth configs
    command: |
      mkdir -p $HOME/.docker $HOME/.regctl

      cat > $HOME/.docker/config.json <<EOF
      {"auths": {"ghcr.io": {"auth": "$(echo -n "$GHCR_USER:$GHCR_PAT" | base64 -w0)"}}}
      EOF

      cat > $HOME/.regctl/config.json <<EOF
      {"hosts": {"ghcr.io": {"user": "$GHCR_USER","pass": "$GHCR_PAT"}}}
      EOF

  # Per-arch builds with kaniko (`executor`), each pushed under an arch tag.
  - name: build amd64
    command: |
      executor \
        --context=dir://. \
        --dockerfile=Containerfile \
        --verbosity=info \
        --destination=ghcr.io/$GHCR_USER/xpost:amd64-latest \
        --custom-platform=linux/amd64

  - name: build arm64
    command: |
      executor \
        --context=dir://. \
        --dockerfile=Containerfile \
        --verbosity=info \
        --destination=ghcr.io/$GHCR_USER/xpost:arm64-latest \
        --custom-platform=linux/arm64

  # Combine both arch-tagged images under the :latest multi-arch index.
  - name: tag latest artifact
    command: |
      regctl index create ghcr.io/$GHCR_USER/xpost:latest \
        --ref ghcr.io/$GHCR_USER/xpost:amd64-latest --platform linux/amd64 \
        --ref ghcr.io/$GHCR_USER/xpost:arm64-latest --platform linux/arm64
+41
Containerfile
··· # Runtime image: Python 3.12 on Alpine with uv copied in from the official image.
FROM python:3.12-alpine
COPY --from=ghcr.io/astral-sh/uv:0.7.12 /uv /uvx /bin/

# Install build tools & runtime dependencies
# (ffmpeg/libmagic are needed for media crossposting per the README)
RUN apk add --no-cache \
    ffmpeg \
    file \
    libmagic

RUN mkdir -p /app/data
WORKDIR /app

# switch to a non-root user
RUN adduser -D -u 1000 app && \
    chown -R app:app /app
USER app

# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1

# Copy from the cache instead of linking since it's a mounted volume
ENV UV_LINK_MODE=copy

# Install the project's dependencies using the lockfile and settings
# NOTE(review): this RUN executes as user `app` (uid 1000) but the cache mount
# targets /root/.cache/uv; a non-root uv resolves its cache under $HOME/.cache/uv,
# so this mount is likely never hit — confirm and retarget (or set UV_CACHE_DIR).
COPY ./uv.lock ./pyproject.toml /app/
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --locked --no-install-project --no-dev

# Define app data volume
VOLUME /app/data

# Then, add the rest of the project source code and install it
# NOTE(review): COPY without --chown creates root-owned files even after USER;
# fine if the app only reads its sources — verify nothing writes outside /app/data.
COPY . /app
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --locked --no-dev

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

# Set entrypoint to run the app using uv
ENTRYPOINT ["uv", "run", "main.py"]
+21
LICENSE
··· 1 + MIT License 2 + 3 + Copyright (c) 2025 4 + 5 + Permission is hereby granted, free of charge, to any person obtaining a copy 6 + of this software and associated documentation files (the "Software"), to deal 7 + in the Software without restriction, including without limitation the rights 8 + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 + copies of the Software, and to permit persons to whom the Software is 10 + furnished to do so, subject to the following conditions: 11 + 12 + The above copyright notice and this permission notice shall be included in all 13 + copies or substantial portions of the Software. 14 + 15 + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 + SOFTWARE.
+174
README.md
··· 1 + # XPost 2 + 3 + XPost is a social media cross-posting tool that differs from others by using streaming APIs to allow instant, zero-input cross-posting. this means you can continue posting on your preferred platform without using special apps. 4 + 5 + XPost tries to support as many features as possible. for example, when cross-posting from mastodon to bluesky, unsupported file types will be attached as links. posts with mixed media or too many files will be split and spread across text. 6 + 7 + the tool may undergo breaking changes as new features are added, so proceed with caution when deploying. 8 + 9 + # Installation 10 + 11 + ## Native 12 + 13 + first install `ffmpeg`, `ffprobe` and `libmagic`, make sure that `ffmpeg` is available on PATH! `ffmpeg` and `libmagic` are required to crosspost media. 14 + 15 + then get [uv](https://github.com/astral-sh/uv) and sync the project 16 + 17 + ``` 18 + uv sync 19 + ``` 20 + 21 + generate settings.json on first launch 22 + 23 + ``` 24 + uv run main.py 25 + ``` 26 + 27 + ## Docker Compose 28 + 29 + the official image is available on [docker hub](https://hub.docker.com/r/melontini/xpost). example `compose.yaml`. this assumes that data dir is `./data`, and env file is `./.config/docker.env`. add `:Z` to volume mounts for podman. 30 + 31 + ```yaml 32 + services: 33 + xpost: 34 + image: melontini/xpost:latest 35 + restart: unless-stopped 36 + env_file: ./.config/docker.env 37 + volumes: 38 + - ./data:/app/data 39 + ``` 40 + 41 + # Settings 42 + 43 + the tool allows you to specify an input and multiple outputs to post to. 44 + 45 + some options accept an envvar syntax: 46 + 47 + ```json 48 + { 49 + "token": "env:TOKEN" 50 + } 51 + ``` 52 + 53 + ## Inputs 54 + 55 + all inputs have common options. 
56 + 57 + ```json5 58 + { 59 + "options": { 60 + "regex_filters": [ //posts matching any of the following regexes will be skipped 61 + "(?i)\\b(?:test|hello|hi)\\b" 62 + ] 63 + } 64 + } 65 + ``` 66 + 67 + ### Bluesky Jetstream 68 + 69 + listens to repo operation events emitted by Jetstream. handle becomes optional if you specify a DID. 70 + 71 + ```json5 72 + { 73 + "type": "bluesky-jetstream-wss", 74 + "handle": "env:BLUESKY_HANDLE", // handle (e.g. melontini.me) 75 + "did": "env:BLUESKY_DID", // use a DID instead of handle (avoids handle resolution) 76 + "jetstream": "wss://jetstream2.us-east.bsky.network/subscribe" //optional, change jetstream endpoint 77 + } 78 + ``` 79 + 80 + ### Mastodon WebSocket `mastodon-wss` 81 + 82 + listens to the user's home timeline for new posts, crossposts only the public/unlisted ones by the user. 83 + 84 + ```json5 85 + { 86 + "type": "mastodon-wss", // type 87 + "instance": "env:MASTODON_INSTANCE", // mastodon api compatible instance 88 + "token": "env:MASTODON_TOKEN", // Must be a mastodon token. get from something like phanpy + webtools. or https://getauth.thms.uk/?client_name=xpost&scopes=read:statuses%20write:statuses%20profile but doesn't work with all software 89 + "options": { 90 + "allowed_visibility": [ 91 + "public", 92 + "unlisted" 93 + ] 94 + } 95 + } 96 + ``` 97 + 98 + any instance implementing `/api/v1/instance`, `/api/v1/accounts/verify_credentials` and `/api/v1/streaming?stream` will work fine. 99 + 100 + confirmed supported: 101 + - Mastodon 102 + - Iceshrimp.NET 103 + - Akkoma 104 + 105 + confirmed unsupported: 106 + - Mitra 107 + - Sharkey 108 + 109 + ### Misskey WebSocket 110 + 111 + listens to the homeTimeline channel for new posts, crossposts only the public/home ones by the user. 112 + 113 + **IMPORTANT**: Misskey WSS does not support deletes, you must delete posts manually. if you know how i can listen to all note events, i would appreciate your help. 
114 + 115 + ```json5 116 + { 117 + "type": "misskey-wss", // type 118 + "instance": "env:MISSKEY_INSTANCE", // misskey instance 119 + "token": "env:MISSKEY_TOKEN", // access token with the `View your account information` scope 120 + "options": { 121 + "allowed_visibility": [ 122 + "public", 123 + "home" 124 + ] 125 + } 126 + } 127 + ``` 128 + 129 + Misskey API is not very good, this also wasn't tested on vanilla misskey. 130 + 131 + confirmed supported: 132 + - Sharkey 133 + 134 + ## Outputs 135 + 136 + ### Mastodon API 137 + 138 + no remarks. 139 + 140 + ```json5 141 + { 142 + "type": "mastodon", 143 + "token": "env:MASTODON_TOKEN", // Must be a mastodon token. get from something like phanpy + webtools. or https://getauth.thms.uk/?client_name=xpost&scopes=read%20write%20profile but doesn't work with all software 144 + "instance": "env:MASTODON_INSTANCE", // mastodon api compatible instance 145 + "options": { 146 + "visibility": "public" 147 + } 148 + } 149 + ``` 150 + 151 + ### Bluesky 152 + 153 + in the bluesky block, you can configure who is allowed to reply to and quote the new posts. handle becomes optional if you specify a DID. 154 + 155 + ```json5 156 + { 157 + "type": "bluesky", // type 158 + "handle": "env:BLUESKY_HANDLE", // handle (e.g. melontini.me) 159 + "app_password": "env:BLUESKY_APP_PASSWORD", // https://bsky.app/settings/app-passwords 160 + "did": "env:BLUESKY_DID", // use a DID instead of handle (avoids handle resolution) 161 + "pds": "env:BLUESKY_PDS", // specify your PDS directly (avoids DID doc lookup) 162 + "bsky_appview": "env:BLUESKY_APPVIEW", // bypass suspensions by specifying a different appview (e.g. did:web:bsky.zeppelin.social) 163 + "options": { 164 + "encode_videos": true, // bluesky only accepts mp4 videos, try to convert if the video is not mp4 165 + "quote_gate": false, // block users from quoting the post 166 + "thread_gate": [ // block replies. 
leave empty to disable replies 167 + "mentioned", 168 + "following", 169 + "followers", 170 + "everybody" // allow everybody to reply (ignores other options) 171 + ] 172 + } 173 + } 174 + ```
+196
bluesky/atproto2.py
"""Extensions over the atproto Client: identity resolution helpers and a
client subclass that supports self labels, custom timestamps and gates."""

from typing import Any

from atproto import AtUri, Client, IdResolver, client_utils
from atproto_client import models

from util.util import LOGGER


def resolve_identity(
    handle: str | None = None, did: str | None = None, pds: str | None = None
):
    """Resolve a ``(did, pds)`` pair from whatever identity info was provided.

    A valid handle alone is enough: it is resolved to a DID, and the DID
    document is fetched to discover the PDS endpoint. Passing ``did`` skips
    handle resolution; passing both ``did`` and ``pds`` skips all network
    lookups. A single trailing slash is stripped from the returned PDS.

    Raises:
        Exception: if neither handle nor DID is given, or any resolution
            step (handle -> DID, DID -> doc, doc -> PDS) fails.
    """

    if did and pds:
        return did, pds[:-1] if pds.endswith("/") else pds

    resolver = IdResolver()
    if not did:
        if not handle:
            raise Exception("ATP handle not specified!")
        LOGGER.info("Resolving ATP identity for %s...", handle)
        did = resolver.handle.resolve(handle)
        if not did:
            raise Exception("Failed to resolve DID!")

    if not pds:
        LOGGER.info("Resolving PDS from DID document...")
        did_doc = resolver.did.resolve(did)
        if not did_doc:
            # BUG FIX: the original passed ("...'%s'", did) to Exception,
            # which stores a tuple and never interpolates (%-style lazy args
            # only work for logging calls). Format the message explicitly.
            raise Exception(f"Failed to resolve DID doc for '{did}'")
        pds = did_doc.get_pds_endpoint()
        if not pds:
            raise Exception("Failed to resolve PDS!")

    return did, pds[:-1] if pds.endswith("/") else pds


class Client2(Client):
    """``atproto.Client`` with label-aware send_* helpers and gate creation."""

    def __init__(self, base_url: str | None = None, *args: Any, **kwargs: Any) -> None:
        super().__init__(base_url, *args, **kwargs)

    def send_video(
        self,
        text: str | client_utils.TextBuilder,
        video: bytes,
        video_alt: str | None = None,
        video_aspect_ratio: models.AppBskyEmbedDefs.AspectRatio | None = None,
        reply_to: models.AppBskyFeedPost.ReplyRef | None = None,
        langs: list[str] | None = None,
        facets: list[models.AppBskyRichtextFacet.Main] | None = None,
        labels: models.ComAtprotoLabelDefs.SelfLabels | None = None,
        time_iso: str | None = None,
    ) -> models.AppBskyFeedPost.CreateRecordResponse:
        """Same as ``Client.send_video``, but with self labels and a custom
        ``created_at`` timestamp (``time_iso``)."""

        if video_alt is None:
            video_alt = ""

        upload = self.upload_blob(video)

        return self.send_post(
            text,
            reply_to=reply_to,
            embed=models.AppBskyEmbedVideo.Main(
                video=upload.blob, alt=video_alt, aspect_ratio=video_aspect_ratio
            ),
            langs=langs,
            facets=facets,
            labels=labels,
            time_iso=time_iso,
        )

    def send_images(
        self,
        text: str | client_utils.TextBuilder,
        images: list[bytes],
        image_alts: list[str] | None = None,
        image_aspect_ratios: list[models.AppBskyEmbedDefs.AspectRatio] | None = None,
        reply_to: models.AppBskyFeedPost.ReplyRef | None = None,
        langs: list[str] | None = None,
        facets: list[models.AppBskyRichtextFacet.Main] | None = None,
        labels: models.ComAtprotoLabelDefs.SelfLabels | None = None,
        time_iso: str | None = None,
    ) -> models.AppBskyFeedPost.CreateRecordResponse:
        """Same as ``Client.send_images``, but with self labels and a custom
        ``created_at`` timestamp (``time_iso``)."""

        # Pad alts/aspect-ratios so each image has a (possibly empty) entry;
        # zip() below truncates any excess entries.
        if image_alts is None:
            image_alts = [""] * len(images)
        else:
            diff = len(images) - len(image_alts)
            image_alts = image_alts + [""] * diff

        if image_aspect_ratios is None:
            aligned_image_aspect_ratios = [None] * len(images)
        else:
            diff = len(images) - len(image_aspect_ratios)
            aligned_image_aspect_ratios = image_aspect_ratios + [None] * diff

        uploads = [self.upload_blob(image) for image in images]

        embed_images = [
            models.AppBskyEmbedImages.Image(
                alt=alt, image=upload.blob, aspect_ratio=aspect_ratio
            )
            for alt, upload, aspect_ratio in zip(
                image_alts, uploads, aligned_image_aspect_ratios
            )
        ]

        return self.send_post(
            text,
            reply_to=reply_to,
            embed=models.AppBskyEmbedImages.Main(images=embed_images),
            langs=langs,
            facets=facets,
            labels=labels,
            time_iso=time_iso,
        )

    def send_post(
        self,
        text: str | client_utils.TextBuilder,
        reply_to: models.AppBskyFeedPost.ReplyRef | None = None,
        embed: None
        | models.AppBskyEmbedImages.Main
        | models.AppBskyEmbedExternal.Main
        | models.AppBskyEmbedRecord.Main
        | models.AppBskyEmbedRecordWithMedia.Main
        | models.AppBskyEmbedVideo.Main = None,
        langs: list[str] | None = None,
        facets: list[models.AppBskyRichtextFacet.Main] | None = None,
        labels: models.ComAtprotoLabelDefs.SelfLabels | None = None,
        time_iso: str | None = None,
    ) -> models.AppBskyFeedPost.CreateRecordResponse:
        """Same as ``Client.send_post``, but with self labels and a custom
        ``created_at`` timestamp.

        Note: a ``TextBuilder`` overrides any explicitly-passed ``facets``.
        ``langs`` defaults to ``["en"]`` when empty.
        """

        if isinstance(text, client_utils.TextBuilder):
            facets = text.build_facets()
            text = text.build_text()

        repo = self.me and self.me.did
        if not repo:
            raise Exception("Client not logged in!")

        if not langs:
            langs = ["en"]

        record = models.AppBskyFeedPost.Record(
            created_at=time_iso or self.get_current_time_iso(),
            text=text,
            reply=reply_to or None,
            embed=embed or None,
            langs=langs,
            facets=facets or None,
            labels=labels or None,
        )
        return self.app.bsky.feed.post.create(repo, record)

    def create_gates(
        self,
        thread_gate_opts: list[str],
        quote_gate: bool,
        post_uri: str,
        time_iso: str | None = None,
    ):
        """Create threadgate/postgate records for an existing post.

        ``thread_gate_opts`` may contain "mentioned", "following", "followers"
        or "everybody"; "everybody" disables the threadgate entirely, while an
        empty list disables replies. ``quote_gate=True`` blocks quoting.
        """
        account = self.me
        if not account:
            raise Exception("Client not logged in!")

        # Gate records must share the post's rkey.
        rkey = AtUri.from_str(post_uri).rkey
        time_iso = time_iso or self.get_current_time_iso()

        if "everybody" not in thread_gate_opts:
            allow = []
            if thread_gate_opts:
                if "following" in thread_gate_opts:
                    allow.append(models.AppBskyFeedThreadgate.FollowingRule())
                if "followers" in thread_gate_opts:
                    allow.append(models.AppBskyFeedThreadgate.FollowerRule())
                if "mentioned" in thread_gate_opts:
                    allow.append(models.AppBskyFeedThreadgate.MentionRule())

            thread_gate = models.AppBskyFeedThreadgate.Record(
                post=post_uri, created_at=time_iso, allow=allow
            )

            self.app.bsky.feed.threadgate.create(account.did, thread_gate, rkey)

        if quote_gate:
            post_gate = models.AppBskyFeedPostgate.Record(
                post=post_uri,
                created_at=time_iso,
                embedding_rules=[models.AppBskyFeedPostgate.DisableRule()],
            )

            self.app.bsky.feed.postgate.create(account.did, post_gate, rkey)
+199
bluesky/common.py
"""Shared Bluesky helpers: the cross.Post wrapper, facet tokenization of raw
post records, and rich-text building for outgoing posts."""

import re

from atproto import client_utils

import cross
from util.media import MediaInfo
from util.util import canonical_label

# only for lexicon reference
SERVICE = "https://bsky.app"

# TODO this is terrible and stupid
# Keyword heuristics for mapping free-text content warnings to self labels.
ADULT_PATTERN = re.compile(
    r"\b(sexual content|nsfw|erotic|adult only|18\+)\b", re.IGNORECASE
)
PORN_PATTERN = re.compile(r"\b(porn|yiff|hentai|pornographic|fetish)\b", re.IGNORECASE)


class BlueskyPost(cross.Post):
    """cross.Post view over a raw ``app.bsky.feed.post`` record dict.

    ``record`` is expected to carry an injected ``$xpost.strongRef`` entry
    holding the record's at:// uri (added by the input before construction).
    """

    def __init__(
        self, record: dict, tokens: list[cross.Token], attachments: list[MediaInfo]
    ) -> None:
        super().__init__()
        self.uri = record["$xpost.strongRef"]["uri"]
        self.parent_uri = None
        if record.get("reply"):
            # direct parent of the reply (not the thread root)
            self.parent_uri = record["reply"]["parent"]["uri"]

        self.tokens = tokens
        self.timestamp = record["createdAt"]
        labels = record.get("labels", {}).get("values")
        self.spoiler = None
        if labels:
            # join self-label values into a readable CW string,
            # e.g. "graphic-media" -> "graphic media"
            self.spoiler = ", ".join(
                [str(label["val"]).replace("-", " ") for label in labels]
            )

        self.attachments = attachments
        self.languages = record.get("langs", [])

    # at:// of the post record
    def get_id(self) -> str:
        return self.uri

    def get_parent_id(self) -> str | None:
        return self.parent_uri

    def get_tokens(self) -> list[cross.Token]:
        return self.tokens

    def get_text_type(self) -> str:
        # Bluesky post text is plain text; formatting lives in facets.
        return "text/plain"

    def get_timestamp(self) -> str:
        return self.timestamp

    def get_attachments(self) -> list[MediaInfo]:
        return self.attachments

    def get_spoiler(self) -> str | None:
        return self.spoiler

    def get_languages(self) -> list[str]:
        return self.languages

    def is_sensitive(self) -> bool:
        # any self label is treated as sensitive
        return self.spoiler is not None

    def get_post_url(self) -> str | None:
        # at://<did>/<collection>/<rkey> -> public bsky.app permalink
        did, _, post_id = str(self.uri[len("at://") :]).split("/")

        return f"https://bsky.app/profile/{did}/post/{post_id}"


def tokenize_post(post: dict) -> list[cross.Token]:
    """Convert a raw post record's text + facets into cross tokens.

    Facet indices are byte offsets into the UTF-8 encoding of the text, so
    all slicing happens on bytes and segments are decoded individually.
    Overlapping facets are dropped (first-come by byte start); unknown
    feature types are ignored.
    """
    text: str = post.get("text", "")
    if not text:
        return []
    ut8_text = text.encode(encoding="utf-8")

    def decode(ut8: bytes) -> str:
        return ut8.decode(encoding="utf-8")

    facets: list[dict] = post.get("facets", [])
    if not facets:
        return [cross.TextToken(decode(ut8_text))]

    # (byteStart, byteEnd, token type, payload) per recognized feature
    slices: list[tuple[int, int, str, str]] = []

    for facet in facets:
        features: list[dict] = facet.get("features", [])
        if not features:
            continue

        # we don't support overlapping facets/features
        feature = features[0]
        feature_type = feature["$type"]
        index = facet["index"]
        match feature_type:
            case "app.bsky.richtext.facet#tag":
                slices.append(
                    (index["byteStart"], index["byteEnd"], "tag", feature["tag"])
                )
            case "app.bsky.richtext.facet#link":
                slices.append(
                    (index["byteStart"], index["byteEnd"], "link", feature["uri"])
                )
            case "app.bsky.richtext.facet#mention":
                slices.append(
                    (index["byteStart"], index["byteEnd"], "mention", feature["did"])
                )

    if not slices:
        return [cross.TextToken(decode(ut8_text))]

    # keep only non-overlapping slices, preferring the earliest start
    slices.sort(key=lambda s: s[0])
    unique: list[tuple[int, int, str, str]] = []
    current_end = 0
    for start, end, ttype, val in slices:
        if start >= current_end:
            unique.append((start, end, ttype, val))
            current_end = end

    if not unique:
        return [cross.TextToken(decode(ut8_text))]

    tokens: list[cross.Token] = []
    prev = 0  # byte offset of the first not-yet-emitted byte

    for start, end, ttype, val in unique:
        if start > prev:
            # text between facets
            tokens.append(cross.TextToken(decode(ut8_text[prev:start])))
        # facet token
        match ttype:
            case "link":
                label = decode(ut8_text[start:end])

                # try to unflatten links: when the visible label is just the
                # (possibly truncated) uri minus its scheme, drop the label so
                # outputs can render the bare uri instead
                split = val.split("://", 1)
                if len(split) > 1:
                    if split[1].startswith(label):
                        tokens.append(cross.LinkToken(val, ""))
                        prev = end
                        continue

                    if label.endswith("...") and split[1].startswith(label[:-3]):
                        tokens.append(cross.LinkToken(val, ""))
                        prev = end
                        continue

                tokens.append(cross.LinkToken(val, label))
            case "tag":
                tag = decode(ut8_text[start:end])
                tokens.append(cross.TagToken(tag[1:] if tag.startswith("#") else tag))
            case "mention":
                mention = decode(ut8_text[start:end])
                tokens.append(
                    cross.MentionToken(
                        mention[1:] if mention.startswith("@") else mention, val
                    )
                )
        prev = end

    # trailing text after the last facet
    if prev < len(ut8_text):
        tokens.append(cross.TextToken(decode(ut8_text[prev:])))

    return tokens


def tokens_to_richtext(tokens: list[cross.Token]) -> client_utils.TextBuilder | None:
    """Build an atproto rich-text TextBuilder from cross tokens.

    Returns None if any token type is unsupported (caller falls back to
    another rendering). Links whose label already equals their href are
    re-flattened to a scheme-less, 32-char-truncated display form.
    """
    builder = client_utils.TextBuilder()

    def flatten_link(href: str):
        # strip the scheme and truncate long urls for display
        split = href.split("://", 1)
        if len(split) > 1:
            href = split[1]

        if len(href) > 32:
            href = href[:32] + "..."

        return href

    for token in tokens:
        if isinstance(token, cross.TextToken):
            builder.text(token.text)
        elif isinstance(token, cross.LinkToken):
            if canonical_label(token.label, token.href):
                builder.link(flatten_link(token.href), token.href)
                continue

            builder.link(token.label, token.href)
        elif isinstance(token, cross.TagToken):
            builder.tag("#" + token.tag, token.tag.lower())
        else:
            # fail on unsupported tokens
            return None

    return builder
+203
bluesky/input.py
··· 1 + import asyncio 2 + import json 3 + import re 4 + from typing import Any, Callable 5 + 6 + import websockets 7 + from atproto_client import models 8 + from atproto_client.models.utils import get_or_create as get_model_or_create 9 + 10 + import cross 11 + import util.database as database 12 + from bluesky.atproto2 import resolve_identity 13 + from bluesky.common import SERVICE, BlueskyPost, tokenize_post 14 + from util.database import DataBaseWorker 15 + from util.media import MediaInfo, download_media 16 + from util.util import LOGGER, as_envvar 17 + 18 + 19 + class BlueskyInputOptions: 20 + def __init__(self, o: dict) -> None: 21 + self.filters = [re.compile(f) for f in o.get("regex_filters", [])] 22 + 23 + 24 + class BlueskyInput(cross.Input): 25 + def __init__(self, settings: dict, db: DataBaseWorker) -> None: 26 + self.options = BlueskyInputOptions(settings.get("options", {})) 27 + did, pds = resolve_identity( 28 + handle=as_envvar(settings.get("handle")), 29 + did=as_envvar(settings.get("did")), 30 + pds=as_envvar(settings.get("pds")), 31 + ) 32 + self.pds = pds 33 + 34 + # PDS is Not a service, the lexicon and rids are the same across pds 35 + super().__init__(SERVICE, did, settings, db) 36 + 37 + def _on_post(self, outputs: list[cross.Output], post: dict[str, Any]): 38 + post_uri = post["$xpost.strongRef"]["uri"] 39 + post_cid = post["$xpost.strongRef"]["cid"] 40 + 41 + parent_uri = None 42 + if post.get("reply"): 43 + parent_uri = post["reply"]["parent"]["uri"] 44 + 45 + embed = post.get("embed", {}) 46 + if embed.get("$type") in ( 47 + "app.bsky.embed.record", 48 + "app.bsky.embed.recordWithMedia", 49 + ): 50 + did, collection, rid = str(embed["record"]["uri"][len("at://") :]).split( 51 + "/" 52 + ) 53 + if collection == "app.bsky.feed.post": 54 + LOGGER.info("Skipping '%s'! 
Quote..", post_uri) 55 + return 56 + 57 + success = database.try_insert_post( 58 + self.db, post_uri, parent_uri, self.user_id, self.service 59 + ) 60 + if not success: 61 + LOGGER.info("Skipping '%s' as parent post was not found in db!", post_uri) 62 + return 63 + database.store_data( 64 + self.db, post_uri, self.user_id, self.service, {"cid": post_cid} 65 + ) 66 + 67 + tokens = tokenize_post(post) 68 + if not cross.test_filters(tokens, self.options.filters): 69 + LOGGER.info("Skipping '%s'. Matched a filter!", post_uri) 70 + return 71 + 72 + LOGGER.info("Crossposting '%s'...", post_uri) 73 + 74 + def get_blob_url(blob: str): 75 + return f"{self.pds}/xrpc/com.atproto.sync.getBlob?did={self.user_id}&cid={blob}" 76 + 77 + attachments: list[MediaInfo] = [] 78 + if embed.get("$type") == "app.bsky.embed.images": 79 + model = get_model_or_create(embed, model=models.AppBskyEmbedImages.Main) 80 + assert isinstance(model, models.AppBskyEmbedImages.Main) 81 + 82 + for image in model.images: 83 + url = get_blob_url(image.image.cid.encode()) 84 + LOGGER.info("Downloading %s...", url) 85 + io = download_media(url, image.alt) 86 + if not io: 87 + LOGGER.error("Skipping '%s'. Failed to download media!", post_uri) 88 + return 89 + attachments.append(io) 90 + elif embed.get("$type") == "app.bsky.embed.video": 91 + model = get_model_or_create(embed, model=models.AppBskyEmbedVideo.Main) 92 + assert isinstance(model, models.AppBskyEmbedVideo.Main) 93 + url = get_blob_url(model.video.cid.encode()) 94 + LOGGER.info("Downloading %s...", url) 95 + io = download_media(url, model.alt if model.alt else "") 96 + if not io: 97 + LOGGER.error("Skipping '%s'. 
Failed to download media!", post_uri) 98 + return 99 + attachments.append(io) 100 + 101 + cross_post = BlueskyPost(post, tokens, attachments) 102 + for output in outputs: 103 + output.accept_post(cross_post) 104 + 105 + def _on_delete_post(self, outputs: list[cross.Output], post_id: str, repost: bool): 106 + post = database.find_post(self.db, post_id, self.user_id, self.service) 107 + if not post: 108 + return 109 + 110 + LOGGER.info("Deleting '%s'...", post_id) 111 + if repost: 112 + for output in outputs: 113 + output.delete_repost(post_id) 114 + else: 115 + for output in outputs: 116 + output.delete_post(post_id) 117 + database.delete_post(self.db, post_id, self.user_id, self.service) 118 + 119 + def _on_repost(self, outputs: list[cross.Output], post: dict[str, Any]): 120 + post_uri = post["$xpost.strongRef"]["uri"] 121 + post_cid = post["$xpost.strongRef"]["cid"] 122 + 123 + reposted_uri = post["subject"]["uri"] 124 + 125 + success = database.try_insert_repost( 126 + self.db, post_uri, reposted_uri, self.user_id, self.service 127 + ) 128 + if not success: 129 + LOGGER.info("Skipping '%s' as reposted post was not found in db!", post_uri) 130 + return 131 + database.store_data( 132 + self.db, post_uri, self.user_id, self.service, {"cid": post_cid} 133 + ) 134 + 135 + LOGGER.info("Crossposting '%s'...", post_uri) 136 + for output in outputs: 137 + output.accept_repost(post_uri, reposted_uri) 138 + 139 + 140 + class BlueskyJetstreamInput(BlueskyInput): 141 + def __init__(self, settings: dict, db: DataBaseWorker) -> None: 142 + super().__init__(settings, db) 143 + self.jetstream = settings.get( 144 + "jetstream", "wss://jetstream2.us-east.bsky.network/subscribe" 145 + ) 146 + 147 + def __on_commit(self, outputs: list[cross.Output], msg: dict): 148 + if msg.get("did") != self.user_id: 149 + return 150 + 151 + commit: dict = msg.get("commit", {}) 152 + if not commit: 153 + return 154 + 155 + commit_type = commit["operation"] 156 + match commit_type: 157 + case 
"create": 158 + record = dict(commit.get("record", {})) 159 + record["$xpost.strongRef"] = { 160 + "cid": commit["cid"], 161 + "uri": f"at://{self.user_id}/{commit['collection']}/{commit['rkey']}", 162 + } 163 + 164 + match commit["collection"]: 165 + case "app.bsky.feed.post": 166 + self._on_post(outputs, record) 167 + case "app.bsky.feed.repost": 168 + self._on_repost(outputs, record) 169 + case "delete": 170 + post_id: str = ( 171 + f"at://{self.user_id}/{commit['collection']}/{commit['rkey']}" 172 + ) 173 + match commit["collection"]: 174 + case "app.bsky.feed.post": 175 + self._on_delete_post(outputs, post_id, False) 176 + case "app.bsky.feed.repost": 177 + self._on_delete_post(outputs, post_id, True) 178 + 179 + async def listen( 180 + self, outputs: list[cross.Output], submit: Callable[[Callable[[], Any]], Any] 181 + ): 182 + uri = self.jetstream + "?" 183 + uri += "wantedCollections=app.bsky.feed.post" 184 + uri += "&wantedCollections=app.bsky.feed.repost" 185 + uri += f"&wantedDids={self.user_id}" 186 + 187 + async for ws in websockets.connect( 188 + uri, extra_headers={"User-Agent": "XPost/0.0.3"} 189 + ): 190 + try: 191 + LOGGER.info("Listening to %s...", self.jetstream) 192 + 193 + async def listen_for_messages(): 194 + async for msg in ws: 195 + submit(lambda: self.__on_commit(outputs, json.loads(msg))) 196 + 197 + listen = asyncio.create_task(listen_for_messages()) 198 + 199 + await asyncio.gather(listen) 200 + except websockets.ConnectionClosedError as e: 201 + LOGGER.error(e, stack_info=True, exc_info=True) 202 + LOGGER.info("Reconnecting to %s...", self.jetstream) 203 + continue
+481
bluesky/output.py
··· 1 + from atproto import Request, client_utils 2 + from atproto_client import models 3 + from httpx import Timeout 4 + 5 + import cross 6 + import misskey.mfm_util as mfm_util 7 + import util.database as database 8 + from bluesky.atproto2 import Client2, resolve_identity 9 + from bluesky.common import ADULT_PATTERN, PORN_PATTERN, SERVICE, tokens_to_richtext 10 + from util.database import DataBaseWorker 11 + from util.media import ( 12 + MediaInfo, 13 + compress_image, 14 + convert_to_mp4, 15 + get_filename_from_url, 16 + get_media_meta, 17 + ) 18 + from util.util import LOGGER, as_envvar 19 + 20 + ALLOWED_GATES = ["mentioned", "following", "followers", "everybody"] 21 + 22 + 23 + class BlueskyOutputOptions: 24 + def __init__(self, o: dict) -> None: 25 + self.quote_gate: bool = False 26 + self.thread_gate: list[str] = ["everybody"] 27 + self.encode_videos: bool = True 28 + 29 + quote_gate = o.get("quote_gate") 30 + if quote_gate is not None: 31 + self.quote_gate = bool(quote_gate) 32 + 33 + thread_gate = o.get("thread_gate") 34 + if thread_gate is not None: 35 + if any([v not in ALLOWED_GATES for v in thread_gate]): 36 + raise ValueError( 37 + f"'thread_gate' only accepts {', '.join(ALLOWED_GATES)} or [], got: {thread_gate}" 38 + ) 39 + self.thread_gate = thread_gate 40 + 41 + encode_videos = o.get("encode_videos") 42 + if encode_videos is not None: 43 + self.encode_videos = bool(encode_videos) 44 + 45 + 46 + class BlueskyOutput(cross.Output): 47 + def __init__(self, input: cross.Input, settings: dict, db: DataBaseWorker) -> None: 48 + super().__init__(input, settings, db) 49 + self.options = BlueskyOutputOptions(settings.get("options") or {}) 50 + 51 + if not as_envvar(settings.get("app-password")): 52 + raise Exception("Account app password not provided!") 53 + 54 + did, pds = resolve_identity( 55 + handle=as_envvar(settings.get("handle")), 56 + did=as_envvar(settings.get("did")), 57 + pds=as_envvar(settings.get("pds")), 58 + ) 59 + 60 + reqs = 
Request(timeout=Timeout(None, connect=30.0)) 61 + 62 + self.bsky = Client2(pds, request=reqs) 63 + self.bsky.configure_proxy_header( 64 + service_type="bsky_appview", 65 + did=as_envvar(settings.get("bsky_appview")) or "did:web:api.bsky.app", 66 + ) 67 + self.bsky.login(did, as_envvar(settings.get("app-password"))) 68 + 69 + def __check_login(self): 70 + login = self.bsky.me 71 + if not login: 72 + raise Exception("Client not logged in!") 73 + return login 74 + 75 + def _find_parent(self, parent_id: str): 76 + login = self.__check_login() 77 + 78 + thread_tuple = database.find_mapped_thread( 79 + self.db, 80 + parent_id, 81 + self.input.user_id, 82 + self.input.service, 83 + login.did, 84 + SERVICE, 85 + ) 86 + 87 + if not thread_tuple: 88 + LOGGER.error("Failed to find thread tuple in the database!") 89 + return None 90 + 91 + root_uri: str = thread_tuple[0] 92 + reply_uri: str = thread_tuple[1] 93 + 94 + root_cid = database.fetch_data(self.db, root_uri, login.did, SERVICE)["cid"] 95 + reply_cid = database.fetch_data(self.db, root_uri, login.did, SERVICE)["cid"] 96 + 97 + root_record = models.AppBskyFeedPost.CreateRecordResponse( 98 + uri=root_uri, cid=root_cid 99 + ) 100 + reply_record = models.AppBskyFeedPost.CreateRecordResponse( 101 + uri=reply_uri, cid=reply_cid 102 + ) 103 + 104 + return ( 105 + models.create_strong_ref(root_record), 106 + models.create_strong_ref(reply_record), 107 + thread_tuple[2], 108 + thread_tuple[3], 109 + ) 110 + 111 + def _split_attachments(self, attachments: list[MediaInfo]): 112 + sup_media: list[MediaInfo] = [] 113 + unsup_media: list[MediaInfo] = [] 114 + 115 + for a in attachments: 116 + if a.mime.startswith("image/") or a.mime.startswith( 117 + "video/" 118 + ): # TODO convert gifs to videos 119 + sup_media.append(a) 120 + else: 121 + unsup_media.append(a) 122 + 123 + return (sup_media, unsup_media) 124 + 125 + def _split_media_per_post( 126 + self, tokens: list[client_utils.TextBuilder], media: list[MediaInfo] 127 + ): 128 + 
posts: list[dict] = [{"tokens": tokens, "attachments": []} for tokens in tokens] 129 + available_indices: list[int] = list(range(len(posts))) 130 + 131 + current_image_post_idx: int | None = None 132 + 133 + def make_blank_post() -> dict: 134 + return {"tokens": [client_utils.TextBuilder().text("")], "attachments": []} 135 + 136 + def pop_next_empty_index() -> int: 137 + if available_indices: 138 + return available_indices.pop(0) 139 + else: 140 + new_idx = len(posts) 141 + posts.append(make_blank_post()) 142 + return new_idx 143 + 144 + for att in media: 145 + if att.mime.startswith("video/"): 146 + current_image_post_idx = None 147 + idx = pop_next_empty_index() 148 + posts[idx]["attachments"].append(att) 149 + elif att.mime.startswith("image/"): 150 + if ( 151 + current_image_post_idx is not None 152 + and len(posts[current_image_post_idx]["attachments"]) < 4 153 + ): 154 + posts[current_image_post_idx]["attachments"].append(att) 155 + else: 156 + idx = pop_next_empty_index() 157 + posts[idx]["attachments"].append(att) 158 + current_image_post_idx = idx 159 + 160 + result: list[tuple[client_utils.TextBuilder, list[MediaInfo]]] = [] 161 + for p in posts: 162 + result.append((p["tokens"], p["attachments"])) 163 + return result 164 + 165 + def accept_post(self, post: cross.Post): 166 + login = self.__check_login() 167 + 168 + parent_id = post.get_parent_id() 169 + 170 + # used for db insertion 171 + new_root_id = None 172 + new_parent_id = None 173 + 174 + root_ref = None 175 + reply_ref = None 176 + if parent_id: 177 + parents = self._find_parent(parent_id) 178 + if not parents: 179 + return 180 + root_ref, reply_ref, new_root_id, new_parent_id = parents 181 + 182 + tokens = post.get_tokens().copy() 183 + 184 + unique_labels: set[str] = set() 185 + cw = post.get_spoiler() 186 + if cw: 187 + tokens.insert(0, cross.TextToken("CW: " + cw + "\n\n")) 188 + unique_labels.add("graphic-media") 189 + 190 + # from bsky.app, a post can only have one of those labels 191 + if 
PORN_PATTERN.search(cw):
                unique_labels.add("porn")
            elif ADULT_PATTERN.search(cw):
                unique_labels.add("sexual")

        if post.is_sensitive():
            unique_labels.add("graphic-media")

        # Build the self-label record only when there is something to label.
        labels = (
            models.ComAtprotoLabelDefs.SelfLabels(
                values=[
                    models.ComAtprotoLabelDefs.SelfLabel(val=label)
                    for label in unique_labels
                ]
            )
            if unique_labels
            else None
        )

        sup_media, unsup_media = self._split_attachments(post.get_attachments())

        # Unsupported attachments are appended as plain links back to the files.
        if unsup_media:
            if tokens:
                tokens.append(cross.TextToken("\n"))
            for i, attachment in enumerate(unsup_media):
                tokens.append(
                    cross.LinkToken(
                        attachment.url, f"[{get_filename_from_url(attachment.url)}]"
                    )
                )
                tokens.append(cross.TextToken(" "))

        # MFM cannot be represented on Bluesky; strip it and link the original.
        if post.get_text_type() == "text/x.misskeymarkdown":
            tokens, status = mfm_util.strip_mfm(tokens)
            post_url = post.get_post_url()
            if status and post_url:
                tokens.append(cross.TextToken("\n"))
                tokens.append(
                    cross.LinkToken(post_url, "[Post contains MFM, see original]")
                )

        # Bluesky's 300-grapheme limit: split into a thread of segments.
        split_tokens: list[list[cross.Token]] = cross.split_tokens(tokens, 300)
        post_text: list[client_utils.TextBuilder] = []

        # convert tokens into rich text. skip post if contains unsupported tokens
        for block in split_tokens:
            rich_text = tokens_to_richtext(block)

            if not rich_text:
                LOGGER.error(
                    "Skipping '%s' as it contains invalid rich text types!",
                    post.get_id(),
                )
                return
            post_text.append(rich_text)

        if not post_text:
            post_text = [client_utils.TextBuilder().text("")]

        # Pre-flight size checks before uploading anything.
        for m in sup_media:
            if m.mime.startswith("image/"):
                if len(m.io) > 2_000_000:
                    LOGGER.error(
                        "Skipping post_id '%s', failed to download attachment! 
File too large.",
                        post.get_id(),
                    )
                    return

            if m.mime.startswith("video/"):
                # Non-mp4 video requires re-encoding; bail if that is disabled.
                if m.mime != "video/mp4" and not self.options.encode_videos:
                    LOGGER.info(
                        "Video is not mp4, but encoding is disabled. Skipping '%s'...",
                        post.get_id(),
                    )
                    return

                if len(m.io) > 100_000_000:
                    LOGGER.error(
                        "Skipping post_id '%s', failed to download attachment! File too large?",
                        post.get_id(),
                    )
                    return

        created_records: list[models.AppBskyFeedPost.CreateRecordResponse] = []
        baked_media = self._split_media_per_post(post_text, sup_media)

        # Post each (text, attachments) segment, chaining replies as we go.
        for text, attachments in baked_media:
            if not attachments:
                if reply_ref and root_ref:
                    new_post = self.bsky.send_post(
                        text,
                        reply_to=models.AppBskyFeedPost.ReplyRef(
                            parent=reply_ref, root=root_ref
                        ),
                        labels=labels,
                        time_iso=post.get_timestamp(),
                    )
                else:
                    # First post of a fresh thread becomes the root.
                    new_post = self.bsky.send_post(
                        text, labels=labels, time_iso=post.get_timestamp()
                    )
                    root_ref = models.create_strong_ref(new_post)

                self.bsky.create_gates(
                    self.options.thread_gate,
                    self.options.quote_gate,
                    new_post.uri,
                    time_iso=post.get_timestamp(),
                )
                reply_ref = models.create_strong_ref(new_post)
                created_records.append(new_post)
            else:
                # if a single post is an image - everything else is an image
                if attachments[0].mime.startswith("image/"):
                    images: list[bytes] = []
                    image_alts: list[str] = []
                    image_aspect_ratios: list[models.AppBskyEmbedDefs.AspectRatio] = []

                    for attachment in attachments:
                        image_io = compress_image(attachment.io, quality=100)
                        metadata = get_media_meta(image_io)

                        # Re-compress harder only when over Bluesky's blob limit.
                        if len(image_io) > 1_000_000:
                            LOGGER.info("Compressing %s...", attachment.name)
                            image_io = compress_image(image_io)

                        images.append(image_io)
image_alts.append(attachment.alt) 319 + image_aspect_ratios.append( 320 + models.AppBskyEmbedDefs.AspectRatio( 321 + width=metadata["width"], height=metadata["height"] 322 + ) 323 + ) 324 + 325 + new_post = self.bsky.send_images( 326 + text=post_text[0], 327 + images=images, 328 + image_alts=image_alts, 329 + image_aspect_ratios=image_aspect_ratios, 330 + reply_to=models.AppBskyFeedPost.ReplyRef( 331 + parent=reply_ref, root=root_ref 332 + ) 333 + if root_ref and reply_ref 334 + else None, 335 + labels=labels, 336 + time_iso=post.get_timestamp(), 337 + ) 338 + if not root_ref: 339 + root_ref = models.create_strong_ref(new_post) 340 + 341 + self.bsky.create_gates( 342 + self.options.thread_gate, 343 + self.options.quote_gate, 344 + new_post.uri, 345 + time_iso=post.get_timestamp(), 346 + ) 347 + reply_ref = models.create_strong_ref(new_post) 348 + created_records.append(new_post) 349 + else: # video is guarantedd to be one 350 + metadata = get_media_meta(attachments[0].io) 351 + if metadata["duration"] > 180: 352 + LOGGER.info( 353 + "Skipping post_id '%s', video attachment too long!", 354 + post.get_id(), 355 + ) 356 + return 357 + 358 + video_io = attachments[0].io 359 + if attachments[0].mime != "video/mp4": 360 + LOGGER.info("Converting %s to mp4...", attachments[0].name) 361 + video_io = convert_to_mp4(video_io) 362 + 363 + aspect_ratio = models.AppBskyEmbedDefs.AspectRatio( 364 + width=metadata["width"], height=metadata["height"] 365 + ) 366 + 367 + new_post = self.bsky.send_video( 368 + text=post_text[0], 369 + video=video_io, 370 + video_aspect_ratio=aspect_ratio, 371 + video_alt=attachments[0].alt, 372 + reply_to=models.AppBskyFeedPost.ReplyRef( 373 + parent=reply_ref, root=root_ref 374 + ) 375 + if root_ref and reply_ref 376 + else None, 377 + labels=labels, 378 + time_iso=post.get_timestamp(), 379 + ) 380 + if not root_ref: 381 + root_ref = models.create_strong_ref(new_post) 382 + 383 + self.bsky.create_gates( 384 + self.options.thread_gate, 385 + 
self.options.quote_gate,
                        new_post.uri,
                        time_iso=post.get_timestamp(),
                    )
                    reply_ref = models.create_strong_ref(new_post)
                    created_records.append(new_post)

        # Map the created Bluesky records back to the source post in the db.
        db_post = database.find_post(
            self.db, post.get_id(), self.input.user_id, self.input.service
        )
        # NOTE(review): assert is stripped under `python -O`; consider raising.
        assert db_post, "ghghghhhhh"

        if new_root_id is None or new_parent_id is None:
            # Fresh thread: the first created record becomes the stored root.
            new_root_id = database.insert_post(
                self.db, created_records[0].uri, login.did, SERVICE
            )
            database.store_data(
                self.db,
                created_records[0].uri,
                login.did,
                SERVICE,
                {"cid": created_records[0].cid},
            )

            new_parent_id = new_root_id
            database.insert_mapping(self.db, db_post["id"], new_parent_id)
            created_records = created_records[1:]

        # Remaining records are replies chained under the root.
        for record in created_records:
            new_parent_id = database.insert_reply(
                self.db, record.uri, login.did, SERVICE, new_parent_id, new_root_id
            )
            database.store_data(
                self.db, record.uri, login.did, SERVICE, {"cid": record.cid}
            )
            database.insert_mapping(self.db, db_post["id"], new_parent_id)

    def delete_post(self, identifier: str):
        # Deletes every Bluesky record mapped from the given source post id.
        login = self.__check_login()

        post = database.find_post(
            self.db, identifier, self.input.user_id, self.input.service
        )
        if not post:
            return

        mappings = database.find_mappings(self.db, post["id"], SERVICE, login.did)
        # Delete replies before the root (reverse creation order).
        for mapping in mappings[::-1]:
            LOGGER.info("Deleting '%s'...", mapping[0])
            self.bsky.delete_post(mapping[0])
            # NOTE(review): argument order here is (service, user) while
            # __delete_repost passes (user, service) — one of the two call
            # sites likely mismatches database.delete_post's signature; verify.
            database.delete_post(self.db, mapping[0], SERVICE, login.did)

    def accept_repost(self, repost_id: str, reposted_id: str):
        # Re-crosspost: drop any stale repost mapping first, then repost anew.
        login, repost = self.__delete_repost(repost_id)
        if not (login and repost):
            return

        reposted = database.find_post(
            self.db, reposted_id, self.input.user_id, self.input.service
        )
        if not reposted:
            return

        # mappings of the reposted post
        mappings = database.find_mappings(self.db, reposted["id"], SERVICE, login.did)
        if mappings:
            # Repost the first mapped record (the thread root on Bluesky).
            cid = database.fetch_data(self.db, mappings[0][0], login.did, SERVICE)[
                "cid"
            ]
            rsp = self.bsky.repost(mappings[0][0], cid)

            internal_id = database.insert_repost(
                self.db, rsp.uri, reposted["id"], login.did, SERVICE
            )
            database.store_data(self.db, rsp.uri, login.did, SERVICE, {"cid": rsp.cid})
            database.insert_mapping(self.db, repost["id"], internal_id)

    def __delete_repost(
        self, repost_id: str
    ) -> tuple[models.AppBskyActorDefs.ProfileViewDetailed | None, dict | None]:
        """Remove the Bluesky repost mapped from repost_id.

        Returns (login, repost_row) so callers can reuse the lookups, or
        (None, None) when the source repost is unknown.
        """
        login = self.__check_login()

        repost = database.find_post(
            self.db, repost_id, self.input.user_id, self.input.service
        )
        if not repost:
            return None, None

        mappings = database.find_mappings(self.db, repost["id"], SERVICE, login.did)
        if mappings:
            LOGGER.info("Deleting '%s'...", mappings[0][0])
            self.bsky.unrepost(mappings[0][0])
            # NOTE(review): argument order (user, service) differs from
            # delete_post() above, which passes (service, user) — verify against
            # database.delete_post's signature.
            database.delete_post(self.db, mappings[0][0], login.did, SERVICE)
        return login, repost

    def delete_repost(self, repost_id: str):
        # Public wrapper; return values are only needed internally.
        self.__delete_repost(repost_id)
+237
cross.py
import re
from abc import ABC, abstractmethod
# NOTE(review): datetime/timezone appear unused in this module.
from datetime import datetime, timezone
from typing import Any, Callable

from util.database import DataBaseWorker
from util.media import MediaInfo
from util.util import LOGGER, canonical_label

# Matches alternating runs of non-whitespace / whitespace (used by split_tokens).
ALTERNATE = re.compile(r"\S+|\s+")


# generic token
class Token:
    def __init__(self, type: str) -> None:
        # discriminator: "text", "link", "tag" or "mention"
        self.type = type


class TextToken(Token):
    def __init__(self, text: str) -> None:
        super().__init__("text")
        self.text = text


# token that represents a link to a website. e.g. [link](https://google.com/)
class LinkToken(Token):
    def __init__(self, href: str, label: str) -> None:
        super().__init__("link")
        self.href = href
        self.label = label


# token that represents a hashtag. e.g. #SocialMedia
class TagToken(Token):
    def __init__(self, tag: str) -> None:
        super().__init__("tag")
        self.tag = tag


# token that represents a mention of a user.
class MentionToken(Token):
    def __init__(self, username: str, uri: str) -> None:
        super().__init__("mention")
        self.username = username
        self.uri = uri


class MediaMeta:
    # Dimensions plus duration (0 presumably for still images — TODO confirm).
    def __init__(self, width: int, height: int, duration: float) -> None:
        self.width = width
        self.height = height
        self.duration = duration

    def get_width(self) -> int:
        return self.width

    def get_height(self) -> int:
        return self.height

    def get_duration(self) -> float:
        return self.duration


# Abstract service-agnostic post, implemented per input service.
class Post(ABC):
    @abstractmethod
    def get_id(self) -> str:
        return ""

    @abstractmethod
    def get_parent_id(self) -> str | None:
        pass

    @abstractmethod
    def get_tokens(self) -> list[Token]:
        pass

    # returns input text type.
# text/plain, text/markdown, text/x.misskeymarkdown
    @abstractmethod
    def get_text_type(self) -> str:
        pass

    # post iso timestamp
    @abstractmethod
    def get_timestamp(self) -> str:
        pass

    # Optional overrides below default to "no extra data".
    def get_attachments(self) -> list[MediaInfo]:
        return []

    def get_spoiler(self) -> str | None:
        return None

    def get_languages(self) -> list[str]:
        return []

    def is_sensitive(self) -> bool:
        return False

    def get_post_url(self) -> str | None:
        return None


# generic input service.
# user and service for db queries
class Input:
    def __init__(
        self, service: str, user_id: str, settings: dict, db: DataBaseWorker
    ) -> None:
        self.service = service
        self.user_id = user_id
        self.settings = settings
        self.db = db

    # Subclasses stream events and hand work items to the handler.
    async def listen(self, outputs: list, handler: Callable[[Post], Any]):
        pass


# Base output: default implementations just log that nothing happened.
class Output:
    def __init__(self, input: Input, settings: dict, db: DataBaseWorker) -> None:
        self.input = input
        self.settings = settings
        self.db = db

    def accept_post(self, post: Post):
        LOGGER.warning('Not Implemented.. "posted" %s', post.get_id())

    def delete_post(self, identifier: str):
        LOGGER.warning('Not Implemented.. "deleted" %s', identifier)

    def accept_repost(self, repost_id: str, reposted_id: str):
        LOGGER.warning('Not Implemented.. "reblogged" %s, %s', repost_id, reposted_id)

    def delete_repost(self, repost_id: str):
        LOGGER.warning('Not Implemented.. 
"removed reblog" %s', repost_id) 136 + 137 + 138 + def test_filters(tokens: list[Token], filters: list[re.Pattern[str]]): 139 + if not tokens or not filters: 140 + return True 141 + 142 + markdown = "" 143 + 144 + for token in tokens: 145 + if isinstance(token, TextToken): 146 + markdown += token.text 147 + elif isinstance(token, LinkToken): 148 + markdown += f"[{token.label}]({token.href})" 149 + elif isinstance(token, TagToken): 150 + markdown += "#" + token.tag 151 + elif isinstance(token, MentionToken): 152 + markdown += token.username 153 + 154 + for filter in filters: 155 + if filter.search(markdown): 156 + return False 157 + 158 + return True 159 + 160 + 161 + def split_tokens( 162 + tokens: list[Token], max_chars: int, max_link_len: int = 35 163 + ) -> list[list[Token]]: 164 + def new_block(): 165 + nonlocal blocks, block, length 166 + if block: 167 + blocks.append(block) 168 + block = [] 169 + length = 0 170 + 171 + def append_text(text_segment): 172 + nonlocal block 173 + # if the last element in the current block is also text, just append to it 174 + if block and isinstance(block[-1], TextToken): 175 + block[-1].text += text_segment 176 + else: 177 + block.append(TextToken(text_segment)) 178 + 179 + blocks: list[list[Token]] = [] 180 + block: list[Token] = [] 181 + length = 0 182 + 183 + for tk in tokens: 184 + if isinstance(tk, TagToken): 185 + tag_len = 1 + len(tk.tag) # (#) + tag 186 + if length + tag_len > max_chars: 187 + new_block() # create new block if the current one is too large 188 + 189 + block.append(tk) 190 + length += tag_len 191 + elif isinstance(tk, LinkToken): # TODO labels should proably be split too 192 + link_len = len(tk.label) 193 + if canonical_label( 194 + tk.label, tk.href 195 + ): # cut down the link if the label is canonical 196 + link_len = min(link_len, max_link_len) 197 + 198 + if length + link_len > max_chars: 199 + new_block() 200 + block.append(tk) 201 + length += link_len 202 + elif isinstance(tk, TextToken): 203 + 
            segments: list[str] = ALTERNATE.findall(tk.text)

            for seg in segments:
                seg_len: int = len(seg)
                # Reserve one char for a possible hyphen on non-space segments.
                if length + seg_len <= max_chars - (0 if seg.isspace() else 1):
                    append_text(seg)
                    length += seg_len
                    continue

                if length > 0:
                    new_block()

                if not seg.isspace():
                    # Hard-wrap an over-long word, hyphenating each chunk.
                    while len(seg) > max_chars - 1:
                        chunk = seg[: max_chars - 1] + "-"
                        append_text(chunk)
                        new_block()
                        seg = seg[max_chars - 1 :]
                else:
                    # Whitespace runs are wrapped without hyphens.
                    while len(seg) > max_chars:
                        chunk = seg[:max_chars]
                        append_text(chunk)
                        new_block()
                        seg = seg[max_chars:]

                if seg:
                    append_text(seg)
                    length = len(seg)
        else:  # TODO fix mentions
            block.append(tk)

    if block:
        blocks.append(block)

    return blocks
+161
main.py
··· 1 + import asyncio 2 + import json 3 + import os 4 + import queue 5 + import threading 6 + import traceback 7 + 8 + import cross 9 + import util.database as database 10 + from bluesky.input import BlueskyJetstreamInput 11 + from bluesky.output import BlueskyOutput, BlueskyOutputOptions 12 + from mastodon.input import MastodonInput, MastodonInputOptions 13 + from mastodon.output import MastodonOutput 14 + from misskey.input import MisskeyInput 15 + from util.util import LOGGER, as_json 16 + 17 + DEFAULT_SETTINGS: dict = { 18 + "input": { 19 + "type": "mastodon-wss", 20 + "instance": "env:MASTODON_INSTANCE", 21 + "token": "env:MASTODON_TOKEN", 22 + "options": MastodonInputOptions({}), 23 + }, 24 + "outputs": [ 25 + { 26 + "type": "bluesky", 27 + "handle": "env:BLUESKY_HANDLE", 28 + "app-password": "env:BLUESKY_APP_PASSWORD", 29 + "options": BlueskyOutputOptions({}), 30 + } 31 + ], 32 + } 33 + 34 + INPUTS = { 35 + "mastodon-wss": lambda settings, db: MastodonInput(settings, db), 36 + "misskey-wss": lambda settigs, db: MisskeyInput(settigs, db), 37 + "bluesky-jetstream-wss": lambda settings, db: BlueskyJetstreamInput(settings, db), 38 + } 39 + 40 + OUTPUTS = { 41 + "bluesky": lambda input, settings, db: BlueskyOutput(input, settings, db), 42 + "mastodon": lambda input, settings, db: MastodonOutput(input, settings, db), 43 + } 44 + 45 + 46 + def execute(data_dir): 47 + if not os.path.exists(data_dir): 48 + os.makedirs(data_dir) 49 + 50 + settings_path = os.path.join(data_dir, "settings.json") 51 + database_path = os.path.join(data_dir, "data.db") 52 + 53 + if not os.path.exists(settings_path): 54 + LOGGER.info("First launch detected! 
Creating %s and exiting!", settings_path)

        with open(settings_path, "w") as f:
            f.write(as_json(DEFAULT_SETTINGS, indent=2))
        return 0

    LOGGER.info("Loading settings...")
    with open(settings_path, "rb") as f:
        settings = json.load(f)

    LOGGER.info("Starting database worker...")
    db_worker = database.DataBaseWorker(os.path.abspath(database_path))

    # Required for the REFERENCES ... ON DELETE clauses below to be enforced.
    db_worker.execute("PRAGMA foreign_keys = ON;")

    # create the posts table
    # id - internal id of the post
    # user_id - user id on the service (e.g. a724sknj5y9ydk0w)
    # service - the service (e.g. https://shrimp.melontini.me)
    # identifier - post id on the service (e.g. a8mpiyeej0fpjp0p)
    # parent_id - the internal id of the parent
    db_worker.execute(
        """
        CREATE TABLE IF NOT EXISTS posts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id TEXT NOT NULL,
            service TEXT NOT NULL,
            identifier TEXT NOT NULL,
            parent_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL,
            root_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL
        );
        """
    )

    # Poor-man's migrations: add columns introduced after the first release.
    columns = db_worker.execute("PRAGMA table_info(posts)")
    column_names = [col[1] for col in columns]
    if "reposted_id" not in column_names:
        db_worker.execute("""
        ALTER TABLE posts
        ADD COLUMN reposted_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL
        """)
    if "extra_data" not in column_names:
        db_worker.execute("""
        ALTER TABLE posts
        ADD COLUMN extra_data TEXT NULL
        """)

    # create the mappings table
    # original_post_id - the post this was mapped from
    # mapped_post_id - the post this was mapped to
    db_worker.execute(
        """
        CREATE TABLE IF NOT EXISTS mappings (
            original_post_id INTEGER NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
            mapped_post_id INTEGER NOT NULL
        );
        """
    )

    input_settings = settings.get("input")
    if
not input_settings: 115 + raise Exception("No input specified!") 116 + outputs_settings = settings.get("outputs", []) 117 + 118 + input = INPUTS[input_settings["type"]](input_settings, db_worker) 119 + 120 + if not outputs_settings: 121 + LOGGER.warning("No outputs specified! Check the config!") 122 + 123 + outputs: list[cross.Output] = [] 124 + for output_settings in outputs_settings: 125 + outputs.append( 126 + OUTPUTS[output_settings["type"]](input, output_settings, db_worker) 127 + ) 128 + 129 + LOGGER.info("Starting task worker...") 130 + 131 + def worker(queue: queue.Queue): 132 + while True: 133 + task = queue.get() 134 + if task is None: 135 + break 136 + 137 + try: 138 + task() 139 + except Exception as e: 140 + LOGGER.error(f"Exception in worker thread!\n{e}") 141 + traceback.print_exc() 142 + finally: 143 + queue.task_done() 144 + 145 + task_queue = queue.Queue() 146 + thread = threading.Thread(target=worker, args=(task_queue,), daemon=True) 147 + thread.start() 148 + 149 + LOGGER.info("Connecting to %s...", input.service) 150 + try: 151 + asyncio.run(input.listen(outputs, lambda x: task_queue.put(x))) 152 + except KeyboardInterrupt: 153 + LOGGER.info("Stopping...") 154 + 155 + task_queue.join() 156 + task_queue.put(None) 157 + thread.join() 158 + 159 + 160 + if __name__ == "__main__": 161 + execute("./data")
+52
mastodon/common.py
import cross
from util.media import MediaInfo


class MastodonPost(cross.Post):
    """Adapter exposing a Mastodon status dict through the generic Post API."""

    def __init__(
        self,
        status: dict,
        tokens: list[cross.Token],
        media_attachments: list[MediaInfo],
    ) -> None:
        super().__init__()
        # Pull everything we need out of the raw status up front.
        self.id = status["id"]
        self.parent_id = status.get("in_reply_to_id")
        self.tokens = tokens
        self.content_type = status.get("content_type", "text/plain")
        self.timestamp = status["created_at"]
        self.media_attachments = media_attachments
        self.spoiler = status.get("spoiler_text")
        lang = status.get("language")
        self.language = [lang] if lang else []
        self.sensitive = status.get("sensitive", False)
        self.url = status.get("url")

    def get_id(self) -> str:
        """Status id on the source instance."""
        return self.id

    def get_parent_id(self) -> str | None:
        """Id of the status this one replies to, if any."""
        return self.parent_id

    def get_tokens(self) -> list[cross.Token]:
        """Pre-tokenized post content."""
        return self.tokens

    def get_text_type(self) -> str:
        """MIME type of the source text (defaults to text/plain)."""
        return self.content_type

    def get_timestamp(self) -> str:
        """ISO creation timestamp as reported by the instance."""
        return self.timestamp

    def get_attachments(self) -> list[MediaInfo]:
        """Downloaded media attached to the status."""
        return self.media_attachments

    def get_spoiler(self) -> str | None:
        """Content-warning text, if the status has one."""
        return self.spoiler

    def get_languages(self) -> list[str]:
        """Zero- or one-element language list."""
        return self.language

    def is_sensitive(self) -> bool:
        """Sensitive flag, or implied by a non-empty content warning."""
        has_cw = self.spoiler is not None and self.spoiler != ""
        return self.sensitive or has_cw

    def get_post_url(self) -> str | None:
        """Public permalink of the status, when provided."""
        return self.url
+225
mastodon/input.py
import asyncio
import json
import re
from typing import Any, Callable

import requests
import websockets

import cross
import util.database as database
import util.html_util as html_util
import util.md_util as md_util
from mastodon.common import MastodonPost
from util.database import DataBaseWorker
from util.media import MediaInfo, download_media
from util.util import LOGGER, as_envvar

# Statuses with any other visibility (followers-only/direct) are skipped.
ALLOWED_VISIBILITY = ["public", "unlisted"]
MARKDOWNY = ["text/x.misskeymarkdown", "text/markdown", "text/plain"]


class MastodonInputOptions:
    # Parses/validates the "options" object of the input settings.
    def __init__(self, o: dict) -> None:
        self.allowed_visibility = ALLOWED_VISIBILITY
        self.filters = [re.compile(f) for f in o.get("regex_filters", [])]

        allowed_visibility = o.get("allowed_visibility")
        if allowed_visibility is not None:
            if any([v not in ALLOWED_VISIBILITY for v in allowed_visibility]):
                raise ValueError(
                    f"'allowed_visibility' only accepts {', '.join(ALLOWED_VISIBILITY)}, got: {allowed_visibility}"
                )
            self.allowed_visibility = allowed_visibility


class MastodonInput(cross.Input):
    def __init__(self, settings: dict, db: DataBaseWorker) -> None:
        self.options = MastodonInputOptions(settings.get("options", {}))
        # "env:NAME" values resolve via as_envvar; the generator-.throw() trick
        # raises the ValueError inline when the setting is absent.
        self.token = as_envvar(settings.get("token")) or (_ for _ in ()).throw(
            ValueError("'token' is required")
        )
        instance: str = as_envvar(settings.get("instance")) or (_ for _ in ()).throw(
            ValueError("'instance' is required")
        )

        # Normalize away a trailing slash.
        service = instance[:-1] if instance.endswith("/") else instance

        LOGGER.info("Verifying %s credentails...", service)
        responce = requests.get(
            f"{service}/api/v1/accounts/verify_credentials",
            headers={"Authorization": f"Bearer {self.token}"},
        )
        if responce.status_code != 200:
            LOGGER.error("Failed to validate user credentials!")
responce.raise_for_status() 56 + return 57 + 58 + super().__init__(service, responce.json()["id"], settings, db) 59 + self.streaming = self._get_streaming_url() 60 + 61 + if not self.streaming: 62 + raise Exception("Instance %s does not support streaming!", service) 63 + 64 + def _get_streaming_url(self): 65 + response = requests.get(f"{self.service}/api/v1/instance") 66 + response.raise_for_status() 67 + data: dict = response.json() 68 + return (data.get("urls") or {}).get("streaming_api") 69 + 70 + def __to_tokens(self, status: dict): 71 + content_type = status.get("content_type", "text/plain") 72 + raw_text = status.get("text") 73 + 74 + tags: list[str] = [] 75 + for tag in status.get("tags", []): 76 + tags.append(tag["name"]) 77 + 78 + mentions: list[tuple[str, str]] = [] 79 + for mention in status.get("mentions", []): 80 + mentions.append(("@" + mention["username"], "@" + mention["acct"])) 81 + 82 + if raw_text and content_type in MARKDOWNY: 83 + return md_util.tokenize_markdown(raw_text, tags, mentions) 84 + 85 + akkoma_ext: dict | None = status.get("akkoma", {}).get("source") 86 + if akkoma_ext: 87 + if akkoma_ext.get("mediaType") in MARKDOWNY: 88 + return md_util.tokenize_markdown(akkoma_ext["content"], tags, mentions) 89 + 90 + tokenizer = html_util.HTMLPostTokenizer() 91 + tokenizer.mentions = mentions 92 + tokenizer.tags = tags 93 + tokenizer.feed(status.get("content", "")) 94 + return tokenizer.get_tokens() 95 + 96 + def _on_create_post(self, outputs: list[cross.Output], status: dict): 97 + # skip events from other users 98 + if (status.get("account") or {})["id"] != self.user_id: 99 + return 100 + 101 + if status.get("visibility") not in self.options.allowed_visibility: 102 + # Skip f/o and direct posts 103 + LOGGER.info( 104 + "Skipping '%s'! '%s' visibility..", 105 + status["id"], 106 + status.get("visibility"), 107 + ) 108 + return 109 + 110 + # TODO polls not supported on bsky. maybe 3rd party? skip for now 111 + # we don't handle reblogs. 
possible with bridgy(?) and self
        # we don't handle quotes.
        if status.get("poll"):
            LOGGER.info("Skipping '%s'! Contains a poll..", status["id"])
            return

        if status.get("quote_id") or status.get("quote"):
            LOGGER.info("Skipping '%s'! Quote..", status["id"])
            return

        # Self-reblogs are mirrored as reposts; anything else is skipped.
        reblog: dict | None = status.get("reblog")
        if reblog:
            if (reblog.get("account") or {})["id"] != self.user_id:
                LOGGER.info("Skipping '%s'! Reblog of other user..", status["id"])
                return

            success = database.try_insert_repost(
                self.db, status["id"], reblog["id"], self.user_id, self.service
            )
            if not success:
                LOGGER.info(
                    "Skipping '%s' as reblogged post was not found in db!", status["id"]
                )
                return

            for output in outputs:
                output.accept_repost(status["id"], reblog["id"])
            return

        # Only self-replies are crossposted (to keep threads intact).
        in_reply: str | None = status.get("in_reply_to_id")
        in_reply_to: str | None = status.get("in_reply_to_account_id")
        if in_reply_to and in_reply_to != self.user_id:
            # We don't support replies.
            LOGGER.info("Skipping '%s'! Reply to other user..", status["id"])
            return

        success = database.try_insert_post(
            self.db, status["id"], in_reply, self.user_id, self.service
        )
        if not success:
            LOGGER.info(
                "Skipping '%s' as parent post was not found in db!", status["id"]
            )
            return

        tokens = self.__to_tokens(status)
        # User-configured regex filters veto the crosspost.
        if not cross.test_filters(tokens, self.options.filters):
            LOGGER.info("Skipping '%s'. 
Matched a filter!", status["id"])
            return

        LOGGER.info("Crossposting '%s'...", status["id"])

        # Download all attachments up front; abort the whole post on failure.
        media_attachments: list[MediaInfo] = []
        for attachment in status.get("media_attachments", []):
            LOGGER.info("Downloading %s...", attachment["url"])
            info = download_media(
                attachment["url"], attachment.get("description") or ""
            )
            if not info:
                LOGGER.error("Skipping '%s'. Failed to download media!", status["id"])
                return
            media_attachments.append(info)

        cross_post = MastodonPost(status, tokens, media_attachments)
        for output in outputs:
            output.accept_post(cross_post)

    def _on_delete_post(self, outputs: list[cross.Output], identifier: str):
        # Propagate a deletion to every output, then forget the source post.
        post = database.find_post(self.db, identifier, self.user_id, self.service)
        if not post:
            return

        LOGGER.info("Deleting '%s'...", identifier)
        if post["reposted_id"]:
            for output in outputs:
                output.delete_repost(identifier)
        else:
            for output in outputs:
                output.delete_post(identifier)

        database.delete_post(self.db, identifier, self.user_id, self.service)

    def _on_post(self, outputs: list[cross.Output], event: str, payload: str):
        # Dispatch a streaming event. "update" payloads are JSON statuses,
        # "delete" payloads are bare status ids.
        match event:
            case "update":
                self._on_create_post(outputs, json.loads(payload))
            case "delete":
                self._on_delete_post(outputs, payload)

    async def listen(
        self, outputs: list[cross.Output], submit: Callable[[Callable[[], Any]], Any]
    ):
        uri = f"{self.streaming}/api/v1/streaming?stream=user&access_token={self.token}"

        # websockets.connect used as an async iterator reconnects automatically.
        async for ws in websockets.connect(
            uri, extra_headers={"User-Agent": "XPost/0.0.3"}
        ):
            try:
                LOGGER.info("Listening to %s...", self.streaming)

                async def listen_for_messages():
                    async for msg in ws:
                        data = json.loads(msg)
                        event: str = data.get("event")
                        payload: str = data.get("payload")
217 + submit(lambda: self._on_post(outputs, str(event), str(payload))) 218 + 219 + listen = asyncio.create_task(listen_for_messages()) 220 + 221 + await asyncio.gather(listen) 222 + except websockets.ConnectionClosedError as e: 223 + LOGGER.error(e, stack_info=True, exc_info=True) 224 + LOGGER.info("Reconnecting to %s...", self.streaming) 225 + continue
+448
mastodon/output.py
import time

import requests

import cross
import misskey.mfm_util as mfm_util
import util.database as database
from util.database import DataBaseWorker
from util.media import MediaInfo
from util.util import LOGGER, as_envvar, canonical_label

# Fallback upload MIME types, used when the instance does not advertise
# media_attachments.supported_mime_types.
POSSIBLE_MIMES = [
    "audio/ogg",
    "audio/mp3",
    "image/webp",
    "image/jpeg",
    "image/png",
    "video/mp4",
    "video/quicktime",
    "video/webm",
]

# Status body content types, richest first; the first one the instance
# supports is used.
TEXT_MIMES = ["text/x.misskeymarkdown", "text/markdown", "text/plain"]

ALLOWED_POSTING_VISIBILITY = ["public", "unlisted", "private"]


class MastodonOutputOptions:
    """Validated view over the 'options' dict of a Mastodon output."""

    def __init__(self, o: dict) -> None:
        # Visibility applied to every crossposted status.
        self.visibility = "public"

        visibility = o.get("visibility")
        if visibility is not None:
            if visibility not in ALLOWED_POSTING_VISIBILITY:
                raise ValueError(
                    f"'visibility' only accepts {', '.join(ALLOWED_POSTING_VISIBILITY)}, got: {visibility}"
                )
            self.visibility = visibility


class MastodonOutput(cross.Output):
    """Crossposting target that mirrors posts to a Mastodon-compatible server.

    On construction it verifies the token and reads the instance
    configuration (character limits, media limits, supported MIME types),
    including the *oma/chuckya extensions when present.
    """

    def __init__(self, input: cross.Input, settings: dict, db: DataBaseWorker) -> None:
        super().__init__(input, settings, db)
        # Parse options through MastodonOutputOptions (previously the raw dict
        # was kept, leaving the class and its visibility validation unused).
        self.options = MastodonOutputOptions(settings.get("options") or {})
        self.token = as_envvar(settings.get("token")) or (_ for _ in ()).throw(
            ValueError("'token' is required")
        )
        instance: str = as_envvar(settings.get("instance")) or (_ for _ in ()).throw(
            ValueError("'instance' is required")
        )

        # Normalize away a single trailing slash.
        self.service = instance[:-1] if instance.endswith("/") else instance

        LOGGER.info("Verifying %s credentails...", self.service)
        response = requests.get(
            f"{self.service}/api/v1/accounts/verify_credentials",
            headers={"Authorization": f"Bearer {self.token}"},
        )
        if response.status_code != 200:
            LOGGER.error("Failed to validate user credentials!")
            response.raise_for_status()
            return
        self.user_id: str = response.json()["id"]

        LOGGER.info("Getting %s configuration...", self.service)
        response = requests.get(
            f"{self.service}/api/v1/instance",
            headers={"Authorization": f"Bearer {self.token}"},
        )
        if response.status_code != 200:
            LOGGER.error("Failed to get instance info!")
            response.raise_for_status()
            return

        instance_info: dict = response.json()
        configuration: dict = instance_info["configuration"]

        statuses_config: dict = configuration.get("statuses", {})
        self.max_characters: int = statuses_config.get("max_characters", 500)
        self.max_media_attachments: int = statuses_config.get(
            "max_media_attachments", 4
        )
        self.characters_reserved_per_url: int = statuses_config.get(
            "characters_reserved_per_url", 23
        )

        media_config: dict = configuration.get("media_attachments", {})
        self.image_size_limit: int = media_config.get("image_size_limit", 16777216)
        self.video_size_limit: int = media_config.get("video_size_limit", 103809024)
        self.supported_mime_types: list[str] = media_config.get(
            "supported_mime_types", POSSIBLE_MIMES
        )

        # *oma: max post chars
        max_toot_chars = instance_info.get("max_toot_chars")
        if max_toot_chars:
            self.max_characters: int = max_toot_chars

        # *oma: max upload limit
        upload_limit = instance_info.get("upload_limit")
        if upload_limit:
            self.image_size_limit: int = upload_limit
            self.video_size_limit: int = upload_limit

        # chuckya: supported text types
        chuckya_text_mimes: list[str] = statuses_config.get("supported_mime_types", [])
        self.text_format = next(
            (mime for mime in TEXT_MIMES if mime in (chuckya_text_mimes)), "text/plain"
        )

        # *oma ext: supported text types
        pleroma = instance_info.get("pleroma")
        if pleroma:
            post_formats: list[str] = pleroma.get("metadata", {}).get(
                "post_formats", []
            )
            self.text_format = next(
                (mime for mime in TEXT_MIMES if mime in post_formats), self.text_format
            )

    def upload_media(self, attachments: list[MediaInfo]) -> list[str] | None:
        """Upload all attachments; return their media ids, or None on any
        size-limit violation or upload failure (202s are polled until done)."""
        for a in attachments:
            if a.mime.startswith("image/") and len(a.io) > self.image_size_limit:
                return None

            if a.mime.startswith("video/") and len(a.io) > self.video_size_limit:
                return None

            # Other media (e.g. audio): conservative 7 MB cap.
            if not a.mime.startswith("image/") and not a.mime.startswith("video/"):
                if len(a.io) > 7_000_000:
                    return None

        uploads: list[dict] = []
        for a in attachments:
            data = {}
            if a.alt:
                data["description"] = a.alt

            req = requests.post(
                f"{self.service}/api/v2/media",
                headers={"Authorization": f"Bearer {self.token}"},
                files={"file": (a.name, a.io, a.mime)},
                data=data,
            )

            if req.status_code == 200:
                LOGGER.info("Uploaded %s! (%s)", a.name, req.json()["id"])
                uploads.append({"done": True, "id": req.json()["id"]})
            elif req.status_code == 202:
                # Accepted but still processing server-side.
                LOGGER.info("Waiting for %s to process!", a.name)
                uploads.append({"done": False, "id": req.json()["id"]})
            else:
                LOGGER.error("Failed to upload %s! %s", a.name, req.text)
                req.raise_for_status()

        # Poll until every async upload has finished processing.
        while any(not val["done"] for val in uploads):
            LOGGER.info("Waiting for media to process...")
            time.sleep(3)
            for media in uploads:
                if media["done"]:
                    continue

                reqs = requests.get(
                    f"{self.service}/api/v1/media/{media['id']}",
                    headers={"Authorization": f"Bearer {self.token}"},
                )

                # 206: still processing, try again on the next pass.
                if reqs.status_code == 206:
                    continue

                if reqs.status_code == 200:
                    media["done"] = True
                    continue
                reqs.raise_for_status()

        return [val["id"] for val in uploads]

    def token_to_string(self, tokens: list[cross.Token]) -> str | None:
        """Render tokens to a status body in self.text_format; None when a
        labeled link cannot be represented in the available format."""
        p_text: str = ""

        for token in tokens:
            if isinstance(token, cross.TextToken):
                p_text += token.text
            elif isinstance(token, cross.TagToken):
                p_text += "#" + token.tag
            elif isinstance(token, cross.LinkToken):
                if canonical_label(token.label, token.href):
                    # Label is just the URL itself; emit the bare href.
                    p_text += token.href
                else:
                    if self.text_format == "text/plain":
                        p_text += f"{token.label} ({token.href})"
                    elif self.text_format in {
                        "text/x.misskeymarkdown",
                        "text/markdown",
                    }:
                        p_text += f"[{token.label}]({token.href})"
                    else:
                        return None

        return p_text

    def split_tokens_media(self, tokens: list[cross.Token], media: list[MediaInfo]):
        """Split tokens into status-sized chunks and distribute attachments.

        Returns a list of (text, attachments) tuples, or None when a chunk
        cannot be rendered by token_to_string.
        """
        split_tokens = cross.split_tokens(
            tokens, self.max_characters, self.characters_reserved_per_url
        )
        texts: list[str] = []

        for block in split_tokens:
            baked_text = self.token_to_string(block)
            if baked_text is None:
                return None
            texts.append(baked_text)

        if not texts:
            texts = [""]

        posts: list[dict] = [{"text": text, "attachments": []} for text in texts]
        available_indices: list[int] = list(range(len(posts)))

        # Index of the post currently being filled with attachments.
        current_image_post_idx: int | None = None

        def make_blank_post() -> dict:
            return {"text": "", "attachments": []}

        def pop_next_empty_index() -> int:
            # Reuse an existing post if one is free, else append a blank one.
            if available_indices:
                return available_indices.pop(0)
            else:
                new_idx = len(posts)
                posts.append(make_blank_post())
                return new_idx

        for att in media:
            if (
                current_image_post_idx is not None
                and len(posts[current_image_post_idx]["attachments"])
                < self.max_media_attachments
            ):
                posts[current_image_post_idx]["attachments"].append(att)
            else:
                idx = pop_next_empty_index()
                posts[idx]["attachments"].append(att)
                current_image_post_idx = idx

        result: list[tuple[str, list[MediaInfo]]] = []
        for p in posts:
            result.append((p["text"], p["attachments"]))

        return result

    def accept_post(self, post: cross.Post):
        """Mirror a source post (possibly as a reply chain) to this server."""
        parent_id = post.get_parent_id()

        new_root_id: int | None = None
        new_parent_id: int | None = None

        reply_ref: str | None = None
        if parent_id:
            # Resolve the already-mirrored parent so we can thread correctly.
            thread_tuple = database.find_mapped_thread(
                self.db,
                parent_id,
                self.input.user_id,
                self.input.service,
                self.user_id,
                self.service,
            )

            if not thread_tuple:
                LOGGER.error("Failed to find thread tuple in the database!")
                return None

            _, reply_ref, new_root_id, new_parent_id = thread_tuple

        languages = post.get_languages()
        lang: str = languages[0] if languages else "en"

        post_tokens = post.get_tokens()
        if post.get_text_type() == "text/x.misskeymarkdown":
            # Strip MFM decorations; link back to the original if any existed.
            post_tokens, had_mfm = mfm_util.strip_mfm(post_tokens)
            post_url = post.get_post_url()
            if had_mfm and post_url:
                post_tokens.append(cross.TextToken("\n"))
                post_tokens.append(
                    cross.LinkToken(post_url, "[Post contains MFM, see original]")
                )

        raw_statuses = self.split_tokens_media(post_tokens, post.get_attachments())
        if not raw_statuses:
            LOGGER.error("Failed to split post into statuses?")
            return None
        baked_statuses = []

        for status, raw_media in raw_statuses:
            media: list[str] | None = None
            if raw_media:
                media = self.upload_media(raw_media)
                if not media:
                    LOGGER.error("Failed to upload attachments!")
                    return None
                baked_statuses.append((status, media))
                continue
            baked_statuses.append((status, []))

        created_statuses: list[str] = []

        for status, media in baked_statuses:
            payload = {
                "status": status,
                "media_ids": media or [],
                "spoiler_text": post.get_spoiler() or "",
                "visibility": self.options.visibility,
                "content_type": self.text_format,
                "language": lang,
            }

            if media:
                payload["sensitive"] = post.is_sensitive()

            if post.get_spoiler():
                payload["sensitive"] = True

            # Mastodon rejects empty bodies; use a placeholder for media-only.
            if not status:
                payload["status"] = "🖼️"

            if reply_ref:
                payload["in_reply_to_id"] = reply_ref

            reqs = requests.post(
                f"{self.service}/api/v1/statuses",
                headers={
                    "Authorization": f"Bearer {self.token}",
                    "Content-Type": "application/json",
                },
                json=payload,
            )

            if reqs.status_code != 200:
                # Posting failures are errors, not informational.
                LOGGER.error(
                    "Failed to post status! %s - %s", reqs.status_code, reqs.text
                )
                reqs.raise_for_status()

            status_id = reqs.json()["id"]
            reply_ref = status_id
            LOGGER.info("Created new status %s!", reply_ref)

            created_statuses.append(status_id)

        db_post = database.find_post(
            self.db, post.get_id(), self.input.user_id, self.input.service
        )
        assert db_post, "source post missing from database after crossposting"

        if new_root_id is None or new_parent_id is None:
            # First status of a fresh thread becomes the mirrored root.
            new_root_id = database.insert_post(
                self.db, created_statuses[0], self.user_id, self.service
            )
            new_parent_id = new_root_id
            database.insert_mapping(self.db, db_post["id"], new_parent_id)
            created_statuses = created_statuses[1:]

        for db_id in created_statuses:
            new_parent_id = database.insert_reply(
                self.db, db_id, self.user_id, self.service, new_parent_id, new_root_id
            )
            database.insert_mapping(self.db, db_post["id"], new_parent_id)

    def delete_post(self, identifier: str):
        """Delete every mirrored status for the given source identifier."""
        post = database.find_post(
            self.db, identifier, self.input.user_id, self.input.service
        )
        if not post:
            return

        mappings = database.find_mappings(
            self.db, post["id"], self.service, self.user_id
        )
        # Delete replies before roots (reverse creation order).
        for mapping in mappings[::-1]:
            LOGGER.info("Deleting '%s'...", mapping[0])
            requests.delete(
                f"{self.service}/api/v1/statuses/{mapping[0]}",
                headers={"Authorization": f"Bearer {self.token}"},
            )
            # BUG FIX: arguments were passed as (service, user_id), but
            # database.delete_post expects (identifier, user_id, service),
            # so the DB rows were never actually removed.
            database.delete_post(self.db, mapping[0], self.user_id, self.service)

    def accept_repost(self, repost_id: str, reposted_id: str):
        """Mirror a repost: undo any previous boost, then reblog the mapped
        status on this server."""
        repost = self.__delete_repost(repost_id)
        if not repost:
            return None

        reposted = database.find_post(
            self.db, reposted_id, self.input.user_id, self.input.service
        )
        if not reposted:
            return

        mappings = database.find_mappings(
            self.db, reposted["id"], self.service, self.user_id
        )
        if mappings:
            rsp = requests.post(
                f"{self.service}/api/v1/statuses/{mappings[0][0]}/reblog",
                headers={"Authorization": f"Bearer {self.token}"},
            )

            if rsp.status_code != 200:
                LOGGER.error(
                    "Failed to boost status! status_code: %s, msg: %s",
                    rsp.status_code,
                    rsp.content,
                )
                return

            internal_id = database.insert_repost(
                self.db, rsp.json()["id"], reposted["id"], self.user_id, self.service
            )
            database.insert_mapping(self.db, repost["id"], internal_id)

    def __delete_repost(self, repost_id: str) -> dict | None:
        """Unreblog the mapped status (if any) and drop its DB row.

        Returns the source repost's DB row, or None if unknown.
        """
        repost = database.find_post(
            self.db, repost_id, self.input.user_id, self.input.service
        )
        if not repost:
            return None

        mappings = database.find_mappings(
            self.db, repost["id"], self.service, self.user_id
        )
        reposted_mappings = database.find_mappings(
            self.db, repost["reposted_id"], self.service, self.user_id
        )
        if mappings and reposted_mappings:
            LOGGER.info("Deleting '%s'...", mappings[0][0])
            requests.post(
                f"{self.service}/api/v1/statuses/{reposted_mappings[0][0]}/unreblog",
                headers={"Authorization": f"Bearer {self.token}"},
            )
            database.delete_post(self.db, mappings[0][0], self.user_id, self.service)
        return repost

    def delete_repost(self, repost_id: str):
        self.__delete_repost(repost_id)
+54
misskey/common.py
import cross
from util.media import MediaInfo


class MisskeyPost(cross.Post):
    """cross.Post adapter over a raw Misskey note payload."""

    def __init__(
        self,
        instance_url: str,
        note: dict,
        tokens: list[cross.Token],
        files: list[MediaInfo],
    ) -> None:
        super().__init__()
        self.note = note
        self.id = note["id"]
        self.tokens = tokens
        self.media_attachments = files

        # Optional fields from the note payload.
        self.parent_id = note.get("replyId")
        self.timestamp = note["createdAt"]
        self.spoiler = note.get("cw")

        # Sensitive if any attached file is flagged on the source instance.
        attached_files = note.get("files", [])
        self.sensitive = any(f.get("isSensitive", False) for f in attached_files)

        self.url = instance_url + "/notes/" + note["id"]

    def get_id(self) -> str:
        return self.id

    def get_parent_id(self) -> str | None:
        return self.parent_id

    def get_tokens(self) -> list[cross.Token]:
        return self.tokens

    def get_text_type(self) -> str:
        # Misskey notes are always MFM.
        return "text/x.misskeymarkdown"

    def get_timestamp(self) -> str:
        return self.timestamp

    def get_attachments(self) -> list[MediaInfo]:
        return self.media_attachments

    def get_spoiler(self) -> str | None:
        return self.spoiler

    def get_languages(self) -> list[str]:
        # Misskey does not expose a note language.
        return []

    def is_sensitive(self) -> bool:
        # A non-empty content warning also marks the post sensitive.
        if self.sensitive:
            return True
        return self.spoiler is not None and self.spoiler != ""

    def get_post_url(self) -> str | None:
        return self.url
+202
misskey/input.py
import asyncio
import json
import re
import uuid
from typing import Any, Callable

import requests
import websockets

import cross
import util.database as database
import util.md_util as md_util
from misskey.common import MisskeyPost
from util.media import MediaInfo, download_media
from util.util import LOGGER, as_envvar

ALLOWED_VISIBILITY = ["public", "home"]


class MisskeyInputOptions:
    """Validated view over the 'options' dict of a Misskey input."""

    def __init__(self, o: dict) -> None:
        self.allowed_visibility = ALLOWED_VISIBILITY
        # Notes matching any of these patterns are not crossposted.
        self.filters = [re.compile(f) for f in o.get("regex_filters", [])]

        allowed_visibility = o.get("allowed_visibility")
        if allowed_visibility is not None:
            if any(v not in ALLOWED_VISIBILITY for v in allowed_visibility):
                raise ValueError(
                    f"'allowed_visibility' only accepts {', '.join(ALLOWED_VISIBILITY)}, got: {allowed_visibility}"
                )
            self.allowed_visibility = allowed_visibility


class MisskeyInput(cross.Input):
    """Crossposting source that streams the user's home timeline from a
    Misskey instance over its websocket streaming API."""

    def __init__(self, settings: dict, db: cross.DataBaseWorker) -> None:
        self.options = MisskeyInputOptions(settings.get("options", {}))
        self.token = as_envvar(settings.get("token")) or (_ for _ in ()).throw(
            ValueError("'token' is required")
        )
        instance: str = as_envvar(settings.get("instance")) or (_ for _ in ()).throw(
            ValueError("'instance' is required")
        )

        # Normalize away a single trailing slash.
        service = instance[:-1] if instance.endswith("/") else instance

        LOGGER.info("Verifying %s credentails...", service)
        responce = requests.post(
            f"{instance}/api/i",
            json={"i": self.token},
            headers={"Content-Type": "application/json"},
        )
        if responce.status_code != 200:
            LOGGER.error("Failed to validate user credentials!")
            responce.raise_for_status()
            return

        super().__init__(service, responce.json()["id"], settings, db)

    def _on_note(self, outputs: list[cross.Output], note: dict):
        """Filter an incoming note and forward it to every output."""
        if note["userId"] != self.user_id:
            return

        if note.get("visibility") not in self.options.allowed_visibility:
            LOGGER.info(
                "Skipping '%s'! '%s' visibility..", note["id"], note.get("visibility")
            )
            return

        # TODO polls not supported on bsky. maybe 3rd party? skip for now
        # we don't handle reblogs. possible with bridgy(?) and self
        if note.get("poll"):
            LOGGER.info("Skipping '%s'! Contains a poll..", note["id"])
            return

        renote: dict | None = note.get("renote")
        if renote:
            # A renote with text is a quote, which we don't crosspost.
            if note.get("text") is not None:
                LOGGER.info("Skipping '%s'! Quote..", note["id"])
                return

            if renote.get("userId") != self.user_id:
                LOGGER.info("Skipping '%s'! Reblog of other user..", note["id"])
                return

            success = database.try_insert_repost(
                self.db, note["id"], renote["id"], self.user_id, self.service
            )
            if not success:
                LOGGER.info(
                    "Skipping '%s' as renoted note was not found in db!", note["id"]
                )
                return

            for output in outputs:
                output.accept_repost(note["id"], renote["id"])
            return

        reply_id: str | None = note.get("replyId")
        if reply_id:
            # 'reply' may be present but null; `or {}` avoids an
            # AttributeError that note.get("reply", {}) would not.
            if (note.get("reply") or {}).get("userId") != self.user_id:
                LOGGER.info("Skipping '%s'! Reply to other user..", note["id"])
                return

        success = database.try_insert_post(
            self.db, note["id"], reply_id, self.user_id, self.service
        )
        if not success:
            LOGGER.info("Skipping '%s' as parent note was not found in db!", note["id"])
            return

        mention_handles: dict = note.get("mentionHandles") or {}
        tags: list[str] = note.get("tags") or []

        # NOTE(review): both tuple slots carry the handle and the dict key is
        # unused — confirm tokenize_markdown really expects (handle, handle).
        handles: list[tuple[str, str]] = []
        for value in mention_handles.values():
            handles.append((value, value))

        # 'text' may be an explicit null for media-only notes.
        tokens = md_util.tokenize_markdown(note.get("text") or "", tags, handles)
        if not cross.test_filters(tokens, self.options.filters):
            LOGGER.info("Skipping '%s'. Matched a filter!", note["id"])
            return

        LOGGER.info("Crossposting '%s'...", note["id"])

        media_attachments: list[MediaInfo] = []
        for attachment in note.get("files", []):
            LOGGER.info("Downloading %s...", attachment["url"])
            info = download_media(attachment["url"], attachment.get("comment") or "")
            if not info:
                LOGGER.error("Skipping '%s'. Failed to download media!", note["id"])
                return
            media_attachments.append(info)

        cross_post = MisskeyPost(self.service, note, tokens, media_attachments)
        for output in outputs:
            output.accept_post(cross_post)

    def _on_delete(self, outputs: list[cross.Output], note: dict):
        # TODO handle deletes
        pass

    def _on_message(self, outputs: list[cross.Output], data: dict):
        """Dispatch a streaming-channel message to the note handler."""
        if data["type"] == "channel":
            body_type: str = data["body"]["type"]
            if body_type == "note" or body_type == "reply":
                note_body = data["body"]["body"]
                self._on_note(outputs, note_body)

    async def _send_keepalive(self, ws: websockets.WebSocketClientProtocol):
        """Ping the server every two minutes so the socket stays open."""
        while ws.open:
            try:
                await asyncio.sleep(120)
                if ws.open:
                    await ws.send("h")
                    LOGGER.debug("Sent keepalive h..")
                else:
                    LOGGER.info("WebSocket is closed, stopping keepalive task.")
                    break
            except Exception as e:
                LOGGER.error(f"Error sending keepalive: {e}")
                break

    async def _subscribe_to_home(self, ws: websockets.WebSocketClientProtocol):
        await ws.send(
            json.dumps(
                {
                    "type": "connect",
                    "body": {"channel": "homeTimeline", "id": str(uuid.uuid4())},
                }
            )
        )
        LOGGER.info("Subscribed to 'homeTimeline' channel...")

    async def listen(
        self, outputs: list[cross.Output], submit: Callable[[Callable[[], Any]], Any]
    ):
        """Connect (and reconnect) to the streaming socket, forwarding each
        message to the executor via `submit`."""
        streaming: str = f"wss://{self.service.split('://', 1)[1]}"
        url: str = f"{streaming}/streaming?i={self.token}"

        async for ws in websockets.connect(
            url, extra_headers={"User-Agent": "XPost/0.0.3"}
        ):
            try:
                LOGGER.info("Listening to %s...", streaming)
                await self._subscribe_to_home(ws)

                async def listen_for_messages():
                    async for msg in ws:
                        # TODO listen to deletes somehow
                        # Parse eagerly and bind via default argument: a bare
                        # `lambda: ... json.loads(msg)` late-binds `msg` and
                        # could process a newer message when the executor runs.
                        data = json.loads(msg)
                        submit(lambda data=data: self._on_message(outputs, data))

                keepalive = asyncio.create_task(self._send_keepalive(ws))
                listen = asyncio.create_task(listen_for_messages())

                await asyncio.gather(keepalive, listen)
            except websockets.ConnectionClosedError as e:
                LOGGER.error(e, stack_info=True, exc_info=True)
                LOGGER.info("Reconnecting to %s...", streaming)
                continue
+38
misskey/mfm_util.py
import re

import cross

# Matches an innermost MFM decoration: $[name content]
MFM_PATTERN = re.compile(r"\$\[([^\[\]]+)\]")


def strip_mfm(tokens: list[cross.Token]) -> tuple[list[cross.Token], bool]:
    """Strip MFM `$[...]` decorations from text and link-label tokens.

    Mutates the tokens in place and returns (tokens, modified) where
    `modified` tells whether anything was actually removed.
    """
    modified = False

    for token in tokens:
        if isinstance(token, cross.TextToken):
            cleaned = __strip_mfm(token.text)
            if cleaned != token.text:
                modified = True
                token.text = cleaned

        elif isinstance(token, cross.LinkToken):
            cleaned = __strip_mfm(token.label)
            if cleaned != token.label:
                modified = True
                token.label = cleaned

    return tokens, modified


def __strip_mfm(text: str) -> str:
    """Repeatedly unwrap `$[name content]` to `content` until none remain
    (handles nesting, since the pattern only matches innermost brackets)."""

    def match_contents(match: re.Match[str]):
        # Drop the decoration name, keep everything after the first space.
        body = match.group(1).strip()
        _, _, rest = body.partition(" ")
        return rest

    while MFM_PATTERN.search(text):
        text = MFM_PATTERN.sub(match_contents, text)

    return text
+13
pyproject.toml
[project]
name = "xpost"
version = "0.0.3"
description = "mastodon/misskey -> bluesky and fediverse crossposting tool"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "atproto>=0.0.61",
    "click>=8.2.1",
    "python-magic>=0.4.27",
    "requests>=2.32.3",
    "websockets>=13.1",
]
+290
util/database.py
··· 1 + import json 2 + import queue 3 + import sqlite3 4 + import threading 5 + from concurrent.futures import Future 6 + 7 + 8 + class DataBaseWorker: 9 + def __init__(self, database: str) -> None: 10 + super(DataBaseWorker, self).__init__() 11 + self.database = database 12 + self.queue = queue.Queue() 13 + self.thread = threading.Thread(target=self._run, daemon=True) 14 + self.shutdown_event = threading.Event() 15 + self.conn = sqlite3.connect(self.database, check_same_thread=False) 16 + self.lock = threading.Lock() 17 + self.thread.start() 18 + 19 + def _run(self): 20 + while not self.shutdown_event.is_set(): 21 + try: 22 + task, future = self.queue.get(timeout=1) 23 + try: 24 + with self.lock: 25 + result = task(self.conn) 26 + future.set_result(result) 27 + except Exception as e: 28 + future.set_exception(e) 29 + finally: 30 + self.queue.task_done() 31 + except queue.Empty: 32 + continue 33 + 34 + def execute(self, sql: str, params=()): 35 + def task(conn: sqlite3.Connection): 36 + cursor = conn.execute(sql, params) 37 + conn.commit() 38 + return cursor.fetchall() 39 + 40 + future = Future() 41 + self.queue.put((task, future)) 42 + return future.result() 43 + 44 + def close(self): 45 + self.shutdown_event.set() 46 + self.thread.join() 47 + with self.lock: 48 + self.conn.close() 49 + 50 + 51 + def try_insert_repost( 52 + db: DataBaseWorker, 53 + post_id: str, 54 + reposted_id: str, 55 + input_user: str, 56 + input_service: str, 57 + ) -> bool: 58 + reposted = find_post(db, reposted_id, input_user, input_service) 59 + if not reposted: 60 + return False 61 + 62 + insert_repost(db, post_id, reposted["id"], input_user, input_service) 63 + return True 64 + 65 + 66 + def try_insert_post( 67 + db: DataBaseWorker, 68 + post_id: str, 69 + in_reply: str | None, 70 + input_user: str, 71 + input_service: str, 72 + ) -> bool: 73 + root_id = None 74 + parent_id = None 75 + 76 + if in_reply: 77 + parent_post = find_post(db, in_reply, input_user, input_service) 78 + if not 
parent_post: 79 + return False 80 + 81 + root_id = parent_post["id"] 82 + parent_id = root_id 83 + if parent_post["root_id"]: 84 + root_id = parent_post["root_id"] 85 + 86 + if root_id and parent_id: 87 + insert_reply(db, post_id, input_user, input_service, parent_id, root_id) 88 + else: 89 + insert_post(db, post_id, input_user, input_service) 90 + 91 + return True 92 + 93 + 94 + def insert_repost( 95 + db: DataBaseWorker, identifier: str, reposted_id: int, user_id: str, serivce: str 96 + ) -> int: 97 + db.execute( 98 + """ 99 + INSERT INTO posts (user_id, service, identifier, reposted_id) 100 + VALUES (?, ?, ?, ?); 101 + """, 102 + (user_id, serivce, identifier, reposted_id), 103 + ) 104 + return db.execute("SELECT last_insert_rowid();", ())[0][0] 105 + 106 + 107 + def insert_post(db: DataBaseWorker, identifier: str, user_id: str, serivce: str) -> int: 108 + db.execute( 109 + """ 110 + INSERT INTO posts (user_id, service, identifier) 111 + VALUES (?, ?, ?); 112 + """, 113 + (user_id, serivce, identifier), 114 + ) 115 + return db.execute("SELECT last_insert_rowid();", ())[0][0] 116 + 117 + 118 + def insert_reply( 119 + db: DataBaseWorker, 120 + identifier: str, 121 + user_id: str, 122 + serivce: str, 123 + parent: int, 124 + root: int, 125 + ) -> int: 126 + db.execute( 127 + """ 128 + INSERT INTO posts (user_id, service, identifier, parent_id, root_id) 129 + VALUES (?, ?, ?, ?, ?); 130 + """, 131 + (user_id, serivce, identifier, parent, root), 132 + ) 133 + return db.execute("SELECT last_insert_rowid();", ())[0][0] 134 + 135 + 136 + def insert_mapping(db: DataBaseWorker, original: int, mapped: int): 137 + db.execute( 138 + """ 139 + INSERT INTO mappings (original_post_id, mapped_post_id) 140 + VALUES (?, ?); 141 + """, 142 + (original, mapped), 143 + ) 144 + 145 + 146 + def delete_post(db: DataBaseWorker, identifier: str, user_id: str, serivce: str): 147 + db.execute( 148 + """ 149 + DELETE FROM posts 150 + WHERE identifier = ? 151 + AND service = ? 
152 + AND user_id = ? 153 + """, 154 + (identifier, serivce, user_id), 155 + ) 156 + 157 + 158 + def fetch_data(db: DataBaseWorker, identifier: str, user_id: str, service: str) -> dict: 159 + result = db.execute( 160 + """ 161 + SELECT extra_data 162 + FROM posts 163 + WHERE identifier = ? 164 + AND user_id = ? 165 + AND service = ? 166 + """, 167 + (identifier, user_id, service), 168 + ) 169 + if not result or not result[0]: 170 + return {} 171 + return json.loads(result[0][0]) 172 + 173 + 174 + def store_data( 175 + db: DataBaseWorker, identifier: str, user_id: str, service: str, extra_data: dict 176 + ) -> None: 177 + db.execute( 178 + """ 179 + UPDATE posts 180 + SET extra_data = ? 181 + WHERE identifier = ? 182 + AND user_id = ? 183 + AND service = ? 184 + """, 185 + (json.dumps(extra_data), identifier, user_id, service), 186 + ) 187 + 188 + 189 + def find_mappings( 190 + db: DataBaseWorker, original_post: int, service: str, user_id: str 191 + ) -> list[str]: 192 + return db.execute( 193 + """ 194 + SELECT p.identifier 195 + FROM posts AS p 196 + JOIN mappings AS m 197 + ON p.id = m.mapped_post_id 198 + WHERE m.original_post_id = ? 199 + AND p.service = ? 200 + AND p.user_id = ? 201 + ORDER BY p.id; 202 + """, 203 + (original_post, service, user_id), 204 + ) 205 + 206 + 207 + def find_post_by_id(db: DataBaseWorker, id: int) -> dict | None: 208 + result = db.execute( 209 + """ 210 + SELECT user_id, service, identifier, parent_id, root_id, reposted_id 211 + FROM posts 212 + WHERE id = ? 
213 + """, 214 + (id,), 215 + ) 216 + if not result: 217 + return None 218 + user_id, service, identifier, parent_id, root_id, reposted_id = result[0] 219 + return { 220 + "user_id": user_id, 221 + "service": service, 222 + "identifier": identifier, 223 + "parent_id": parent_id, 224 + "root_id": root_id, 225 + "reposted_id": reposted_id, 226 + } 227 + 228 + 229 + def find_post( 230 + db: DataBaseWorker, identifier: str, user_id: str, service: str 231 + ) -> dict | None: 232 + result = db.execute( 233 + """ 234 + SELECT id, parent_id, root_id, reposted_id 235 + FROM posts 236 + WHERE identifier = ? 237 + AND user_id = ? 238 + AND service = ? 239 + """, 240 + (identifier, user_id, service), 241 + ) 242 + if not result: 243 + return None 244 + id, parent_id, root_id, reposted_id = result[0] 245 + return { 246 + "id": id, 247 + "parent_id": parent_id, 248 + "root_id": root_id, 249 + "reposted_id": reposted_id, 250 + } 251 + 252 + 253 + def find_mapped_thread( 254 + db: DataBaseWorker, 255 + parent_id: str, 256 + input_user: str, 257 + input_service: str, 258 + output_user: str, 259 + output_service: str, 260 + ): 261 + reply_data: dict | None = find_post(db, parent_id, input_user, input_service) 262 + if not reply_data: 263 + return None 264 + 265 + reply_mappings: list[str] | None = find_mappings( 266 + db, reply_data["id"], output_service, output_user 267 + ) 268 + if not reply_mappings: 269 + return None 270 + 271 + reply_identifier: str = reply_mappings[-1] 272 + root_identifier: str = reply_mappings[0] 273 + if reply_data["root_id"]: 274 + root_data = find_post_by_id(db, reply_data["root_id"]) 275 + if not root_data: 276 + return None 277 + 278 + root_mappings = find_mappings( 279 + db, reply_data["root_id"], output_service, output_user 280 + ) 281 + if not root_mappings: 282 + return None 283 + root_identifier = root_mappings[0] 284 + 285 + return ( 286 + root_identifier[0], # real ids 287 + reply_identifier[0], 288 + reply_data["root_id"], # db ids 289 + 
reply_data["id"], 290 + )
+172
util/html_util.py
··· 1 + from html.parser import HTMLParser 2 + 3 + import cross 4 + 5 + 6 + class HTMLPostTokenizer(HTMLParser): 7 + def __init__(self) -> None: 8 + super().__init__() 9 + self.tokens: list[cross.Token] = [] 10 + 11 + self.mentions: list[tuple[str, str]] 12 + self.tags: list[str] 13 + 14 + self.in_pre = False 15 + self.in_code = False 16 + 17 + self.current_tag_stack = [] 18 + self.list_stack = [] 19 + 20 + self.anchor_stack = [] 21 + self.anchor_data = [] 22 + 23 + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: 24 + attrs_dict = dict(attrs) 25 + 26 + def append_newline(): 27 + if self.tokens: 28 + last_token = self.tokens[-1] 29 + if isinstance( 30 + last_token, cross.TextToken 31 + ) and not last_token.text.endswith("\n"): 32 + self.tokens.append(cross.TextToken("\n")) 33 + 34 + match tag: 35 + case "br": 36 + self.tokens.append(cross.TextToken(" \n")) 37 + case "a": 38 + href = attrs_dict.get("href", "") 39 + self.anchor_stack.append(href) 40 + case "strong", "b": 41 + self.tokens.append(cross.TextToken("**")) 42 + case "em", "i": 43 + self.tokens.append(cross.TextToken("*")) 44 + case "del", "s": 45 + self.tokens.append(cross.TextToken("~~")) 46 + case "code": 47 + if not self.in_pre: 48 + self.tokens.append(cross.TextToken("`")) 49 + self.in_code = True 50 + case "pre": 51 + append_newline() 52 + self.tokens.append(cross.TextToken("```\n")) 53 + self.in_pre = True 54 + case "blockquote": 55 + append_newline() 56 + self.tokens.append(cross.TextToken("> ")) 57 + case "ul", "ol": 58 + self.list_stack.append(tag) 59 + append_newline() 60 + case "li": 61 + indent = " " * (len(self.list_stack) - 1) 62 + if self.list_stack and self.list_stack[-1] == "ul": 63 + self.tokens.append(cross.TextToken(f"{indent}- ")) 64 + elif self.list_stack and self.list_stack[-1] == "ol": 65 + self.tokens.append(cross.TextToken(f"{indent}1. 
")) 66 + case _: 67 + if tag in {"h1", "h2", "h3", "h4", "h5", "h6"}: 68 + level = int(tag[1]) 69 + self.tokens.append(cross.TextToken("\n" + "#" * level + " ")) 70 + 71 + self.current_tag_stack.append(tag) 72 + 73 + def handle_data(self, data: str) -> None: 74 + if self.anchor_stack: 75 + self.anchor_data.append(data) 76 + else: 77 + self.tokens.append(cross.TextToken(data)) 78 + 79 + def handle_endtag(self, tag: str) -> None: 80 + if not self.current_tag_stack: 81 + return 82 + 83 + if tag in self.current_tag_stack: 84 + self.current_tag_stack.remove(tag) 85 + 86 + match tag: 87 + case "p": 88 + self.tokens.append(cross.TextToken("\n\n")) 89 + case "a": 90 + href = self.anchor_stack.pop() 91 + anchor_data = "".join(self.anchor_data) 92 + self.anchor_data = [] 93 + 94 + if anchor_data.startswith("#"): 95 + as_tag = anchor_data[1:].lower() 96 + if any(as_tag == block for block in self.tags): 97 + self.tokens.append(cross.TagToken(anchor_data[1:])) 98 + elif anchor_data.startswith("@"): 99 + match = next( 100 + (pair for pair in self.mentions if anchor_data in pair), None 101 + ) 102 + 103 + if match: 104 + self.tokens.append(cross.MentionToken(match[1], "")) 105 + else: 106 + self.tokens.append(cross.LinkToken(href, anchor_data)) 107 + case "strong", "b": 108 + self.tokens.append(cross.TextToken("**")) 109 + case "em", "i": 110 + self.tokens.append(cross.TextToken("*")) 111 + case "del", "s": 112 + self.tokens.append(cross.TextToken("~~")) 113 + case "code": 114 + if not self.in_pre and self.in_code: 115 + self.tokens.append(cross.TextToken("`")) 116 + self.in_code = False 117 + case "pre": 118 + self.tokens.append(cross.TextToken("\n```\n")) 119 + self.in_pre = False 120 + case "blockquote": 121 + self.tokens.append(cross.TextToken("\n")) 122 + case "ul", "ol": 123 + if self.list_stack: 124 + self.list_stack.pop() 125 + self.tokens.append(cross.TextToken("\n")) 126 + case "li": 127 + self.tokens.append(cross.TextToken("\n")) 128 + case _: 129 + if tag in ["h1", 
"h2", "h3", "h4", "h5", "h6"]: 130 + self.tokens.append(cross.TextToken("\n")) 131 + 132 + def get_tokens(self) -> list[cross.Token]: 133 + if not self.tokens: 134 + return [] 135 + 136 + combined: list[cross.Token] = [] 137 + buffer: list[str] = [] 138 + 139 + def flush_buffer(): 140 + if buffer: 141 + merged = "".join(buffer) 142 + combined.append(cross.TextToken(text=merged)) 143 + buffer.clear() 144 + 145 + for token in self.tokens: 146 + if isinstance(token, cross.TextToken): 147 + buffer.append(token.text) 148 + else: 149 + flush_buffer() 150 + combined.append(token) 151 + 152 + flush_buffer() 153 + 154 + if combined and isinstance(combined[-1], cross.TextToken): 155 + if combined[-1].text.endswith("\n\n"): 156 + combined[-1] = cross.TextToken(combined[-1].text[:-2]) 157 + return combined 158 + 159 + def reset(self): 160 + """Reset the parser state for reuse.""" 161 + super().reset() 162 + self.tokens = [] 163 + 164 + self.mentions = [] 165 + self.tags = [] 166 + 167 + self.in_pre = False 168 + self.in_code = False 169 + 170 + self.current_tag_stack = [] 171 + self.anchor_stack = [] 172 + self.list_stack = []
+123
util/md_util.py
import re

import cross
import util.html_util as html_util
import util.util as util

# Anything with an explicit scheme ("xxx://...") or a mailto: target.
URL = re.compile(r"(?:(?:[A-Za-z][A-Za-z0-9+.-]*://)|mailto:)[^\s]+", re.IGNORECASE)
# Markdown inline link: [label](scheme://target)
MD_INLINE_LINK = re.compile(
    r"\[([^\]]+)\]\(\s*((?:(?:[A-Za-z][A-Za-z0-9+.\-]*://)|mailto:)[^\s\)]+)\s*\)",
    re.IGNORECASE,
)
# Markdown autolink: <scheme://target>
MD_AUTOLINK = re.compile(
    r"<((?:(?:[A-Za-z][A-Za-z0-9+.\-]*://)|mailto:)[^\s>]+)>", re.IGNORECASE
)
# "#tag" not preceded by a word character.
HASHTAG = re.compile(r"(?<!\w)\#([\w]+)")
# "@user" or "@user@instance.tld", not preceded by a word char or another @.
FEDIVERSE_HANDLE = re.compile(r"(?<![\w@])@([\w\.-]+)(?:@([\w\.-]+\.[\w\.-]+))?")


def tokenize_markdown(
    text: str, tags: list[str], handles: list[tuple[str, str]]
) -> list[cross.Token]:
    """Tokenize a markdown-flavored HTML post body into cross.Token objects.

    The text first goes through the HTML tokenizer; plain-text runs (and
    labelled links) are then re-scanned for markdown links, hashtags,
    fediverse handles and bare URLs.

    tags: lowercase hashtag names considered valid for this post.
    handles: (handle, uri) pairs of known mentions.
    """
    if not text:
        return []

    tokenizer = html_util.HTMLPostTokenizer()
    tokenizer.mentions = handles
    tokenizer.tags = tags
    tokenizer.feed(text)
    html_tokens = tokenizer.get_tokens()

    tokens: list[cross.Token] = []

    for tk in html_tokens:
        if isinstance(tk, cross.TextToken):
            tokens.extend(__tokenize_md(tk.text, tags, handles))
        elif isinstance(tk, cross.LinkToken):
            # Links whose label carries no extra information pass through
            # unchanged; labelled links are re-tokenized in markdown form.
            if not tk.label or util.canonical_label(tk.label, tk.href):
                tokens.append(tk)
                continue

            tokens.extend(__tokenize_md(f"[{tk.label}]({tk.href})", tags, handles))
        else:
            tokens.append(tk)

    return tokens


def __tokenize_md(
    text: str, tags: list[str], handles: list[tuple[str, str]]
) -> list[cross.Token]:
    """Single-pass scanner turning markdown entities in *text* into tokens.

    Characters not belonging to any entity are buffered and flushed as
    TextTokens; a candidate that fails validation (e.g. an unknown hashtag)
    deliberately falls through to the later checks / the text buffer.
    """
    index: int = 0
    total: int = len(text)
    buffer: list[str] = []

    tokens: list[cross.Token] = []

    def flush():
        # Emit buffered plain text as a single TextToken.
        nonlocal buffer
        if buffer:
            tokens.append(cross.TextToken("".join(buffer)))
            buffer = []

    while index < total:
        if text[index] == "[":
            md_inline = MD_INLINE_LINK.match(text, index)
            if md_inline:
                flush()
                label = md_inline.group(1)
                href = md_inline.group(2)

                tokens.append(cross.LinkToken(href, label))
                index = md_inline.end()
                continue

        if text[index] == "<":
            md_auto = MD_AUTOLINK.match(text, index)
            if md_auto:
                flush()
                href = md_auto.group(1)
                tokens.append(cross.LinkToken(href, href))
                index = md_auto.end()
                continue

        if text[index] == "#":
            tag = HASHTAG.match(text, index)
            if tag:
                tag_text = tag.group(1)
                # Only tags the post actually declares become TagTokens.
                if tag_text.lower() in tags:
                    flush()
                    tokens.append(cross.TagToken(tag_text))
                    index = tag.end()
                    continue

        if text[index] == "@":
            handle = FEDIVERSE_HANDLE.match(text, index)
            if handle:
                handle_text = handle.group(0)
                stripped_handle = handle_text.strip()

                # A known mention matches on either element of the pair.
                match = next(
                    (pair for pair in handles if stripped_handle in pair), None
                )

                if match:
                    flush()
                    tokens.append(
                        cross.MentionToken(match[1], "")
                    )  # TODO: misskey doesn't provide a uri
                    index = handle.end()
                    continue

        url = URL.match(text, index)
        if url:
            flush()
            href = url.group(0)
            tokens.append(cross.LinkToken(href, href))
            index = url.end()
            continue

        buffer.append(text[index])
        index += 1

    flush()
    return tokens
+160
util/media.py
import json
import os
import re
import subprocess
import urllib.parse

import magic
import requests

from util.util import LOGGER

# Filename extracted from a Content-Disposition header.
FILENAME = re.compile(r'filename="?([^\";]*)"?')
MAGIC = magic.Magic(mime=True)


class MediaInfo:
    """A downloaded media attachment: source url, filename, mime type,
    alt text and the raw bytes."""

    def __init__(self, url: str, name: str, mime: str, alt: str, io: bytes) -> None:
        self.url = url
        self.name = name
        self.mime = mime
        self.alt = alt
        self.io = io


def download_media(url: str, alt: str) -> MediaInfo | None:
    """Download *url* (up to 100 MB) and sniff its mime type.

    Returns None when the download fails or exceeds the size cap.
    """
    name = get_filename_from_url(url)
    io = download_blob(url, max_bytes=100_000_000)
    if not io:
        LOGGER.error("Failed to download media attachment! %s", url)
        return None
    mime = MAGIC.from_buffer(io)
    if not mime:
        # libmagic could not identify the payload; use the generic fallback.
        mime = "application/octet-stream"
    return MediaInfo(url, name, mime, alt, io)


def get_filename_from_url(url):
    """Best-effort filename: Content-Disposition header first, URL path second."""
    try:
        # Fix: added a timeout so a stalled server cannot hang the caller
        # forever (requests has no default timeout).
        response = requests.head(url, allow_redirects=True, timeout=20)
        disposition = response.headers.get("Content-Disposition")
        if disposition:
            filename = FILENAME.findall(disposition)
            if filename:
                return filename[0]
    except requests.RequestException:
        pass  # fall through to the URL-derived name

    parsed_url = urllib.parse.urlparse(url)
    base_name = os.path.basename(parsed_url.path)

    # hardcoded fix to return the cid for pds
    if base_name == "com.atproto.sync.getBlob":
        qs = urllib.parse.parse_qs(parsed_url.query)
        if qs and qs.get("cid"):
            return qs["cid"][0]

    return base_name


def probe_bytes(bytes: bytes) -> dict:
    """Run ffprobe on an in-memory blob and return its parsed JSON output.

    Raises RuntimeError when ffprobe exits non-zero.
    (Param name `bytes` shadows the builtin; kept for interface stability.)
    """
    cmd = [
        "ffprobe",
        "-v", "error",
        "-show_format",
        "-show_streams",
        "-print_format", "json",
        "pipe:0",
    ]
    proc = subprocess.run(
        cmd, input=bytes, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )

    if proc.returncode != 0:
        raise RuntimeError(f"ffprobe failed: {proc.stderr.decode()}")

    return json.loads(proc.stdout)


def convert_to_mp4(video_bytes: bytes) -> bytes:
    """Re-encode a video blob to fragmented MP4 (h264/aac) via ffmpeg.

    Raises RuntimeError when ffmpeg exits non-zero.
    """
    cmd = [
        "ffmpeg",
        "-i", "pipe:0",
        "-c:v", "libx264",
        "-crf", "30",
        "-preset", "slow",
        "-c:a", "aac",
        "-b:a", "128k",
        # Fragmented output is required because we stream through a pipe and
        # ffmpeg cannot seek back to write a trailing moov atom.
        "-movflags", "frag_keyframe+empty_moov+default_base_moof",
        "-f", "mp4",
        "pipe:1",
    ]

    proc = subprocess.Popen(
        cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out_bytes, err = proc.communicate(input=video_bytes)

    if proc.returncode != 0:
        raise RuntimeError(f"ffmpeg compress failed: {err.decode()}")

    return out_bytes


def compress_image(image_bytes: bytes, quality: int = 90):
    """Recompress an image blob to webp at the given quality via ffmpeg.

    Raises RuntimeError when ffmpeg exits non-zero.
    """
    cmd = [
        "ffmpeg",
        "-f", "image2pipe",
        "-i", "pipe:0",
        "-c:v", "webp",
        "-q:v", str(quality),
        "-f", "image2pipe",
        "pipe:1",
    ]

    proc = subprocess.Popen(
        cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out_bytes, err = proc.communicate(input=image_bytes)

    if proc.returncode != 0:
        raise RuntimeError(f"ffmpeg compress failed: {err.decode()}")

    return out_bytes


def download_blob(url: str, max_bytes: int = 5_000_000) -> bytes | None:
    """Stream *url* into memory; returns None on HTTP error or when the body
    exceeds *max_bytes*."""
    response = requests.get(url, stream=True, timeout=20)
    if response.status_code != 200:
        LOGGER.info("Failed to download %s! %s", url, response.text)
        return None

    # Fix: accumulate chunks in a bytearray — repeated `bytes +=` is
    # quadratic on large downloads.
    downloaded = bytearray()

    for chunk in response.iter_content(chunk_size=8192):
        if not chunk:
            continue

        if len(downloaded) + len(chunk) > max_bytes:
            response.close()
            return None

        downloaded.extend(chunk)

    return bytes(downloaded)


def get_media_meta(bytes: bytes):
    """Return width/height/duration of the first video stream in a blob.

    Raises ValueError when the blob has no video stream; duration falls back
    to the container duration, then -1 when neither is present.
    """
    probe = probe_bytes(bytes)
    streams = [s for s in probe["streams"] if s["codec_type"] == "video"]
    if not streams:
        raise ValueError("No video stream found")

    media = streams[0]
    return {
        "width": int(media["width"]),
        "height": int(media["height"]),
        "duration": float(media.get("duration", probe["format"].get("duration", -1))),
    }
+43
util/util.py
··· 1 + import json 2 + import logging 3 + import os 4 + import sys 5 + 6 + logging.basicConfig(stream=sys.stdout, level=logging.INFO) 7 + LOGGER = logging.getLogger("XPost") 8 + 9 + 10 + def as_json(obj, indent=None, sort_keys=False) -> str: 11 + return json.dumps( 12 + obj.__dict__ if not isinstance(obj, dict) else obj, 13 + default=lambda o: o.__json__() if hasattr(o, "__json__") else o.__dict__, 14 + indent=indent, 15 + sort_keys=sort_keys, 16 + ) 17 + 18 + 19 + def canonical_label(label: str | None, href: str): 20 + if not label or label == href: 21 + return True 22 + 23 + split = href.split("://", 1) 24 + if len(split) > 1: 25 + if split[1] == label: 26 + return True 27 + 28 + return False 29 + 30 + 31 + def safe_get(obj: dict, key: str, default): 32 + val = obj.get(key, default) 33 + return val if val else default 34 + 35 + 36 + def as_envvar(text: str | None) -> str | None: 37 + if not text: 38 + return None 39 + 40 + if text.startswith("env:"): 41 + return os.environ.get(text[4:], "") 42 + 43 + return text
+448
uv.lock
··· 1 + version = 1 2 + revision = 2 3 + requires-python = ">=3.12" 4 + 5 + [[package]] 6 + name = "annotated-types" 7 + version = "0.7.0" 8 + source = { registry = "https://pypi.org/simple" } 9 + sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } 10 + wheels = [ 11 + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, 12 + ] 13 + 14 + [[package]] 15 + name = "anyio" 16 + version = "4.9.0" 17 + source = { registry = "https://pypi.org/simple" } 18 + dependencies = [ 19 + { name = "idna" }, 20 + { name = "sniffio" }, 21 + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, 22 + ] 23 + sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } 24 + wheels = [ 25 + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, 26 + ] 27 + 28 + [[package]] 29 + name = "atproto" 30 + version = "0.0.61" 31 + source = { registry = "https://pypi.org/simple" } 32 + dependencies = [ 33 + { name = "click" }, 34 + { name = "cryptography" }, 35 + { name = "dnspython" }, 36 + { name = "httpx" }, 37 + { name = "libipld" }, 38 + { name = 
"pydantic" }, 39 + { name = "typing-extensions" }, 40 + { name = "websockets" }, 41 + ] 42 + sdist = { url = "https://files.pythonhosted.org/packages/b1/59/6f5074b3a45e0e3c1853544240e9039e86219feb30ff1bb5e8582c791547/atproto-0.0.61.tar.gz", hash = "sha256:98e022daf538d14f134ce7c91d42c4c973f3493ac56e43a84daa4c881f102beb", size = 189208, upload-time = "2025-04-19T00:20:11.918Z" } 43 + wheels = [ 44 + { url = "https://files.pythonhosted.org/packages/bd/b6/da9963bf54d4c0a8a590b6297d8858c395243dbb04cb581fdadb5fe7eac7/atproto-0.0.61-py3-none-any.whl", hash = "sha256:658da5832aaeea4a12a9a74235f9c90c11453e77d596fdccb1f8b39d56245b88", size = 380426, upload-time = "2025-04-19T00:20:10.026Z" }, 45 + ] 46 + 47 + [[package]] 48 + name = "certifi" 49 + version = "2025.4.26" 50 + source = { registry = "https://pypi.org/simple" } 51 + sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } 52 + wheels = [ 53 + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, 54 + ] 55 + 56 + [[package]] 57 + name = "cffi" 58 + version = "1.17.1" 59 + source = { registry = "https://pypi.org/simple" } 60 + dependencies = [ 61 + { name = "pycparser" }, 62 + ] 63 + sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } 64 + wheels = [ 65 + { url = 
"https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, 66 + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, 67 + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, 68 + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, 69 + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, 70 + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, 71 + { url = 
"https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, 72 + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, 73 + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, 74 + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, 75 + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, 76 + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, 77 + { url = 
"https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, 78 + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, 79 + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, 80 + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, 81 + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, 82 + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = 
"2024-09-04T20:44:38.492Z" }, 83 + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, 84 + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, 85 + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, 86 + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, 87 + ] 88 + 89 + [[package]] 90 + name = "charset-normalizer" 91 + version = "3.4.2" 92 + source = { registry = "https://pypi.org/simple" } 93 + sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } 94 + wheels = [ 95 + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, 
upload-time = "2025-05-02T08:32:33.712Z" }, 96 + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, 97 + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, 98 + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, 99 + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, 100 + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, 101 + { url = 
"https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, 102 + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, 103 + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, 104 + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, 105 + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, 106 + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, 107 + { url = 
"https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, 108 + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, 109 + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, 110 + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, 111 + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, 112 + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = 
"2025-05-02T08:33:04.063Z" }, 113 + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, 114 + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, 115 + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, 116 + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, 117 + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, 118 + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, 
upload-time = "2025-05-02T08:33:15.458Z" }, 119 + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, 120 + { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, 121 + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, 122 + ] 123 + 124 + [[package]] 125 + name = "click" 126 + version = "8.2.1" 127 + source = { registry = "https://pypi.org/simple" } 128 + dependencies = [ 129 + { name = "colorama", marker = "sys_platform == 'win32'" }, 130 + ] 131 + sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } 132 + wheels = [ 133 + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, 134 + ] 135 + 136 + [[package]] 137 + name = "colorama" 138 + version = "0.4.6" 139 + source = { registry = "https://pypi.org/simple" } 140 + sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } 141 + wheels = [ 142 + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, 143 + ] 144 + 145 + [[package]] 146 + name = "cryptography" 147 + version = "45.0.3" 148 + source = { registry = "https://pypi.org/simple" } 149 + dependencies = [ 150 + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, 151 + ] 152 + sdist = { url = "https://files.pythonhosted.org/packages/13/1f/9fa001e74a1993a9cadd2333bb889e50c66327b8594ac538ab8a04f915b7/cryptography-45.0.3.tar.gz", hash = "sha256:ec21313dd335c51d7877baf2972569f40a4291b76a0ce51391523ae358d05899", size = 744738, upload-time = "2025-05-25T14:17:24.777Z" } 153 + wheels = [ 154 + { url = "https://files.pythonhosted.org/packages/82/b2/2345dc595998caa6f68adf84e8f8b50d18e9fc4638d32b22ea8daedd4b7a/cryptography-45.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:7573d9eebaeceeb55285205dbbb8753ac1e962af3d9640791d12b36864065e71", size = 7056239, upload-time = "2025-05-25T14:16:12.22Z" }, 155 + { url = "https://files.pythonhosted.org/packages/71/3d/ac361649a0bfffc105e2298b720d8b862330a767dab27c06adc2ddbef96a/cryptography-45.0.3-cp311-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d377dde61c5d67eb4311eace661c3efda46c62113ff56bf05e2d679e02aebb5b", size = 4205541, upload-time = "2025-05-25T14:16:14.333Z" }, 156 + { url = 
"https://files.pythonhosted.org/packages/70/3e/c02a043750494d5c445f769e9c9f67e550d65060e0bfce52d91c1362693d/cryptography-45.0.3-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae1e637f527750811588e4582988932c222f8251f7b7ea93739acb624e1487f", size = 4433275, upload-time = "2025-05-25T14:16:16.421Z" }, 157 + { url = "https://files.pythonhosted.org/packages/40/7a/9af0bfd48784e80eef3eb6fd6fde96fe706b4fc156751ce1b2b965dada70/cryptography-45.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ca932e11218bcc9ef812aa497cdf669484870ecbcf2d99b765d6c27a86000942", size = 4209173, upload-time = "2025-05-25T14:16:18.163Z" }, 158 + { url = "https://files.pythonhosted.org/packages/31/5f/d6f8753c8708912df52e67969e80ef70b8e8897306cd9eb8b98201f8c184/cryptography-45.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af3f92b1dc25621f5fad065288a44ac790c5798e986a34d393ab27d2b27fcff9", size = 3898150, upload-time = "2025-05-25T14:16:20.34Z" }, 159 + { url = "https://files.pythonhosted.org/packages/8b/50/f256ab79c671fb066e47336706dc398c3b1e125f952e07d54ce82cf4011a/cryptography-45.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f8f8f0b73b885ddd7f3d8c2b2234a7d3ba49002b0223f58cfde1bedd9563c56", size = 4466473, upload-time = "2025-05-25T14:16:22.605Z" }, 160 + { url = "https://files.pythonhosted.org/packages/62/e7/312428336bb2df0848d0768ab5a062e11a32d18139447a76dfc19ada8eed/cryptography-45.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9cc80ce69032ffa528b5e16d217fa4d8d4bb7d6ba8659c1b4d74a1b0f4235fca", size = 4211890, upload-time = "2025-05-25T14:16:24.738Z" }, 161 + { url = "https://files.pythonhosted.org/packages/e7/53/8a130e22c1e432b3c14896ec5eb7ac01fb53c6737e1d705df7e0efb647c6/cryptography-45.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c824c9281cb628015bfc3c59335163d4ca0540d49de4582d6c2637312907e4b1", size = 4466300, upload-time = "2025-05-25T14:16:26.768Z" }, 162 + { url = 
"https://files.pythonhosted.org/packages/ba/75/6bb6579688ef805fd16a053005fce93944cdade465fc92ef32bbc5c40681/cryptography-45.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5833bb4355cb377ebd880457663a972cd044e7f49585aee39245c0d592904578", size = 4332483, upload-time = "2025-05-25T14:16:28.316Z" }, 163 + { url = "https://files.pythonhosted.org/packages/2f/11/2538f4e1ce05c6c4f81f43c1ef2bd6de7ae5e24ee284460ff6c77e42ca77/cryptography-45.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bb5bf55dcb69f7067d80354d0a348368da907345a2c448b0babc4215ccd3497", size = 4573714, upload-time = "2025-05-25T14:16:30.474Z" }, 164 + { url = "https://files.pythonhosted.org/packages/f5/bb/e86e9cf07f73a98d84a4084e8fd420b0e82330a901d9cac8149f994c3417/cryptography-45.0.3-cp311-abi3-win32.whl", hash = "sha256:3ad69eeb92a9de9421e1f6685e85a10fbcfb75c833b42cc9bc2ba9fb00da4710", size = 2934752, upload-time = "2025-05-25T14:16:32.204Z" }, 165 + { url = "https://files.pythonhosted.org/packages/c7/75/063bc9ddc3d1c73e959054f1fc091b79572e716ef74d6caaa56e945b4af9/cryptography-45.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:97787952246a77d77934d41b62fb1b6f3581d83f71b44796a4158d93b8f5c490", size = 3412465, upload-time = "2025-05-25T14:16:33.888Z" }, 166 + { url = "https://files.pythonhosted.org/packages/71/9b/04ead6015229a9396890d7654ee35ef630860fb42dc9ff9ec27f72157952/cryptography-45.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:c92519d242703b675ccefd0f0562eb45e74d438e001f8ab52d628e885751fb06", size = 7031892, upload-time = "2025-05-25T14:16:36.214Z" }, 167 + { url = "https://files.pythonhosted.org/packages/46/c7/c7d05d0e133a09fc677b8a87953815c522697bdf025e5cac13ba419e7240/cryptography-45.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5edcb90da1843df85292ef3a313513766a78fbbb83f584a5a58fb001a5a9d57", size = 4196181, upload-time = "2025-05-25T14:16:37.934Z" }, 168 + { url = 
"https://files.pythonhosted.org/packages/08/7a/6ad3aa796b18a683657cef930a986fac0045417e2dc428fd336cfc45ba52/cryptography-45.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38deed72285c7ed699864f964a3f4cf11ab3fb38e8d39cfcd96710cd2b5bb716", size = 4423370, upload-time = "2025-05-25T14:16:39.502Z" }, 169 + { url = "https://files.pythonhosted.org/packages/4f/58/ec1461bfcb393525f597ac6a10a63938d18775b7803324072974b41a926b/cryptography-45.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5555365a50efe1f486eed6ac7062c33b97ccef409f5970a0b6f205a7cfab59c8", size = 4197839, upload-time = "2025-05-25T14:16:41.322Z" }, 170 + { url = "https://files.pythonhosted.org/packages/d4/3d/5185b117c32ad4f40846f579369a80e710d6146c2baa8ce09d01612750db/cryptography-45.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9e4253ed8f5948a3589b3caee7ad9a5bf218ffd16869c516535325fece163dcc", size = 3886324, upload-time = "2025-05-25T14:16:43.041Z" }, 171 + { url = "https://files.pythonhosted.org/packages/67/85/caba91a57d291a2ad46e74016d1f83ac294f08128b26e2a81e9b4f2d2555/cryptography-45.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cfd84777b4b6684955ce86156cfb5e08d75e80dc2585e10d69e47f014f0a5342", size = 4450447, upload-time = "2025-05-25T14:16:44.759Z" }, 172 + { url = "https://files.pythonhosted.org/packages/ae/d1/164e3c9d559133a38279215c712b8ba38e77735d3412f37711b9f8f6f7e0/cryptography-45.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:a2b56de3417fd5f48773ad8e91abaa700b678dc7fe1e0c757e1ae340779acf7b", size = 4200576, upload-time = "2025-05-25T14:16:46.438Z" }, 173 + { url = "https://files.pythonhosted.org/packages/71/7a/e002d5ce624ed46dfc32abe1deff32190f3ac47ede911789ee936f5a4255/cryptography-45.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:57a6500d459e8035e813bd8b51b671977fb149a8c95ed814989da682314d0782", size = 4450308, upload-time = "2025-05-25T14:16:48.228Z" }, 174 + { url = 
"https://files.pythonhosted.org/packages/87/ad/3fbff9c28cf09b0a71e98af57d74f3662dea4a174b12acc493de00ea3f28/cryptography-45.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f22af3c78abfbc7cbcdf2c55d23c3e022e1a462ee2481011d518c7fb9c9f3d65", size = 4325125, upload-time = "2025-05-25T14:16:49.844Z" }, 175 + { url = "https://files.pythonhosted.org/packages/f5/b4/51417d0cc01802304c1984d76e9592f15e4801abd44ef7ba657060520bf0/cryptography-45.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:232954730c362638544758a8160c4ee1b832dc011d2c41a306ad8f7cccc5bb0b", size = 4560038, upload-time = "2025-05-25T14:16:51.398Z" }, 176 + { url = "https://files.pythonhosted.org/packages/80/38/d572f6482d45789a7202fb87d052deb7a7b136bf17473ebff33536727a2c/cryptography-45.0.3-cp37-abi3-win32.whl", hash = "sha256:cb6ab89421bc90e0422aca911c69044c2912fc3debb19bb3c1bfe28ee3dff6ab", size = 2924070, upload-time = "2025-05-25T14:16:53.472Z" }, 177 + { url = "https://files.pythonhosted.org/packages/91/5a/61f39c0ff4443651cc64e626fa97ad3099249152039952be8f344d6b0c86/cryptography-45.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:d54ae41e6bd70ea23707843021c778f151ca258081586f0cfa31d936ae43d1b2", size = 3395005, upload-time = "2025-05-25T14:16:55.134Z" }, 178 + ] 179 + 180 + [[package]] 181 + name = "dnspython" 182 + version = "2.7.0" 183 + source = { registry = "https://pypi.org/simple" } 184 + sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } 185 + wheels = [ 186 + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, 187 + ] 
188 + 189 + [[package]] 190 + name = "h11" 191 + version = "0.16.0" 192 + source = { registry = "https://pypi.org/simple" } 193 + sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } 194 + wheels = [ 195 + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, 196 + ] 197 + 198 + [[package]] 199 + name = "httpcore" 200 + version = "1.0.9" 201 + source = { registry = "https://pypi.org/simple" } 202 + dependencies = [ 203 + { name = "certifi" }, 204 + { name = "h11" }, 205 + ] 206 + sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } 207 + wheels = [ 208 + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, 209 + ] 210 + 211 + [[package]] 212 + name = "httpx" 213 + version = "0.28.1" 214 + source = { registry = "https://pypi.org/simple" } 215 + dependencies = [ 216 + { name = "anyio" }, 217 + { name = "certifi" }, 218 + { name = "httpcore" }, 219 + { name = "idna" }, 220 + ] 221 + sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = 
"sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } 222 + wheels = [ 223 + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, 224 + ] 225 + 226 + [[package]] 227 + name = "idna" 228 + version = "3.10" 229 + source = { registry = "https://pypi.org/simple" } 230 + sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } 231 + wheels = [ 232 + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, 233 + ] 234 + 235 + [[package]] 236 + name = "libipld" 237 + version = "3.0.1" 238 + source = { registry = "https://pypi.org/simple" } 239 + sdist = { url = "https://files.pythonhosted.org/packages/d4/ad/b440c64e2d1ee84f2933979175399ff09bd0ba7b1b07c6bc20ba585825cd/libipld-3.0.1.tar.gz", hash = "sha256:2970752de70e5fdcac4646900cdefaa0dca08db9b5d59c40b5496d99e3bffa64", size = 4359070, upload-time = "2025-02-18T11:19:59.924Z" } 240 + wheels = [ 241 + { url = "https://files.pythonhosted.org/packages/b8/6b/87c3b3222a1ebc9b8654a2ec168d177e85c993a679b698f53f199b367e37/libipld-3.0.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27313adb70ca9ecfaaa34f1ca6e45ee0569935b7ba9802f78c2f37f7a633a7dd", size = 307914, upload-time = "2025-02-18T11:18:13.449Z" }, 242 + { url = 
"https://files.pythonhosted.org/packages/62/fc/9cd90e1bf5e50fa31ced3a9e4eced8b386a509f693d915ff483c320f8556/libipld-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bf5a14647350aa6779d634b7dc0f6967296fe52e9ca1d6132e24aa388c77c68e", size = 295778, upload-time = "2025-02-18T11:18:15.223Z" }, 243 + { url = "https://files.pythonhosted.org/packages/9b/17/c4ee7f38d43d513935179706011aa8fa5ef70d223626477de05ae301f4ae/libipld-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d9e619573d500eb4a4ab4a8ef90882305fba43a5a405eb80fcc0afe5d6e9dcd", size = 675489, upload-time = "2025-02-18T11:18:16.808Z" }, 244 + { url = "https://files.pythonhosted.org/packages/8f/93/f7ba7d2ce896a774634f3a279a0d7900ea2b76e0d93c335727b01c564fd6/libipld-3.0.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2fbfaed3fc98c95cd412e61e960cd41633fc880de24327613b0cb0b974d277b", size = 681145, upload-time = "2025-02-18T11:18:18.835Z" }, 245 + { url = "https://files.pythonhosted.org/packages/92/16/c247088ec2194bfc5b5ed71059c468d1f16987696905fe9b5aaaac336521/libipld-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b36044476920455a26d30df75728eab069201c42c0af3e3610a30fd62b96ab55", size = 685159, upload-time = "2025-02-18T11:18:20.172Z" }, 246 + { url = "https://files.pythonhosted.org/packages/e1/f3/3d0442d0bd92f2bbc5bc7259569c2886bd1398a6f090ea30cd19e8c45f00/libipld-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4586a3442f12065a64a36ae56d80c71d05a87413fbf17bae330c42793c8ecfac", size = 820381, upload-time = "2025-02-18T11:18:22.398Z" }, 247 + { url = "https://files.pythonhosted.org/packages/c7/a7/63998349b924f0d2225ed194497d24bf088fad34fc02085fd97c4777164c/libipld-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d243ca7dea89e1579fd95f95ff612a7b56a980743c25e2a0b1a39cae7b67e55e", size = 681046, upload-time = "2025-02-18T11:18:23.954Z" }, 248 + { 
url = "https://files.pythonhosted.org/packages/0b/5a/bdbadafe5cb3c5ae1b4e7fd1517a436d7bda8b63621f3d39af92622d905e/libipld-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1525c07363abb20e8cd416df7ca316ddfc4f592ed2da694b02e0e4a4af1b9418", size = 689931, upload-time = "2025-02-18T11:18:26.868Z" }, 249 + { url = "https://files.pythonhosted.org/packages/b1/3c/759fcc3f12e41485ef374fab202b7ba84e9f001ca821d3811ff8cd030fdf/libipld-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:372768df5540867962c3c16fe80976f8b162a9771e8fe1b2175f18dabf23b9ce", size = 849420, upload-time = "2025-02-18T11:18:28.847Z" }, 250 + { url = "https://files.pythonhosted.org/packages/c4/ac/d697be6d9f20c5176d11193edbac70d55bdeaa70cd110a156ac87aaecaae/libipld-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:47bf15f9fc5890ff4807c0c5cb0ff99d625bcea3cd222aaa500d57466da529bd", size = 841270, upload-time = "2025-02-18T11:18:30.588Z" }, 251 + { url = "https://files.pythonhosted.org/packages/6e/91/5c64cd11e2daee21c968baa6a0669a0f402ead5fc99ad78b92e06a42e4e5/libipld-3.0.1-cp312-cp312-win32.whl", hash = "sha256:989d37ae0cb31380e6b76391e0272342de830adad2821c2de7b925b360fc45f3", size = 182583, upload-time = "2025-02-18T11:18:31.775Z" }, 252 + { url = "https://files.pythonhosted.org/packages/84/b7/37f88ada4e6fb762a71e93366c320f58995022cf8f67c4ad91d4b9a4568d/libipld-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:4557f20d4b8e61ac6c89ab4cea04f3a518a266f3c3d7348cf4cc8ac9b02c89dc", size = 197643, upload-time = "2025-02-18T11:18:32.86Z" }, 253 + { url = "https://files.pythonhosted.org/packages/3a/23/184f246a3ef1f6fe9775ad27851091a3779c14657e5591f6bdbe910bfe88/libipld-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:92ec97dac2e978f09343ebb64b0bb9bed9c294e8a224490552cfc200e9101f5c", size = 176991, upload-time = "2025-02-18T11:18:34.147Z" }, 254 + { url = 
"https://files.pythonhosted.org/packages/9d/a2/28c89265a107f9e92e32e308084edd7669e3fe40acb5e21b9e5af231f627/libipld-3.0.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2cc452e533b7af10a66134aa33a064b40e05fe51fa4a509a969342768543953f", size = 305678, upload-time = "2025-02-18T11:18:36.125Z" }, 255 + { url = "https://files.pythonhosted.org/packages/05/41/ccb2251240547e0903a55f84bcab0de3b766297f5112c9a3519ce0c66dee/libipld-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6cd8e21c0c7ee87831dc262794637cf6c47b55c55689bc917d2c3d2518221048", size = 295909, upload-time = "2025-02-18T11:18:37.246Z" }, 256 + { url = "https://files.pythonhosted.org/packages/9b/01/93f4e7f751eaafb6e7ba2a5c2dc859eda743837f3edbd06b712a5e92e63e/libipld-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de6425fc8ba0e9072c77826e66ece2dcb1d161f933cc35f2ad94470d5a304fb", size = 675461, upload-time = "2025-02-18T11:18:38.328Z" }, 257 + { url = "https://files.pythonhosted.org/packages/5e/a7/d1ff7b19e48f814f4fc908bd0a9160d80539a0128fe9b51285af09f65625/libipld-3.0.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c84465181ed30760ba9483e3ae71027573903cfbadf173be9fdd44bd83d8bd", size = 681427, upload-time = "2025-02-18T11:18:39.638Z" }, 258 + { url = "https://files.pythonhosted.org/packages/e2/42/7c3b45b9186f7f67015b0d717feeaa920ea215c51df675e27419f598ffb2/libipld-3.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45052b7f9b6a61a425318ff611b115571965d00e42c2ca66dfd0c56a4f3002b4", size = 684988, upload-time = "2025-02-18T11:18:42.021Z" }, 259 + { url = "https://files.pythonhosted.org/packages/33/02/dd30f423e8e74ba830dff5bbbd2d7f68c474e5df1d3b56fce5e59bc08a1e/libipld-3.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d183c2543db326d9a4e21819ba5674ae4f1e69dcfd853c654fba471cfbbaa88", size = 820272, upload-time = "2025-02-18T11:18:46.181Z" }, 260 + { url = 
"https://files.pythonhosted.org/packages/80/cd/bdd10568306ed1d71d24440e08b526ae69b93405d75a5289e0d54cf7b961/libipld-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceb82681b6985e34609636186ac00b51105816d310ed510de1169cd65f903622", size = 680986, upload-time = "2025-02-18T11:18:48.285Z" }, 261 + { url = "https://files.pythonhosted.org/packages/0a/20/d03eddce8c41f1f928efb37268424e336d97d2aca829bd267b1f12851759/libipld-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3c71ffe0b9c182664bac3a2386e6c6580744f5aa46513d0d6823e671ab71d82", size = 689783, upload-time = "2025-02-18T11:18:49.501Z" }, 262 + { url = "https://files.pythonhosted.org/packages/27/17/fdfcb6d0b0d7120eb3ad9361173cc6d5c24814b6ea2e7b135b3bb8d6920e/libipld-3.0.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6ed68ff00bb8d63e18bf823eb89ec86e9f30b997c6d152a35ec6c4c8502ea080", size = 849382, upload-time = "2025-02-18T11:18:51.183Z" }, 263 + { url = "https://files.pythonhosted.org/packages/6c/99/237d618fa6707300a60b8b4b859855e4e34dadb00233dc1e92d911166ae2/libipld-3.0.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d517c69b8f29acca27b0ced0ecb78f6e54f70952a35bc8f3060b628069c63ec", size = 841299, upload-time = "2025-02-18T11:18:53.398Z" }, 264 + { url = "https://files.pythonhosted.org/packages/93/49/32c73fd530fab341bebc4e400657f5c2189a8d4d627bcdeb774eb37dd90f/libipld-3.0.1-cp313-cp313-win32.whl", hash = "sha256:21989622e02a3bd8be16e97c412af4f48b5ddf3b32f9b0da9d7c6b0724d01e91", size = 182567, upload-time = "2025-02-18T11:18:54.635Z" }, 265 + { url = "https://files.pythonhosted.org/packages/7f/1e/ea73ea525d716ce836367daa212d4d0b1c25a89ffa281c9fee535cb99840/libipld-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:da81784d00597a0c9ac0a133ac820aaea60599b077778046dde4726e1a08685c", size = 196204, upload-time = "2025-02-18T11:18:55.706Z" }, 266 + { url = 
"https://files.pythonhosted.org/packages/e2/ba/56e9082bdd997c41b3e58d3afb9d40cf08725cbd486f7e334538a41bc2a8/libipld-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:d670dea8a76188e2977b5c3d780a6393bb270b0d04976436ce3afbc2cf4da516", size = 177044, upload-time = "2025-02-18T11:18:56.786Z" }, 267 + ] 268 + 269 + [[package]] 270 + name = "pycparser" 271 + version = "2.22" 272 + source = { registry = "https://pypi.org/simple" } 273 + sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } 274 + wheels = [ 275 + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, 276 + ] 277 + 278 + [[package]] 279 + name = "pydantic" 280 + version = "2.11.5" 281 + source = { registry = "https://pypi.org/simple" } 282 + dependencies = [ 283 + { name = "annotated-types" }, 284 + { name = "pydantic-core" }, 285 + { name = "typing-extensions" }, 286 + { name = "typing-inspection" }, 287 + ] 288 + sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102, upload-time = "2025-05-22T21:18:08.761Z" } 289 + wheels = [ 290 + { url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229, upload-time = "2025-05-22T21:18:06.329Z" }, 291 + ] 292 + 293 + [[package]] 294 + 
name = "pydantic-core" 295 + version = "2.33.2" 296 + source = { registry = "https://pypi.org/simple" } 297 + dependencies = [ 298 + { name = "typing-extensions" }, 299 + ] 300 + sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } 301 + wheels = [ 302 + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, 303 + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, 304 + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, 305 + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, 306 + { url = 
"https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, 307 + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, 308 + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, 309 + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, 310 + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, 311 + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = 
"2025-04-23T18:31:44.304Z" }, 312 + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, 313 + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, 314 + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, 315 + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, 316 + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, 317 + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, 318 + { url = 
"https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, 319 + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, 320 + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, 321 + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, 322 + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, 323 + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size 
= 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, 324 + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, 325 + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, 326 + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, 327 + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, 328 + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, 329 + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, 330 + { url = 
"https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, 331 + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, 332 + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, 333 + ] 334 + 335 + [[package]] 336 + name = "python-magic" 337 + version = "0.4.27" 338 + source = { registry = "https://pypi.org/simple" } 339 + sdist = { url = "https://files.pythonhosted.org/packages/da/db/0b3e28ac047452d079d375ec6798bf76a036a08182dbb39ed38116a49130/python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b", size = 14677, upload-time = "2022-06-07T20:16:59.508Z" } 340 + wheels = [ 341 + { url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840, upload-time = "2022-06-07T20:16:57.763Z" }, 342 + ] 343 + 344 + [[package]] 345 + name = "requests" 346 + version = "2.32.3" 347 + source = { registry = "https://pypi.org/simple" } 348 + dependencies = [ 349 + { name = "certifi" }, 350 + { name = "charset-normalizer" }, 351 + { name = "idna" }, 352 + { name = 
"urllib3" }, 353 + ] 354 + sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } 355 + wheels = [ 356 + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, 357 + ] 358 + 359 + [[package]] 360 + name = "sniffio" 361 + version = "1.3.1" 362 + source = { registry = "https://pypi.org/simple" } 363 + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } 364 + wheels = [ 365 + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, 366 + ] 367 + 368 + [[package]] 369 + name = "typing-extensions" 370 + version = "4.14.0" 371 + source = { registry = "https://pypi.org/simple" } 372 + sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } 373 + wheels = [ 374 + { url = 
"https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, 375 + ] 376 + 377 + [[package]] 378 + name = "typing-inspection" 379 + version = "0.4.1" 380 + source = { registry = "https://pypi.org/simple" } 381 + dependencies = [ 382 + { name = "typing-extensions" }, 383 + ] 384 + sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } 385 + wheels = [ 386 + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, 387 + ] 388 + 389 + [[package]] 390 + name = "urllib3" 391 + version = "2.4.0" 392 + source = { registry = "https://pypi.org/simple" } 393 + sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } 394 + wheels = [ 395 + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, 396 + ] 397 + 398 + [[package]] 399 + name = "websockets" 400 + version = "13.1" 401 + source = { registry = 
"https://pypi.org/simple" } 402 + sdist = { url = "https://files.pythonhosted.org/packages/e2/73/9223dbc7be3dcaf2a7bbf756c351ec8da04b1fa573edaf545b95f6b0c7fd/websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878", size = 158549, upload-time = "2024-09-21T17:34:21.54Z" } 403 + wheels = [ 404 + { url = "https://files.pythonhosted.org/packages/df/46/c426282f543b3c0296cf964aa5a7bb17e984f58dde23460c3d39b3148fcf/websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc", size = 157821, upload-time = "2024-09-21T17:32:56.442Z" }, 405 + { url = "https://files.pythonhosted.org/packages/aa/85/22529867010baac258da7c45848f9415e6cf37fef00a43856627806ffd04/websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49", size = 155480, upload-time = "2024-09-21T17:32:57.698Z" }, 406 + { url = "https://files.pythonhosted.org/packages/29/2c/bdb339bfbde0119a6e84af43ebf6275278698a2241c2719afc0d8b0bdbf2/websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd", size = 155715, upload-time = "2024-09-21T17:32:59.429Z" }, 407 + { url = "https://files.pythonhosted.org/packages/9f/d0/8612029ea04c5c22bf7af2fd3d63876c4eaeef9b97e86c11972a43aa0e6c/websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0", size = 165647, upload-time = "2024-09-21T17:33:00.495Z" }, 408 + { url = "https://files.pythonhosted.org/packages/56/04/1681ed516fa19ca9083f26d3f3a302257e0911ba75009533ed60fbb7b8d1/websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6", size = 164592, upload-time = "2024-09-21T17:33:02.223Z" 
}, 409 + { url = "https://files.pythonhosted.org/packages/38/6f/a96417a49c0ed132bb6087e8e39a37db851c70974f5c724a4b2a70066996/websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9", size = 165012, upload-time = "2024-09-21T17:33:03.288Z" }, 410 + { url = "https://files.pythonhosted.org/packages/40/8b/fccf294919a1b37d190e86042e1a907b8f66cff2b61e9befdbce03783e25/websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68", size = 165311, upload-time = "2024-09-21T17:33:04.728Z" }, 411 + { url = "https://files.pythonhosted.org/packages/c1/61/f8615cf7ce5fe538476ab6b4defff52beb7262ff8a73d5ef386322d9761d/websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14", size = 164692, upload-time = "2024-09-21T17:33:05.829Z" }, 412 + { url = "https://files.pythonhosted.org/packages/5c/f1/a29dd6046d3a722d26f182b783a7997d25298873a14028c4760347974ea3/websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf", size = 164686, upload-time = "2024-09-21T17:33:06.823Z" }, 413 + { url = "https://files.pythonhosted.org/packages/0f/99/ab1cdb282f7e595391226f03f9b498f52109d25a2ba03832e21614967dfa/websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c", size = 158712, upload-time = "2024-09-21T17:33:07.877Z" }, 414 + { url = "https://files.pythonhosted.org/packages/46/93/e19160db48b5581feac8468330aa11b7292880a94a37d7030478596cc14e/websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3", size = 159145, upload-time = "2024-09-21T17:33:09.202Z" }, 415 + { url = 
"https://files.pythonhosted.org/packages/51/20/2b99ca918e1cbd33c53db2cace5f0c0cd8296fc77558e1908799c712e1cd/websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6", size = 157828, upload-time = "2024-09-21T17:33:10.987Z" }, 416 + { url = "https://files.pythonhosted.org/packages/b8/47/0932a71d3d9c0e9483174f60713c84cee58d62839a143f21a2bcdbd2d205/websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708", size = 155487, upload-time = "2024-09-21T17:33:12.153Z" }, 417 + { url = "https://files.pythonhosted.org/packages/a9/60/f1711eb59ac7a6c5e98e5637fef5302f45b6f76a2c9d64fd83bbb341377a/websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418", size = 155721, upload-time = "2024-09-21T17:33:13.909Z" }, 418 + { url = "https://files.pythonhosted.org/packages/6a/e6/ba9a8db7f9d9b0e5f829cf626ff32677f39824968317223605a6b419d445/websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a", size = 165609, upload-time = "2024-09-21T17:33:14.967Z" }, 419 + { url = "https://files.pythonhosted.org/packages/c1/22/4ec80f1b9c27a0aebd84ccd857252eda8418ab9681eb571b37ca4c5e1305/websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f", size = 164556, upload-time = "2024-09-21T17:33:17.113Z" }, 420 + { url = "https://files.pythonhosted.org/packages/27/ac/35f423cb6bb15600438db80755609d27eda36d4c0b3c9d745ea12766c45e/websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5", size = 164993, upload-time 
= "2024-09-21T17:33:18.168Z" }, 421 + { url = "https://files.pythonhosted.org/packages/31/4e/98db4fd267f8be9e52e86b6ee4e9aa7c42b83452ea0ea0672f176224b977/websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135", size = 165360, upload-time = "2024-09-21T17:33:19.233Z" }, 422 + { url = "https://files.pythonhosted.org/packages/3f/15/3f0de7cda70ffc94b7e7024544072bc5b26e2c1eb36545291abb755d8cdb/websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2", size = 164745, upload-time = "2024-09-21T17:33:20.361Z" }, 423 + { url = "https://files.pythonhosted.org/packages/a1/6e/66b6b756aebbd680b934c8bdbb6dcb9ce45aad72cde5f8a7208dbb00dd36/websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6", size = 164732, upload-time = "2024-09-21T17:33:23.103Z" }, 424 + { url = "https://files.pythonhosted.org/packages/35/c6/12e3aab52c11aeb289e3dbbc05929e7a9d90d7a9173958477d3ef4f8ce2d/websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d", size = 158709, upload-time = "2024-09-21T17:33:24.196Z" }, 425 + { url = "https://files.pythonhosted.org/packages/41/d8/63d6194aae711d7263df4498200c690a9c39fb437ede10f3e157a6343e0d/websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2", size = 159144, upload-time = "2024-09-21T17:33:25.96Z" }, 426 + { url = "https://files.pythonhosted.org/packages/56/27/96a5cd2626d11c8280656c6c71d8ab50fe006490ef9971ccd154e0c42cd2/websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f", size = 152134, upload-time = "2024-09-21T17:34:19.904Z" }, 427 + ] 428 + 429 + [[package]] 430 + name = "xpost" 431 + version = "0.0.3" 432 + source = { virtual = "." 
} 433 + dependencies = [ 434 + { name = "atproto" }, 435 + { name = "click" }, 436 + { name = "python-magic" }, 437 + { name = "requests" }, 438 + { name = "websockets" }, 439 + ] 440 + 441 + [package.metadata] 442 + requires-dist = [ 443 + { name = "atproto", specifier = ">=0.0.61" }, 444 + { name = "click", specifier = ">=8.2.1" }, 445 + { name = "python-magic", specifier = ">=0.4.27" }, 446 + { name = "requests", specifier = ">=2.32.3" }, 447 + { name = "websockets", specifier = ">=13.1" }, 448 + ]