tangled
alpha
login
or
join now
zenfyr.dev
/
xpost
2
fork
atom
social media crossposting tool. 3rd time's the charm
mastodon
misskey
crossposting
bluesky
2
fork
atom
overview
issues
1
pulls
pipelines
more envvars
zenfyr.dev
5 months ago
1bf4ab28
793b1fd8
verified
This commit was signed with the committer's known signature.
zenfyr.dev
SSH Key Fingerprint:
SHA256:TtcIcnTnoAB5mqHofsaOxIgiMzfVBxej1AXT7DQdrTE=
+19
-20
3 changed files
expand all
collapse all
unified
split
atproto
identity.py
env.py
main.py
+2
-2
atproto/identity.py
reviewed
···
151
151
handle_resolver = HandleResolver()
152
152
did_resolver = DidResolver(env.PLC_HOST)
153
153
154
154
-
did_cache = Path(env.CACHE_DIR).joinpath('did.cache')
155
155
-
handle_cache = Path(env.CACHE_DIR).joinpath('handle.cache')
154
154
+
did_cache = env.CACHE_DIR.joinpath('did.cache')
155
155
+
handle_cache = env.CACHE_DIR.joinpath('handle.cache')
156
156
157
157
did_resolver.load_cache(did_cache)
158
158
handle_resolver.load_cache(handle_cache)
+9
-3
env.py
reviewed
···
1
1
import os
2
2
+
from pathlib import Path
2
3
3
4
DEV = bool(os.environ.get("DEV")) or False
4
4
-
DATA_DIR = os.environ.get("DATA_DIR") or "./data"
5
5
-
CACHE_DIR = os.environ.get("CACHE_DIR") or "./data/cache"
6
6
-
MIGRATIONS_DIR = os.environ.get("MIGRATIONS_DIR") or "./migrations"
5
5
+
6
6
+
DATA_DIR = Path(os.environ.get("DATA_DIR") or "./data")
7
7
+
CACHE_DIR = Path(os.environ.get("CACHE_DIR") or DATA_DIR.joinpath("cache"))
8
8
+
SETTINGS_DIR = Path(os.environ.get("SETTINGS_DIR") or DATA_DIR.joinpath("settings.json"))
9
9
+
DATABASE_DIR = Path(os.environ.get("DATABASE_DIR") or DATA_DIR.joinpath("data.db"))
10
10
+
11
11
+
MIGRATIONS_DIR = Path(os.environ.get("MIGRATIONS_DIR") or "./migrations")
12
12
+
7
13
PLC_HOST = os.environ.get("PLC_HOST") or "https://plc.directory"
+8
-15
main.py
reviewed
···
14
14
15
15
16
16
def main() -> None:
17
17
-
data = Path(env.DATA_DIR)
18
18
-
19
19
-
if not data.exists():
20
20
-
data.mkdir(parents=True)
21
21
-
22
22
-
settings_path = data.joinpath("settings.json")
23
23
-
database_path = data.joinpath("db.sqlite")
17
17
+
if not env.DATA_DIR.exists():
18
18
+
env.DATA_DIR.mkdir(parents=True)
24
19
25
25
-
if not settings_path.exists():
26
26
-
LOGGER.info("First launch detected! Creating %s and exiting!", settings_path)
20
20
+
if not env.SETTINGS_DIR.exists():
21
21
+
LOGGER.info("First launch detected! Creating %s and exiting!", env.SETTINGS_DIR)
27
22
return
28
23
29
29
-
migrator = DatabaseMigrator(database_path, Path(env.MIGRATIONS_DIR))
24
24
+
migrator = DatabaseMigrator(env.DATABASE_DIR, env.MIGRATIONS_DIR)
30
25
try:
31
26
migrator.migrate()
32
27
except Exception:
···
35
30
finally:
36
31
migrator.close()
37
32
38
38
-
db_pool = DatabasePool(database_path)
33
33
+
db_pool = DatabasePool(env.DATABASE_DIR)
39
34
40
35
LOGGER.info("Bootstrapping registries...")
41
36
bootstrap()
42
37
43
38
LOGGER.info("Loading settings...")
44
39
45
45
-
with open(settings_path) as f:
40
40
+
with open(env.SETTINGS_DIR) as f:
46
41
settings = json.load(f)
47
42
read_env(settings)
48
43
···
52
47
raise KeyError("No `outputs` spicified in settings!")
53
48
54
49
input = create_input_service(db_pool, settings["input"])
55
55
-
outputs = [
56
56
-
create_output_service(db_pool, data) for data in settings["outputs"]
57
57
-
]
50
50
+
outputs = [create_output_service(db_pool, data) for data in settings["outputs"]]
58
51
59
52
LOGGER.info("Starting task worker...")
60
53