···11+import os
22+import sys
33+import json
44+from PIL import Image
55+from PIL.ExifTags import TAGS
66+from datetime import datetime
77+from geopy.distance import geodesic
def decimal_coords(coords, ref):
    """Convert a GPS DMS triple (degrees, minutes, seconds) to decimal degrees.

    The sign is negative for southern latitudes ('S') and western
    longitudes ('W'); all other hemisphere refs yield a positive value.
    """
    degrees, minutes, seconds = coords[0], coords[1], coords[2]
    value = float(degrees) + float(minutes) / 60 + float(seconds) / 3600
    return -value if ref in ('S', 'W') else value
def main(directory, threshold):
    """Cluster geotagged JPEGs in *directory* by GPS proximity.

    Reads GPS EXIF data from every .jpg/.jpeg file, sorts the photos by
    file modification time, and groups consecutive photos into a cluster
    while each photo is within *threshold* kilometers of the previous
    one.  Writes the clusters to <directory>.clusters.json (wrapped in a
    single-element list, as downstream consumers expect) and prints a
    summary report.
    """
    # Numeric EXIF tag id for the GPSInfo IFD.
    GPSINFO_TAG = next(tag for tag, name in TAGS.items() if name == "GPSInfo")

    # Collect image data
    files = os.listdir(directory)
    images = []
    for prog, filename in enumerate(files, start=1):
        if prog % 200 == 0 or prog == len(files):
            print(f"Progress: {prog} / {len(files)}")
        filepath = os.path.join(directory, filename)
        if not filename.lower().endswith(('.jpg', '.jpeg')):
            continue
        try:
            with Image.open(filepath) as img:
                exif = img.getexif()
                gps_info = exif.get(GPSINFO_TAG, None) if exif else None
                if gps_info:
                    # GPS IFD layout: 1 = lat ref, 2 = lat DMS,
                    #                 3 = lon ref, 4 = lon DMS.
                    lat = decimal_coords(gps_info[2], gps_info[1])
                    lon = decimal_coords(gps_info[4], gps_info[3])
                    modtime = datetime.fromtimestamp(os.path.getmtime(filepath))
                    images.append({
                        'modtime': modtime.timestamp(),
                        'lat': lat,
                        'lon': lon,
                        'filename': filename
                    })
        except Exception as e:
            # Fixed: the previous message printed a literal "(unknown)"
            # placeholder instead of the offending filename.
            print(f"Error processing {filename}: {e}")

    # Sort images by modification time so clustering follows shooting order.
    images.sort(key=lambda x: x['modtime'])

    # Start a new cluster whenever consecutive photos are farther apart
    # than the threshold (kilometers, geodesic distance).
    clusters = []
    current_cluster = []
    for i, curr in enumerate(images):
        if i == 0:
            current_cluster.append(curr)
            continue
        prev = images[i - 1]
        distance = geodesic((prev['lat'], prev['lon']), (curr['lat'], curr['lon'])).kilometers
        if distance < threshold:
            current_cluster.append(curr)
        else:
            clusters.append(current_cluster)
            current_cluster = [curr]
    if current_cluster:
        clusters.append(current_cluster)

    # Persist wrapped in a list: consumers read json.load(...)[0].
    with open(f"{directory}.clusters.json", 'w') as f:
        json.dump([clusters], f, indent=2)

    # Output results
    print(f"\n{'='*60}")
    print(f"{'GPS Clustering Report':^60}")
    print(f"{'='*60}")

    for idx, cluster in enumerate(clusters):
        print(f"\nCluster {idx} (Representative: {cluster[0]['filename']})")
        for img in cluster:
            print(f"  File: {img['filename']}")
            print(f"    Lat: {img['lat']:.6f}, Lon: {img['lon']:.6f}")

    print(f"\n{'='*60}")
    print(f"Total Clusters: {len(clusters)}")
    print(f"{'='*60}")
if __name__ == "__main__":
    # Default clustering distance in kilometers.
    DEFAULT_THRESHOLD = 30.0
    argv = sys.argv
    if len(argv) < 2:
        print("Usage: python script.py <directory> [threshold]")
        print(f"Default threshold is {DEFAULT_THRESHOLD} km")
        sys.exit(1)

    directory = argv[1]
    threshold = float(argv[2]) if len(argv) > 2 else DEFAULT_THRESHOLD
    main(directory, threshold)
+133
preprocessing/srv/geokdclust.py
···11+import sys
22+import json
33+from kdtree import GeoIndexer
def _load_city_nodes(num_cities):
    """Parse the GeoNames cities<num_cities>.txt dump into kd-tree input.

    Returns a list of (lat, lon, "CC:ADMIN1:ADMIN2") tuples.  The file is
    tab-separated; column positions follow the GeoNames "geoname" schema.
    """
    # Column positions: latitude, longitude, country_code, admin1_code, admin2_code.
    LAT, LON, COUNTRY, ADMIN1, ADMIN2 = 4, 5, 8, 10, 11
    nodes = []
    with open(f"cities{num_cities}.txt", 'r') as file:
        for line in file:
            line = line.strip()
            if not line:
                continue  # Skip empty lines
            fields = line.split('\t')
            if len(fields) <= ADMIN2:
                continue  # Skip lines with insufficient data
            region_key = f"{fields[COUNTRY]}:{fields[ADMIN1]}:{fields[ADMIN2]}"
            nodes.append((float(fields[LAT]), float(fields[LON]), region_key))
    return nodes


def _load_region_names():
    """Map 'CC:ADMIN1:ADMIN2' region keys to display names (admin2Codes.txt)."""
    regions = dict()
    with open('admin2Codes.txt', 'r') as file:
        for line in file:
            line = line.strip()
            if not line:
                continue  # Skip empty lines
            fields = line.split('\t')
            # File keys use dots (e.g. 'AU.02.12345'); normalize to ':'.
            regions[fields[0].replace('.', ':')] = fields[1]
    return regions


def main():
    """Regroup photo clusters by nearest-city admin region.

    Reads <directory>.clusters.json (produced by the GPS clustering
    script), finds the nearest GeoNames city for every photo, groups
    photos by that city's admin region, and writes the regrouped
    clusters plus the set of matched cities to JSON files.
    """
    if len(sys.argv) < 2:
        # Fixed: the old usage text referenced DEFAULT_THRESHOLD, which was
        # never defined in this script (NameError), and this script takes
        # no threshold argument.
        print("Usage: python script.py <directory>")
        sys.exit(1)

    directory = sys.argv[1]
    num_cities = '500'  # which GeoNames extract to use (cities500.txt)

    # 1. Build the index
    print("Building index...")
    indexer = GeoIndexer(_load_city_nodes(num_cities))
    print("Index built successfully.")

    regions = _load_region_names()

    with open(f"{directory}.clusters.json") as file:
        clusters = json.load(file)[0]

    photos_by_region = dict()  # region key -> photos in that admin region
    unknown = []               # photos whose nearest city has no admin2 entry
    towns = set()              # distinct nearest-city nodes encountered

    for cluster in clusters:
        for photo in cluster:
            # Find the nearest indexed city for this photo.
            nearest_node = indexer.find_nearest(
                float(photo['lat']),
                float(photo['lon'])
            )

            print(f"\nQuery Point: ({photo['lat']}, {photo['lon']})")
            print(f"Nearest Node Found: {nearest_node}")

            region_key = nearest_node[2]
            if region_key in regions:
                print(f"Derived Admin Region: {regions[region_key]}")
                photos_by_region.setdefault(region_key, []).append(photo)
                towns.add(nearest_node)
            else:
                print(f"Unknown Admin Region: {region_key}")
                unknown.append(photo)

    with open(f"{directory}.region.clusters.{num_cities}.json", 'w') as file:
        regrouped = list(photos_by_region.values())
        regrouped.append(unknown)  # unknown-region photos form the last group
        json.dump([regrouped], file)

    with open(f"{directory}.close_cities.{num_cities}.json", 'w') as file:
        json.dump(list(towns), file)

if __name__ == '__main__':
    main()
+24
preprocessing/srv/isocountry.py
···11+import json
def parse_country_data(file_path):
    """Parse a GeoNames countryInfo.txt file into {ISO code: country name}.

    The file is tab-separated with '#'-prefixed comment lines; the ISO
    3166 alpha-2 code is column 0 and the country name is column 4.
    """
    countries = dict()

    with open(file_path, 'r', encoding='utf-8') as file:
        for line in file:
            line = line.strip()
            if not line or line.startswith('#'):
                continue  # Skip comments and empty lines

            # BUG FIX: the file is tab-delimited; splitting on arbitrary
            # whitespace broke multi-word country names ("United States")
            # and shifted every later column.
            fields = line.split('\t')
            if len(fields) >= 5:
                iso_code = fields[0]
                country_name = fields[4]
                countries[iso_code] = country_name

    return countries
if __name__ == "__main__":
    file_path = 'countryInfo.txt'  # Replace with your actual file path
    with open('countries.json', 'w') as f:
        json.dump(parse_country_data(file_path), f, indent=2, ensure_ascii=False)
+136
preprocessing/srv/kdtree.py
···11+import math
22+33+class _Node:
44+ """A private helper class for a node in the Kd-tree."""
55+ def __init__(self, point, axis, left=None, right=None):
66+ self.point = point
77+ self.axis = axis
88+ self.left = left
99+ self.right = right
class GeoIndexer:
    """
    Indexes a collection of latitude/longitude nodes using a Kd-tree
    for efficient approximate nearest neighbor searches.

    Distances are squared Euclidean in (lat, lon) degree space, so
    results are approximate near the poles and across the antimeridian.
    """

    def __init__(self, nodes: list[tuple[float, float, str]]):
        """
        Initializes the index and builds the Kd-tree from the nodes.

        Args:
            nodes: A list of (latitude, longitude, payload) tuples.

        Raises:
            ValueError: If *nodes* is empty.
        """
        if not nodes:
            raise ValueError("Node list cannot be empty.")

        # k=2 for (latitude, longitude)
        self._k = 2
        # Fixed: build from a copy so constructing the index no longer
        # reorders the caller's list (the old code sorted it in place).
        self._root = self._build_kdtree(list(nodes))

    def _build_kdtree(self, points: list[tuple[float, float, str]], depth: int = 0):
        """Recursively builds the Kd-tree; owns (and may reorder) *points*."""
        if not points:
            return None

        # Alternate the splitting axis by depth: 0 = latitude, 1 = longitude.
        axis = depth % self._k

        # Sort points by the current axis and split at the median.
        points.sort(key=lambda point: point[axis])
        median_idx = len(points) // 2

        return _Node(
            point=points[median_idx],
            axis=axis,
            left=self._build_kdtree(points[:median_idx], depth + 1),
            right=self._build_kdtree(points[median_idx + 1:], depth + 1),
        )

    def find_nearest(self, lat: float, lon: float) -> tuple[float, float, str]:
        """
        Finds the indexed node closest to the given latitude and longitude.

        Args:
            lat: The latitude of the query point.
            lon: The longitude of the query point.

        Returns:
            The full (latitude, longitude, payload) tuple of the nearest node.
        """
        if self._root is None:
            raise Exception("The tree is empty.")

        # Mutable [best_point, best_sq_dist] shared with the recursion
        # (pass-by-reference behavior for the nested search).
        state = [None, float('inf')]
        self._find_nearest_recursive(self._root, (lat, lon), state)
        return state[0]

    def _find_nearest_recursive(self, node: _Node, query_point: tuple[float, float], state: list):
        """Depth-first nearest-neighbor search with plane-distance pruning."""
        if node is None:
            return

        # Squared Euclidean distance between this node and the query point.
        dist_sq = (node.point[0] - query_point[0])**2 + (node.point[1] - query_point[1])**2
        if dist_sq < state[1]:
            state[0] = node.point
            state[1] = dist_sq

        axis = node.axis
        # Descend first into the side of the splitting plane containing the query.
        if query_point[axis] < node.point[axis]:
            primary_child, secondary_child = node.left, node.right
        else:
            primary_child, secondary_child = node.right, node.left

        self._find_nearest_recursive(primary_child, query_point, state)

        # Pruning: only cross the splitting plane when the plane itself is
        # closer than the current best, i.e. the far side could hide a
        # closer point.
        dist_to_plane_sq = (node.point[axis] - query_point[axis]) ** 2
        if dist_to_plane_sq < state[1]:
            self._find_nearest_recursive(secondary_child, query_point, state)
108108+109109+110110+# --- Example Usage ---
if __name__ == '__main__':
    # Sample nodes around Los Angeles: (latitude, longitude, label).
    city_nodes = [
        (34.0522, -118.2437, "LA"),  # Los Angeles
        (34.0722, -118.2337, "LA"),  # Near Dodger Stadium
        (34.0430, -118.2673, "LA"),  # Near Crypto.com Arena
        (34.1522, -118.2553, "LA"),  # Griffith Observatory
        (34.1016, -118.3409, "LA"),  # Hollywood Walk of Fame
        (34.0620, -118.3617, "LA"),  # LACMA
    ]

    # Build the index once up front.
    print("Building index...")
    indexer = GeoIndexer(city_nodes)
    print("Index built successfully.")

    # Query with a point in Koreatown and report the nearest node.
    query_lat, query_lon = 34.0800, -118.3000
    nearest_node = indexer.find_nearest(query_lat, query_lon)

    print(f"\nQuery Point: ({query_lat}, {query_lon})")
    print(f"Nearest Node Found: {nearest_node}")
+274
preprocessing/srv/phmeta.py
···11+import os
22+import sys
33+import json
44+import argparse
55+from datetime import datetime, timezone, timedelta
66+from collections import defaultdict
77+from PIL import Image
88+from PIL.ExifTags import TAGS
99+1010+from kdtree import GeoIndexer
1111+1212+from photos_api_client import Client
1313+from photos_api_client.api.default import post_album, post_batch_photos_in_album
1414+from photos_api_client.models import PostAlbumBody, PostAlbumBodyAlbum
1515+from photos_api_client.models import PostBatchPhotosInAlbumBody, PostBatchPhotosInAlbumBodyAlbum, PostBatchPhotosInAlbumBodyContentsItem
1616+1717+1818+# Define GMT+10 timezone
1919+GMT10 = timezone(timedelta(hours=10))
def decimal_coords(coords, ref):
    """Convert GPS coordinates from DMS (deg, min, sec) to signed decimal degrees."""
    # EXIF stores each component as a rational; float() normalizes them.
    degrees = float(coords[0])
    minutes = float(coords[1]) / 60
    seconds = float(coords[2]) / 3600
    value = degrees + minutes + seconds
    # South and West hemispheres are negative by convention.
    if ref in ('S', 'W'):
        value = -value
    return value
def get_image_info(filepath, date_source):
    """Extract (metadata, timestamp) from an image file.

    metadata is a dict with width/height/orientation plus lat/lon in
    decimal degrees (None when the photo carries no GPS EXIF).  The
    timestamp prefers EXIF DateTimeOriginal (when date_source == 'exif'),
    then EXIF DateTime, and falls back to the file's modification time.
    Returns (None, None) on any processing error.
    """
    try:
        # Use a context manager so the underlying file handle is closed
        # promptly (the old code leaked one handle per image).
        with Image.open(filepath) as img:
            width, height = img.size
            orientation = 1
            timestamp = None
            lat = None
            lon = None

            # Get date (and orientation/GPS) from EXIF
            if date_source == 'exif' and hasattr(img, '_getexif'):
                exif_data = img._getexif()
                if exif_data:
                    for tag, value in exif_data.items():
                        tag_name = TAGS.get(tag, tag)
                        if tag_name == 'Orientation':
                            orientation = value
                        # DateTimeOriginal is preferred as it's when the photo was taken
                        if tag_name == 'DateTimeOriginal' and value:
                            # Format is 'YYYY:MM:DD HH:MM:SS'
                            timestamp = datetime.strptime(value, '%Y:%m:%d %H:%M:%S')
                        # Fallback to DateTime
                        elif tag_name == 'DateTime' and not timestamp and value:
                            timestamp = datetime.strptime(value, '%Y:%m:%d %H:%M:%S')
                        if tag_name == 'GPSInfo':
                            # GPS IFD layout: 1 = lat ref, 2 = lat DMS,
                            #                 3 = lon ref, 4 = lon DMS.
                            lat = decimal_coords(value[2], value[1])
                            lon = decimal_coords(value[4], value[3])

        # If no EXIF date or if source is modtime, use file modification time
        if not timestamp:
            mod_time = os.path.getmtime(filepath)
            timestamp = datetime.fromtimestamp(mod_time)

        metadata = {
            "width": width,
            "height": height,
            "orientation": orientation,
            "lat": lat,
            "lon": lon,
        }
        return metadata, timestamp

    except Exception as e:
        print(f"Error processing {filepath}: {e}")
        return None, None
def _load_city_nodes(num_cities):
    """Parse the GeoNames cities<num_cities>.txt dump into kd-tree input.

    Returns a list of (lat, lon, "CC:ADMIN1:ADMIN2") tuples.  The file is
    tab-separated; column positions follow the GeoNames "geoname" schema.
    """
    # Column positions: latitude, longitude, country_code, admin1_code, admin2_code.
    LAT, LON, COUNTRY, ADMIN1, ADMIN2 = 4, 5, 8, 10, 11
    nodes = []
    with open(f"/mnt/t7/adam/geocoding/cities{num_cities}.txt", 'r') as file:
        for line in file:
            line = line.strip()
            if not line:
                continue  # Skip empty lines
            fields = line.split('\t')
            if len(fields) <= ADMIN2:
                continue  # Skip lines with insufficient data
            region_key = f"{fields[COUNTRY]}:{fields[ADMIN1]}:{fields[ADMIN2]}"
            nodes.append((float(fields[LAT]), float(fields[LON]), region_key))
    return nodes


def create_store_json(directory, date_source, backend_api):
    """Create a store.geo.json file for a directory of photos, segmented by
    day in GMT+10 and tagged with the nearest GeoNames admin-region key.

    When *backend_api* is truthy the data is POSTed to the photos API
    instead of being written to disk.
    """
    # 1. Build the nearest-city index.
    # (The old code also loaded admin2Codes.txt into a `regions` dict that
    # was never used here; that dead load has been removed.)
    num_cities = '500'  # which GeoNames extract to use (cities500.txt)
    print("Building index...")
    indexer = GeoIndexer(_load_city_nodes(num_cities))
    print("Index built successfully.")

    images_by_day = defaultdict(list)
    total_images = 0

    for filename in sorted(os.listdir(directory)):
        if not filename.lower().endswith(('.jpg', '.jpeg')):
            continue
        print(filename)
        filepath = os.path.join(directory, filename)
        metadata, timestamp = get_image_info(filepath, date_source)
        if not (metadata and timestamp):
            continue

        # Bucket by the calendar day in GMT+10.
        local_dt = timestamp.astimezone(GMT10)
        day = local_dt.date()

        nearest_node = None
        if metadata['lat'] is not None and metadata['lon'] is not None:
            nearest_node = indexer.find_nearest(
                float(metadata['lat']),
                float(metadata['lon'])
            )

        # Keep only the fields the store format needs; 'region' is present
        # only when the photo had GPS coordinates.
        slim_metadata = {
            "width": metadata['width'],
            "height": metadata['height'],
            "orientation": metadata['orientation'],
        }
        if nearest_node is not None:
            slim_metadata["region"] = nearest_node[2]

        images_by_day[day].append({
            "file": filename,
            "timestamp": timestamp.isoformat(),
            "metadata": slim_metadata,
        })
        total_images += 1

    if not total_images:
        return

    dir_name = os.path.basename(directory)
    # Sort days to have segments in chronological order.
    segments = [{
        "segmentId": f"{dir_name}_{day.strftime('%Y-%m-%d')}",
        "header": day.strftime('%a, %B %d, %Y'),
        "images": images_by_day[day],
    } for day in sorted(images_by_day.keys())]

    store_data = [
        {
            "sectionId": dir_name,
            "totalImages": total_images,
            "segments": segments
        }
    ]

    if backend_api:
        client = Client(base_url="http://adams-laptop:8000")
        # BUG FIX: iterate each segment's image list; iterating the segment
        # dict itself yielded its string keys.
        photoRecords = [fill_photo_record(image)
                        for seg in segments for image in seg["images"]]
        post_batch_photos_in_album.sync(client, body=PostBatchPhotosInAlbumBody.from_dict({
            "album": PostBatchPhotosInAlbumBodyAlbum.from_dict({
                "title": dir_name,
                "year": 2025
            }),
            "contents": photoRecords
        }))
    else:
        store_json_path = os.path.join(directory, 'store.geo.json')
        with open(store_json_path, 'w') as f:
            json.dump(store_data, f, indent=2)
        print(f"Created {store_json_path}")
def fill_photo_record(image):
    """Convert one store-entry image dict into an API contents item.

    *image* is a dict built in create_store_json with keys 'file',
    'timestamp' and 'metadata' (width/height/orientation plus, when GPS
    data was present, 'region').
    """
    record = PostBatchPhotosInAlbumBodyContentsItem()
    # BUG FIX: entries are dicts, so use item access (the old attribute
    # access raised AttributeError), and height was copied from width.
    metadata = image["metadata"]
    record.file_name = image["file"]
    record.date_created = image["timestamp"]
    record.date_modified = image["timestamp"]
    record.width = metadata["width"]
    record.height = metadata["height"]
    record.orientation = metadata["orientation"]
    # 'region' only exists for geotagged photos; default to None otherwise.
    record.region = metadata.get("region")
    return record
def main():
    """Main function to traverse directories and generate store.json files."""
    parser = argparse.ArgumentParser(description="Generate store.json for photo directories.")
    parser.add_argument("target_directory", help="The root directory to process.")
    parser.add_argument("--date-source", choices=['exif', 'modtime'], default='exif',
                        help="Source for image date (exif or modtime). Defaults to exif.")
    parser.add_argument("--single-dir", action='store_true',
                        help="whether to operate on only a single directory")
    parser.add_argument("--backend-api", default=None,
                        help="use the api client rather than saving json to filesystem")
    args = parser.parse_args()

    root_dir = args.target_directory
    if not os.path.isdir(root_dir):
        print(f"Error: {root_dir} is not a valid directory.")
        sys.exit(1)

    # Either process just the root, or walk every subdirectory.
    if args.single_dir:
        create_store_json(root_dir, args.date_source, args.backend_api)
        return
    for dirpath, _, _ in os.walk(root_dir):
        create_store_json(dirpath, args.date_source, args.backend_api)

if __name__ == "__main__":
    main()
···11-import type { OpenAPIHono } from '@hono/zod-openapi'
21import type { AppBindings } from '@/types.ts'
22+import type { OpenAPIHono } from '@hono/zod-openapi'
3344+import env from '@/env.ts'
55+import openApiConf from '@/openapi.config.ts'
66+import { ErrorHandler } from "hono"
77+import { INTERNAL_SERVER_ERROR } from "stoker/http-status-codes"
48import notFound from 'stoker/middlewares/not-found'
55-import onError from 'stoker/middlewares/on-error'
66-import openApiConf from '@/openapi.config.ts'
77-import { allAlbums, createAlbum } from './routes.ts'
99+import { routes as albumSegementedRoutes } from './albums-segmented.ts'
1010+import { routes as albumRoutes } from './albums.ts'
1111+import { routes as photoRoutes } from './photos.ts'
812913export type App = Pick<OpenAPIHono<AppBindings>, 'openapi'>
10141515+export type Passthrough = (a: App) => App
1616+1117interface Routing {
1212- (r: (a: App) => App): PhotoAPI
1818+ (r: Passthrough): PhotoAPI
1319}
14201521export class PhotoAPI {
···21272228 configureRoutes(): PhotoAPI {
2329 return this
2424- .addRoute(allAlbums)
2525- .addRoute(createAlbum)
2626- }
2727-2828- private addRoute: Routing = (r) => {
2929- this.app = r(this.app) as OpenAPIHono<AppBindings>
3030- return this
3030+ .addRoutes(albumRoutes)
3131+ .addRoutes(photoRoutes)
3232+ .addRoutes(albumSegementedRoutes)
3133 }
32343335 serve() {
3436 Deno.serve(this.app
3537 .notFound(notFound)
3636- .onError(onError)
3838+ .onError(this.onError)
3739 .fetch)
3840 }
3941···4143 const doc = this.app.getOpenAPI31Document(openApiConf)
4244 Deno.writeTextFileSync(path, JSON.stringify(doc, null, 2))
4345 }
4646+4747+ private addRoute: Routing = (r) => {
4848+ this.app = r(this.app) as OpenAPIHono<AppBindings>
4949+ return this
5050+ }
5151+5252+ addRoutes(routies: Array<Passthrough>): PhotoAPI {
5353+ routies.forEach(this.addRoute)
5454+ return this
5555+ }
5656+private onError: ErrorHandler<AppBindings> = (err, c) => {
5757+ c.var.logger.error(err.message)
5858+ c.var.logger.error(err.stack)
5959+ return c.json(
6060+ {
6161+ message: err.message,
6262+ stack: env.NODE_ENV === "production" ? void 0 : err.stack
6363+ },
6464+ INTERNAL_SERVER_ERROR
6565+ );
6666+};
4467}
+53
server/src/common.ts
···11+import { createSchemaFactory } from 'drizzle-zod';
22+33+export const { createInsertSchema } = createSchemaFactory({
44+ coerce: {
55+ date: true,
66+ },
77+})
88+99+export async function batch<T>(
1010+ items: T[],
1111+ batchSize: number,
1212+ consumer: (batch: T[]) => Promise<void>
1313+): Promise<void> {
1414+ for (let i = 0; i < items.length; i += batchSize) {
1515+ const batch = items.slice(i, i + batchSize);
1616+ // Wait for the current batch to finish before starting the next
1717+ await consumer(batch);
1818+ }
1919+}
2020+2121+/**
2222+ * Processes an array in batches and accumulates the results.
2323+ * * @template T - The type of input items
2424+ * @template R - The type of individual processed results
2525+ * @param items - The source array
2626+ * @param batchSize - Size of each batch
2727+ * @param processor - Function that processes a batch and returns an array of results
2828+ * @returns A single flattened array of all results
2929+ */
3030+export async function mapInBatches<T, R>(
3131+ items: T[],
3232+ batchSize: number,
3333+ processor: (batch: T[]) => Promise<R[]>
3434+): Promise<R[]> {
3535+ const allResults: R[] = [];
3636+3737+ for (let i = 0; i < items.length; i += batchSize) {
3838+ const batch = items.slice(i, i + batchSize);
3939+4040+ // Execute the processor for the current chunk
4141+ const batchResult = await processor(batch);
4242+4343+ // Accumulate the results into the main list
4444+ allResults.push(...batchResult);
4545+ }
4646+4747+ return allResults;
4848+}
4949+5050+export function sole<I>(list: I[]): I {
5151+ const [val] = list
5252+ return val
5353+}
+3-3
server/src/db/index.ts
···11-import { drizzle } from 'drizzle-orm/libsql'
21import env from '@/env.ts'
33-import * as schema from './schema/index.ts'
22+import { drizzle } from 'drizzle-orm/libsql'
33+import { relations } from './schema/index.ts'
4455-const db = drizzle(env.DATABASE_URL, { schema })
55+const db = drizzle(env.DATABASE_URL, { relations })
6677export default db