An encrypted personal cloud built on the AT Protocol.

Add web file upload with client-side encryption [CL-146]

Encrypt files client-side via WASM crypto worker, upload encrypted blob
to PDS, create document record with encrypted metadata, and update the
parent directory. Also fixes DPoP nonce detection in authenticatedRequest
to check WWW-Authenticate instead of just header presence, preventing
expired-token 401s from being misrouted as nonce challenges.

sans-self.org b5329c1b 6af17a27

Waiting for spindle ...
+297 -44
+1
CHANGELOG.md
··· 12 12 - Remove bearer token authentication fallback from AppView [#26](https://issues.opake.app/issues/26.html) 13 13 14 14 ### Added 15 + - Add web file upload with client-side encryption [#146](https://issues.opake.app/issues/146.html) 15 16 - Integrate app store loading tracker into document fetching and downloads [#251](https://issues.opake.app/issues/251.html) 16 17 - Add web file download with client-side decryption [#147](https://issues.opake.app/issues/147.html) 17 18 - Add web file browser with tag filtering [#145](https://issues.opake.app/issues/145.html)
+3 -3
crates/opake-appview/src/db/db_tests.rs
··· 216 216 let db = test_db(); 217 217 218 218 // No cursor initially 219 - let initial = db.with_conn(|c| cursor::load_cursor(c)).unwrap(); 219 + let initial = db.with_conn(cursor::load_cursor).unwrap(); 220 220 assert!(initial.is_none()); 221 221 222 222 // Save and load 223 223 db.with_conn(|c| cursor::save_cursor(c, 1709330400000000)) 224 224 .unwrap(); 225 - let loaded = db.with_conn(|c| cursor::load_cursor(c)).unwrap(); 225 + let loaded = db.with_conn(cursor::load_cursor).unwrap(); 226 226 assert_eq!(loaded, Some(1709330400000000)); 227 227 228 228 // Update 229 229 db.with_conn(|c| cursor::save_cursor(c, 1709330500000000)) 230 230 .unwrap(); 231 - let updated = db.with_conn(|c| cursor::load_cursor(c)).unwrap(); 231 + let updated = db.with_conn(cursor::load_cursor).unwrap(); 232 232 assert_eq!(updated, Some(1709330500000000)); 233 233 }
+1 -1
crates/opake-cli/src/commands/shared.rs
··· 74 74 75 75 fn entry(recipient: &str, doc: &str) -> GrantEntry { 76 76 GrantEntry { 77 - uri: format!("at://did:plc:owner/app.opake.grant/g1"), 77 + uri: "at://did:plc:owner/app.opake.grant/g1".to_string(), 78 78 document: doc.into(), 79 79 recipient: recipient.into(), 80 80 encrypted_metadata: dummy_encrypted_metadata(),
+1 -10
crates/opake-core/src/directories/move_entry_tests.rs
··· 1 1 use super::*; 2 - use crate::client::{HttpResponse, RequestBody}; 2 + use crate::client::RequestBody; 3 3 use crate::records::Directory; 4 4 use crate::test_utils::MockTransport; 5 5 ··· 25 25 uri: DOC_URI.to_string(), 26 26 kind: EntryKind::Document, 27 27 name: "beach.jpg".to_string(), 28 - parent_uri: parent_uri.map(String::from), 29 - } 30 - } 31 - 32 - fn source_dir(uri: &str, name: &str, parent_uri: Option<&str>) -> ResolvedPath { 33 - ResolvedPath { 34 - uri: uri.to_string(), 35 - kind: EntryKind::Directory, 36 - name: name.to_string(), 37 28 parent_uri: parent_uri.map(String::from), 38 29 } 39 30 }
+1 -1
crates/opake-core/src/documents/download.rs
··· 181 181 use crate::test_utils::MockTransport; 182 182 use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; 183 183 184 - use super::super::tests::{dummy_encrypted_metadata, mock_client, TEST_DID, TEST_URI}; 184 + use super::super::tests::{mock_client, TEST_DID, TEST_URI}; 185 185 186 186 fn test_keypair() -> (X25519PublicKey, X25519PrivateKey) { 187 187 let secret = crypto::X25519DalekStaticSecret::random_from_rng(OsRng);
+1 -2
crates/opake-core/src/documents/download_grant.rs
··· 107 107 use crate::client::HttpResponse; 108 108 use crate::crypto::{OsRng, X25519PublicKey}; 109 109 use crate::records::{ 110 - AtBytes, BlobRef, CidLink, DirectEncryption, EncryptedMetadata, Encryption, 111 - EncryptionEnvelope, 110 + AtBytes, BlobRef, CidLink, DirectEncryption, Encryption, EncryptionEnvelope, 112 111 }; 113 112 use crate::test_utils::{dummy_encrypted_metadata, MockTransport}; 114 113 use base64::{engine::general_purpose::STANDARD as BASE64, Engine};
+1 -3
crates/opake-core/src/documents/download_keyring_tests.rs
··· 1 1 use super::*; 2 2 use crate::client::HttpResponse; 3 3 use crate::crypto::{OsRng, X25519DalekPublicKey, X25519DalekStaticSecret}; 4 - use crate::records::{ 5 - AtBytes, BlobRef, CidLink, EncryptedMetadata, KeyringEncryption, KeyringRef, WrappedKey, 6 - }; 4 + use crate::records::{AtBytes, BlobRef, CidLink, KeyringEncryption, KeyringRef, WrappedKey}; 7 5 use crate::test_utils::{dummy_encrypted_metadata, MockTransport}; 8 6 use base64::{engine::general_purpose::STANDARD as BASE64, Engine}; 9 7
+2 -2
crates/opake-core/src/keyrings/remove_member_tests.rs
··· 2 2 use crate::client::{HttpResponse, LegacySession, RequestBody, Session, XrpcClient}; 3 3 use crate::crypto::{self, OsRng, X25519DalekPublicKey, X25519DalekStaticSecret}; 4 4 use crate::records::{AtBytes, Keyring, WrappedKey}; 5 - use crate::test_utils::{dummy_encrypted_metadata, MockTransport}; 5 + use crate::test_utils::MockTransport; 6 6 7 7 const TEST_DID: &str = "did:plc:owner"; 8 8 const KEYRING_URI: &str = "at://did:plc:owner/app.opake.keyring/kr1"; ··· 24 24 } 25 25 26 26 fn two_member_keyring() -> (Keyring, ContentKey) { 27 - let members_keys = [ 27 + let _members_keys = [ 28 28 (TEST_DID, &test_keypair().0), 29 29 ("did:plc:bob", &test_keypair().0), 30 30 ];
+5
crates/opake-wasm/src/lib.rs
··· 41 41 } 42 42 } 43 43 44 + #[wasm_bindgen(js_name = schemaVersion)] 45 + pub fn schema_version() -> u32 { 46 + opake_core::records::SCHEMA_VERSION 47 + } 48 + 44 49 #[wasm_bindgen(js_name = generateContentKey)] 45 50 pub fn generate_content_key() -> Vec<u8> { 46 51 let key = opake_core::crypto::generate_content_key(&mut OsRng);
+114 -19
web/src/lib/api.ts
··· 2 2 3 3 import type { OAuthSession, Session } from "@/lib/storageTypes"; 4 4 import type { TokenResponse } from "@/lib/oauth"; 5 + import type { BlobRef } from "@/lib/pdsTypes"; 5 6 import { getCryptoWorker } from "@/lib/worker"; 6 7 import { IndexedDbStorage } from "@/lib/indexeddbStorage"; 7 8 ··· 58 59 url: string; 59 60 method: string; 60 61 headers?: Record<string, string>; 61 - body?: string; 62 + body?: BodyInit; 62 63 label: string; 63 64 } 64 65 ··· 79 80 80 81 let response = await fetch(url, { method, headers, body }); 81 82 82 - // DPoP nonce retry — the PDS has a different nonce than the AS. 83 + // Always capture the latest PDS nonce — it may differ from the AS nonce. 84 + if (session.type === "oauth") { 85 + const nonce = response.headers.get("dpop-nonce"); 86 + if (nonce) session.dpopNonce = nonce; 87 + } 88 + 89 + // DPoP nonce retry — the PDS explicitly challenged us for a nonce. 83 90 if (session.type === "oauth" && requiresNonceRetry(response)) { 91 + await attachDpopAuth(headers, session, method, url); 92 + response = await fetch(url, { method, headers, body }); 93 + 84 94 const nonce = response.headers.get("dpop-nonce"); 85 - if (nonce) { 86 - session.dpopNonce = nonce; 87 - await attachDpopAuth(headers, session, method, url); 88 - response = await fetch(url, { method, headers, body }); 89 - } 95 + if (nonce) session.dpopNonce = nonce; 90 96 } 91 97 92 98 // Token expired — refresh and retry once. 
··· 97 103 await attachDpopAuth(headers, session, method, url); 98 104 response = await fetch(url, { method, headers, body }); 99 105 106 + const nonce = response.headers.get("dpop-nonce"); 107 + if (nonce) session.dpopNonce = nonce; 108 + 100 109 // The refreshed token might also need a nonce retry on the PDS 101 110 if (requiresNonceRetry(response)) { 102 - const nonce = response.headers.get("dpop-nonce"); 103 - if (nonce) { 104 - session.dpopNonce = nonce; 105 - await attachDpopAuth(headers, session, method, url); 106 - response = await fetch(url, { method, headers, body }); 107 - } 111 + await attachDpopAuth(headers, session, method, url); 112 + response = await fetch(url, { method, headers, body }); 108 113 } 109 114 } 110 115 } ··· 175 180 } 176 181 177 182 // --------------------------------------------------------------------------- 183 + // Authenticated blob upload (raw bytes → BlobRef) 184 + // --------------------------------------------------------------------------- 185 + 186 + interface BlobUploadParams { 187 + pdsUrl: string; 188 + data: Uint8Array; 189 + } 190 + 191 + export async function authenticatedBlobUpload( 192 + params: BlobUploadParams, 193 + session: Session, 194 + ): Promise<BlobRef> { 195 + const { pdsUrl, data } = params; 196 + const url = `${pdsUrl.replace(/\/$/, "")}/xrpc/com.atproto.repo.uploadBlob`; 197 + 198 + // Normalize to a real Uint8Array — Comlink may deliver typed arrays as plain Arrays 199 + const bytes = new Uint8Array(data); 200 + 201 + const response = await authenticatedRequest( 202 + { 203 + url, 204 + method: "POST", 205 + headers: { "Content-Type": "application/octet-stream" }, 206 + body: new Blob([bytes]), 207 + label: "uploadBlob", 208 + }, 209 + session, 210 + ); 211 + 212 + const result = (await response.json()) as { blob: BlobRef }; 213 + return result.blob; 214 + } 215 + 216 + // --------------------------------------------------------------------------- 217 + // Authenticated record creation + update 218 + 
// --------------------------------------------------------------------------- 219 + 220 + interface RecordRef { 221 + uri: string; 222 + cid: string; 223 + } 224 + 225 + interface CreateRecordParams { 226 + pdsUrl: string; 227 + did: string; 228 + collection: string; 229 + record: unknown; 230 + } 231 + 232 + export async function authenticatedCreateRecord( 233 + params: CreateRecordParams, 234 + session: Session, 235 + ): Promise<RecordRef> { 236 + const { pdsUrl, did, collection, record } = params; 237 + return (await authenticatedXrpc( 238 + { 239 + pdsUrl, 240 + lexicon: "com.atproto.repo.createRecord", 241 + method: "POST", 242 + body: { repo: did, collection, record: { $type: collection, ...(record as object) } }, 243 + }, 244 + session, 245 + )) as RecordRef; 246 + } 247 + 248 + interface PutRecordParams { 249 + pdsUrl: string; 250 + did: string; 251 + collection: string; 252 + rkey: string; 253 + record: unknown; 254 + } 255 + 256 + export async function authenticatedPutRecord( 257 + params: PutRecordParams, 258 + session: Session, 259 + ): Promise<RecordRef> { 260 + const { pdsUrl, did, collection, rkey, record } = params; 261 + return (await authenticatedXrpc( 262 + { 263 + pdsUrl, 264 + lexicon: "com.atproto.repo.putRecord", 265 + method: "POST", 266 + body: { repo: did, collection, rkey, record: { $type: collection, ...(record as object) } }, 267 + }, 268 + session, 269 + )) as RecordRef; 270 + } 271 + 272 + // --------------------------------------------------------------------------- 178 273 // Token refresh 179 274 // --------------------------------------------------------------------------- 180 275 ··· 254 349 return true; 255 350 } 256 351 257 - /** Check if a response is a DPoP nonce challenge (400 use_dpop_nonce or 401 with nonce header). */ 352 + /** Check if a response is an explicit DPoP nonce challenge (WWW-Authenticate contains use_dpop_nonce). 
*/ 258 353 function requiresNonceRetry(response: Response): boolean { 259 - if (response.headers.has("dpop-nonce")) { 260 - if (response.status === 401) return true; 261 - if (response.status === 400) return true; 262 - } 263 - return false; 354 + // The PDS always includes dpop-nonce on authenticated endpoints, so checking just 355 + // header presence incorrectly treats expired-token 401s as nonce challenges. 356 + // Only retry when the server explicitly says the nonce is the problem. 357 + const wwwAuth = response.headers.get("www-authenticate") ?? ""; 358 + return wwwAuth.includes("use_dpop_nonce"); 264 359 } 265 360 266 361 async function attachDpopAuth(
+108
web/src/lib/upload.ts
··· 1 + // Upload orchestration — encrypt client-side, upload blob, create record, add to directory. 2 + 3 + import { 4 + authenticatedBlobUpload, 5 + authenticatedCreateRecord, 6 + authenticatedXrpc, 7 + authenticatedPutRecord, 8 + } from "@/lib/api"; 9 + import { uint8ArrayToBase64 } from "@/lib/encoding"; 10 + import { rkeyFromUri } from "@/lib/atUri"; 11 + import { getCryptoWorker } from "@/lib/worker"; 12 + import type { DocumentRecord, DirectoryRecord, PdsRecord } from "@/lib/pdsTypes"; 13 + import type { Session } from "@/lib/storageTypes"; 14 + 15 + export async function uploadDocument( 16 + file: File, 17 + directoryUri: string | null, 18 + pdsUrl: string, 19 + did: string, 20 + publicKey: Uint8Array, 21 + session: Session, 22 + ): Promise<string> { 23 + const worker = getCryptoWorker(); 24 + const plaintext = new Uint8Array(await file.arrayBuffer()); 25 + 26 + // 1. Generate content key + encrypt blob 27 + const contentKey = await worker.generateContentKey(); 28 + const blobPayload = await worker.encryptBlob(contentKey, plaintext); 29 + 30 + // 2. Upload blob + wrap key + encrypt metadata — all independent, run concurrently 31 + const mimeType = file.type || "application/octet-stream"; 32 + const [blobRef, wrappedKey, encryptedMeta] = await Promise.all([ 33 + authenticatedBlobUpload({ pdsUrl, data: blobPayload.ciphertext }, session), 34 + worker.wrapKey(contentKey, publicKey, did), 35 + worker.encryptMetadata(contentKey, { 36 + name: file.name, 37 + mimeType, 38 + size: plaintext.byteLength, 39 + }), 40 + ]); 41 + 42 + // 5. 
Build document record 43 + const opakeVersion = await worker.schemaVersion(); 44 + const now = new Date().toISOString(); 45 + 46 + const documentRecord: DocumentRecord = { 47 + opakeVersion, 48 + blob: blobRef, 49 + encryption: { 50 + $type: "app.opake.document#directEncryption", 51 + envelope: { 52 + algo: "aes-256-gcm", 53 + nonce: { $bytes: uint8ArrayToBase64(blobPayload.nonce) }, 54 + keys: [wrappedKey], 55 + }, 56 + }, 57 + encryptedMetadata: { 58 + ciphertext: { $bytes: uint8ArrayToBase64(encryptedMeta.ciphertext) }, 59 + nonce: { $bytes: uint8ArrayToBase64(encryptedMeta.nonce) }, 60 + }, 61 + visibility: "private", 62 + createdAt: now, 63 + modifiedAt: null, 64 + }; 65 + 66 + // 6. Create document record on PDS 67 + const { uri: documentUri } = await authenticatedCreateRecord( 68 + { pdsUrl, did, collection: "app.opake.document", record: documentRecord }, 69 + session, 70 + ); 71 + 72 + // 7. Add entry to parent directory 73 + await addEntryToDirectory(directoryUri, documentUri, now, pdsUrl, did, session); 74 + 75 + return documentUri; 76 + } 77 + 78 + async function addEntryToDirectory( 79 + directoryUri: string | null, 80 + entryUri: string, 81 + modifiedAt: string, 82 + pdsUrl: string, 83 + did: string, 84 + session: Session, 85 + ): Promise<void> { 86 + const rkey = directoryUri ? 
rkeyFromUri(directoryUri) : "self"; 87 + 88 + // Fetch current directory record 89 + const response = (await authenticatedXrpc( 90 + { 91 + pdsUrl, 92 + lexicon: `com.atproto.repo.getRecord?repo=${encodeURIComponent(did)}&collection=app.opake.directory&rkey=${encodeURIComponent(rkey)}`, 93 + }, 94 + session, 95 + )) as PdsRecord<DirectoryRecord>; 96 + 97 + // Append new entry 98 + const updatedRecord: DirectoryRecord = { 99 + ...response.value, 100 + entries: [...response.value.entries, entryUri], 101 + modifiedAt, 102 + }; 103 + 104 + await authenticatedPutRecord( 105 + { pdsUrl, did, collection: "app.opake.directory", rkey, record: updatedRecord }, 106 + session, 107 + ); 108 + }
+23 -2
web/src/routes/cabinet/files/route.tsx
··· 1 - import { useEffect } from "react"; 1 + import { useEffect, useRef } from "react"; 2 2 import { createFileRoute, Link, Outlet, useMatch, useNavigate } from "@tanstack/react-router"; 3 3 import { 4 4 ListBulletsIcon, ··· 27 27 28 28 function FileBrowserLayout() { 29 29 const navigate = useNavigate(); 30 + const fileInputRef = useRef<HTMLInputElement>(null); 31 + const uploadFile = useDocumentsStore((s) => s.uploadFile); 30 32 31 33 // Determine current directory from child splat route params 32 34 const splatMatch = useMatch({ ··· 103 105 setTagFilters(current); 104 106 }; 105 107 108 + const handleFileSelected = (e: React.ChangeEvent<HTMLInputElement>) => { 109 + const file = e.target.files?.[0]; 110 + if (!file) return; 111 + void uploadFile(file, currentDirectoryUri); 112 + // Reset so re-selecting the same file triggers onChange again 113 + e.target.value = ""; 114 + }; 115 + 106 116 const handleClose = () => { 107 117 if (segments.length > 1) { 108 118 void navigate({ ··· 134 144 </> 135 145 } 136 146 items={[ 137 - { icon: UploadSimpleIcon, label: "Upload file" }, 147 + { 148 + icon: UploadSimpleIcon, 149 + label: "Upload file", 150 + onClick: () => fileInputRef.current?.click(), 151 + }, 138 152 { icon: FolderIcon, label: "New folder" }, 139 153 { icon: FileTextIcon, label: "New document" }, 140 154 { icon: BookOpenIcon, label: "New note" }, ··· 190 204 onClear={() => setTagFilters([])} 191 205 /> 192 206 {documentsLoading ? <PanelSkeleton /> : <Outlet />} 207 + <input 208 + ref={fileInputRef} 209 + type="file" 210 + className="hidden" 211 + onChange={handleFileSelected} 212 + aria-hidden="true" 213 + /> 193 214 </PanelShell> 194 215 ); 195 216 }
+25
web/src/stores/documents/store.ts
··· 16 16 } from "@/lib/pdsTypes"; 17 17 import { rkeyFromUri } from "@/lib/atUri"; 18 18 import { downloadDocument } from "@/lib/download"; 19 + import { uploadDocument } from "@/lib/upload"; 19 20 import { storage, fetchAllRecords } from "./fetch"; 20 21 import { decryptDocumentRecord, markDecryptionFailed } from "./decrypt"; 21 22 import { directoryItemFromSnapshot, documentPlaceholder, applyTagFilter } from "./file-items"; ··· 45 46 readonly setTagFilters: (tags: string[]) => void; 46 47 readonly setViewMode: (mode: "list" | "grid") => void; 47 48 readonly downloadFile: (documentUri: string) => Promise<void>; 49 + readonly uploadFile: (file: File, directoryUri: string | null) => Promise<void>; 48 50 readonly ancestorsOf: (directoryUri: string | null) => readonly DirectoryAncestor[]; 49 51 } 50 52 ··· 253 255 } catch (error) { 254 256 console.error("[documents] download failed:", documentUri, error); 255 257 } finally { 258 + done(); 259 + } 260 + }, 261 + 262 + uploadFile: async (file: File, directoryUri: string | null) => { 263 + const authState = useAuthStore.getState(); 264 + if (authState.session.status !== "active") return; 265 + 266 + const done = loading("upload"); 267 + 268 + try { 269 + const { did, pdsUrl } = authState.session; 270 + const session = await storage.loadSession(did); 271 + const identity = await storage.loadIdentity(did); 272 + const publicKey = base64ToUint8Array(identity.public_key); 273 + 274 + await uploadDocument(file, directoryUri, pdsUrl, did, publicKey, session); 275 + 276 + // Refresh the entire tree so the new file appears 277 + done(); 278 + await get().fetchAll(); 279 + } catch (error) { 280 + console.error("[documents] upload failed:", error); 256 281 done(); 257 282 } 258 283 },
+11 -1
web/src/workers/crypto.worker.ts
··· 1 1 import * as Comlink from "comlink"; 2 2 import init, { 3 3 bindingCheck, 4 + schemaVersion as wasmSchemaVersion, 4 5 generateContentKey, 5 6 encryptBlob, 6 7 decryptBlob, ··· 8 9 unwrapKey, 9 10 wrapContentKeyForKeyring, 10 11 unwrapContentKeyFromKeyring, 12 + encryptMetadata as wasmEncryptMetadata, 11 13 decryptMetadata as wasmDecryptMetadata, 12 14 decryptDirectoryMetadata as wasmDecryptDirectoryMetadata, 13 15 generateDpopKeyPair as wasmGenerateDpopKeyPair, ··· 51 53 52 54 bindingCheck(): string { 53 55 return bindingCheck(); 56 + }, 57 + 58 + schemaVersion(): number { 59 + return wasmSchemaVersion(); 54 60 }, 55 61 56 62 generateContentKey(): Uint8Array { ··· 81 87 return unwrapContentKeyFromKeyring(wrapped, groupKey); 82 88 }, 83 89 84 - // Metadata decryption 90 + // Metadata encryption / decryption 91 + 92 + encryptMetadata(key: Uint8Array, metadata: DocumentMetadata): EncryptedPayload { 93 + return wasmEncryptMetadata(key, metadata) as EncryptedPayload; 94 + }, 85 95 86 96 decryptMetadata(key: Uint8Array, ciphertext: Uint8Array, nonce: Uint8Array): DocumentMetadata { 87 97 return wasmDecryptMetadata(key, ciphertext, nonce) as DocumentMetadata;