diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index fad12709d3e..06c5df54814 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -154,7 +154,7 @@ pnpm desktop:build # Build desktop application ### Test Organization - **Server tests** (`apps/server/spec/`): Must run sequentially (shared database state) - **Client tests** (`apps/client/src/`): Can run in parallel -- **E2E tests** (`apps/server-e2e/`): Use Playwright for integration testing +- **E2E tests** (`packages/trilium-e2e/`): Shared Playwright tests, run via `pnpm --filter server e2e` or `pnpm --filter client-standalone e2e` - **ETAPI tests** (`apps/server/spec/etapi/`): External API contract tests **Pattern**: When adding new API endpoints, add tests in `spec/etapi/` following existing patterns (see `search.spec.ts`). diff --git a/.github/workflows/deploy-app.yml b/.github/workflows/deploy-app.yml new file mode 100644 index 00000000000..c94cb63043b --- /dev/null +++ b/.github/workflows/deploy-app.yml @@ -0,0 +1,70 @@ +name: Deploy Standalone App + +on: + # Trigger on push to main branch + push: + branches: + - standalone + # Only run when app files change + paths: + - 'apps/client/**' + - 'apps/client-standalone/**' + - 'packages/trilium-core/**' + - '.github/workflows/deploy-app.yml' + + # Allow manual triggering from Actions tab + workflow_dispatch: + + # Run on pull requests for preview deployments + pull_request: + paths: + - 'apps/client/**' + - 'apps/client-standalone/**' + - 'packages/trilium-core/**' + - '.github/workflows/deploy-app.yml' +jobs: + build-and-deploy: + name: Build and Deploy App + runs-on: ubuntu-latest + timeout-minutes: 10 + + # Required permissions for deployment + permissions: + contents: read + deployments: write + pull-requests: write # For PR preview comments + id-token: write # For OIDC authentication (if needed) + + steps: + - name: Checkout Repository + uses: actions/checkout@v6 + + - name: Setup pnpm + uses: 
pnpm/action-setup@v4 + + - name: Setup Node.js + uses: actions/setup-node@v6 + with: + node-version: '24' + cache: 'pnpm' + + - name: Install Dependencies + run: pnpm install --frozen-lockfile + + - name: Update build info + run: pnpm run chore:update-build-info + + - name: Trigger build of app + run: pnpm --filter=client-standalone build + + - name: Deploy + uses: ./.github/actions/deploy-to-cloudflare-pages + if: github.repository == vars.REPO_MAIN + with: + project_name: "trilium-app" + comment_body: "🖥️ App preview is ready" + production_url: "https://app.triliumnotes.org" + deploy_dir: "apps/client-standalone/dist" + cloudflare_api_token: ${{ secrets.CLOUDFLARE_API_TOKEN }} + cloudflare_account_id: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 68248496bb0..6225074c243 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -3,10 +3,12 @@ on: push: branches: - main + - standalone - "release/*" pull_request: branches: - main + - standalone - "release/*" concurrency: @@ -63,13 +65,20 @@ jobs: path: apps/server/test-output/vitest/html/ retention-days: 30 + - name: Run the client-standalone tests + # Runs the same trilium-core spec set as the server suite, but in + # happy-dom + sql.js WASM via BrowserSqlProvider (see + # apps/client-standalone/src/test_setup.ts). Catches differences + # between the Node-side and browser-side runtimes. 
+ run: pnpm run --filter=client-standalone test + - name: Run CKEditor e2e tests run: | pnpm run --filter=ckeditor5-mermaid test pnpm run --filter=ckeditor5-math test - name: Run the rest of the tests - run: pnpm run --filter=\!client --filter=\!server --filter=\!ckeditor5-mermaid --filter=\!ckeditor5-math test + run: pnpm run --filter=\!client --filter=\!client-standalone --filter=\!server --filter=\!ckeditor5-mermaid --filter=\!ckeditor5-math test build_docker: name: Build Docker image diff --git a/.github/workflows/main-docker.yml b/.github/workflows/main-docker.yml index e8eb3228f47..a148f11ad17 100644 --- a/.github/workflows/main-docker.yml +++ b/.github/workflows/main-docker.yml @@ -2,6 +2,7 @@ on: push: branches: - "main" + - "standalone" - "feature/update**" - "feature/server_esm**" paths-ignore: @@ -82,7 +83,7 @@ jobs: require-healthy: true - name: Run Playwright tests - run: TRILIUM_DOCKER=1 TRILIUM_PORT=8082 pnpm --filter=server-e2e e2e + run: TRILIUM_DOCKER=1 TRILIUM_PORT=8082 pnpm --filter=server e2e - name: Upload Playwright trace if: failure() diff --git a/.github/workflows/mobile.yml b/.github/workflows/mobile.yml new file mode 100644 index 00000000000..1fc19b06492 --- /dev/null +++ b/.github/workflows/mobile.yml @@ -0,0 +1,57 @@ +name: Mobile + +on: + push: + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build_android: + name: Build Android APK + runs-on: ubuntu-latest + steps: + - name: Checkout the repository + uses: actions/checkout@v6 + + - uses: pnpm/action-setup@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: 24 + cache: "pnpm" + + - name: Set up JDK 21 + uses: actions/setup-java@v5 + with: + distribution: temurin + java-version: 21 + + - name: Set up Gradle + uses: gradle/actions/setup-gradle@v5 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Update build info + run: pnpm run 
chore:update-build-info + + - name: Build client-standalone (webDir for Capacitor) + run: pnpm --filter @triliumnext/mobile build + + - name: Sync Capacitor Android project + run: pnpm --filter @triliumnext/mobile exec cap sync android + + - name: Assemble debug APK + working-directory: apps/mobile/android + run: ./gradlew assembleDebug --no-daemon + + - name: Upload APK + uses: actions/upload-artifact@v7 + with: + name: trilium-mobile-debug-apk + path: apps/mobile/android/app/build/outputs/apk/debug/*.apk + retention-days: 14 diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml index d3a0ab13293..f2dfe724f2e 100644 --- a/.github/workflows/playwright.yml +++ b/.github/workflows/playwright.yml @@ -14,7 +14,7 @@ permissions: contents: read jobs: - e2e: + e2e-server: strategy: fail-fast: false matrix: @@ -73,15 +73,66 @@ jobs: sleep 10 - name: Server end-to-end tests - run: pnpm --filter server-e2e e2e + run: pnpm --filter server e2e - name: Upload test report if: failure() uses: actions/upload-artifact@v7 with: name: e2e report ${{ matrix.arch }} - path: apps/server-e2e/test-output + path: apps/server/test-output - name: Kill the server if: always() run: pkill -f trilium || true + + e2e-standalone: + strategy: + fail-fast: false + matrix: + include: + - name: linux-x64 + os: ubuntu-22.04 + - name: linux-arm64 + os: ubuntu-24.04-arm + runs-on: ${{ matrix.os }} + name: Standalone E2E tests on ${{ matrix.name }} + env: + TRILIUM_DOCKER: 1 + TRILIUM_PORT: 8082 + steps: + - uses: actions/checkout@v6 + with: + filter: tree:0 + fetch-depth: 0 + + - uses: pnpm/action-setup@v5 + - uses: actions/setup-node@v6 + with: + node-version: 24 + cache: 'pnpm' + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Install Playwright browsers + run: pnpm exec playwright install --with-deps + + - name: Build standalone + run: TRILIUM_INTEGRATION_TEST=memory pnpm --filter client-standalone build + + - name: Start standalone preview 
server + run: | + cd apps/client-standalone + pnpm vite preview --port $TRILIUM_PORT --host 127.0.0.1 & + sleep 5 + + - name: Standalone end-to-end tests + run: pnpm --filter client-standalone e2e + + - name: Upload test report + if: failure() + uses: actions/upload-artifact@v7 + with: + name: standalone e2e report ${{ matrix.name }} + path: apps/client-standalone/test-output diff --git a/.gitignore b/.gitignore index 994679ebe5c..bb746d9ec00 100644 --- a/.gitignore +++ b/.gitignore @@ -51,3 +51,6 @@ upload site/ apps/*/coverage scripts/translation/.language*.json + +# AI +.claude/settings.local.json \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md index a395f985bfa..c99e5f74d26 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -6,68 +6,122 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co ## Overview -Trilium Notes is a hierarchical note-taking application with advanced features like synchronization, scripting, and rich text editing. It's built as a TypeScript monorepo using pnpm, with multiple applications and shared packages. +Trilium Notes is a hierarchical note-taking application with synchronization, scripting, and rich text editing. TypeScript monorepo using pnpm with multiple apps and shared packages. 
## Development Commands -### Setup -- `pnpm install` - Install all dependencies -- `corepack enable` - Enable pnpm if not available - -### Running Applications -- `pnpm run server:start` - Start development server (http://localhost:8080) -- `pnpm run server:start-prod` - Run server in production mode - -### Building -- `pnpm run client:build` - Build client application -- `pnpm run server:build` - Build server application -- `pnpm run electron:build` - Build desktop application - -### Testing -- `pnpm test:all` - Run all tests (parallel + sequential) -- `pnpm test:parallel` - Run tests that can run in parallel -- `pnpm test:sequential` - Run tests that must run sequentially (server, ckeditor5-mermaid, ckeditor5-math) -- `pnpm coverage` - Generate coverage reports - -## Architecture Overview - -### Monorepo Structure -- **apps/**: Runnable applications - - `client/` - Frontend application (shared by server and desktop) - - `server/` - Node.js server with web interface - - `desktop/` - Electron desktop application - - `web-clipper/` - Browser extension for saving web content - - Additional tools: `db-compare`, `dump-db`, `edit-docs` - -- **packages/**: Shared libraries - - `commons/` - Shared interfaces and utilities - - `ckeditor5/` - Custom rich text editor with Trilium-specific plugins - - `codemirror/` - Code editor customizations - - `highlightjs/` - Syntax highlighting - - Custom CKEditor plugins: `ckeditor5-admonition`, `ckeditor5-footnotes`, `ckeditor5-math`, `ckeditor5-mermaid` - -### Core Architecture Patterns - -#### Three-Layer Cache System -- **Becca** (Backend Cache): Server-side entity cache (`apps/server/src/becca/`) -- **Froca** (Frontend Cache): Client-side mirror of backend data (`apps/client/src/services/froca.ts`) -- **Shaca** (Share Cache): Optimized cache for shared/published notes (`apps/server/src/share/`) - -#### Entity System -Core entities are defined in `apps/server/src/becca/entities/`: -- `BNote` - Notes with content and metadata -- 
`BBranch` - Hierarchical relationships between notes (allows multiple parents) -- `BAttribute` - Key-value metadata attached to notes -- `BRevision` - Note version history -- `BOption` - Application configuration - -#### Widget-Based UI -Frontend uses a widget system (`apps/client/src/widgets/`): -- `BasicWidget` - Base class for all UI components -- `NoteContextAwareWidget` - Widgets that respond to note changes -- `RightPanelWidget` - Widgets displayed in the right panel +```bash +# Setup +corepack enable && pnpm install + +# Run +pnpm server:start # Dev server at http://localhost:8080 +pnpm desktop:start # Electron dev app +pnpm standalone:start # Standalone client dev + +# Build +pnpm client:build # Frontend +pnpm server:build # Backend +pnpm desktop:build # Electron + +# Test +pnpm test:all # All tests (parallel + sequential) +pnpm test:parallel # Client + most package tests +pnpm test:sequential # Server, ckeditor5-mermaid, ckeditor5-math (shared DB) +pnpm --filter server test # Single package tests +pnpm coverage # Coverage reports + +# Lint & Format +pnpm dev:linter-check # ESLint check +pnpm dev:linter-fix # ESLint fix +pnpm dev:format-check # Format check (stricter stylistic rules) +pnpm dev:format-fix # Format fix +pnpm typecheck # TypeScript type check across all projects +``` + +**Running a single test file**: `pnpm --filter server test spec/etapi/search.spec.ts` + +## Main Applications + +The four main apps share `packages/trilium-core/` for business logic but differ in runtime: + +- **client** (`apps/client/`): Preact frontend with jQuery widget system. Shared UI layer used by both server and desktop. +- **server** (`apps/server/`): Node.js backend (Express, better-sqlite3). Serves the client and provides REST/WebSocket APIs. +- **desktop** (`apps/desktop/`): Electron wrapper around server + client, running both in a single process. 
+- **standalone** (`apps/client-standalone/` + `apps/standalone-desktop/`): Runs the entire stack in the browser — server logic compiled to WASM via sql.js, executed in a service worker. No Node.js dependency at runtime. + +## Monorepo Structure + +``` +apps/ + client/ # Preact frontend (shared by server, desktop, standalone) + server/ # Node.js backend (Express, better-sqlite3) + desktop/ # Electron (bundles server + client) + client-standalone/ # Standalone client (WASM + service workers, no Node.js) + standalone-desktop/ # Standalone desktop variant + web-clipper/ # Browser extension + website/ # Project website + db-compare/, dump-db/, edit-docs/, build-docs/, icon-pack-builder/ + +packages/ + trilium-core/ # Core business logic: entities, services, SQL, sync + commons/ # Shared interfaces and utilities + trilium-e2e/ # Shared Playwright E2E tests + ckeditor5/ # Custom rich text editor bundle + codemirror/ # Code editor integration + highlightjs/ # Syntax highlighting + share-theme/ # Theme for shared/published notes + ckeditor5-admonition/, ckeditor5-footnotes/, ckeditor5-math/, ckeditor5-mermaid/ + ckeditor5-keyboard-marker/, express-partial-content/, pdfjs-viewer/, splitjs/ + turndown-plugin-gfm/ +``` + +Use `pnpm --filter ` to run commands in specific packages. + +## Core Architecture + +### Three-Layer Cache System + +All data access goes through cache layers — never bypass with direct DB queries: + +- **Becca** (`packages/trilium-core/src/becca/`): Server-side entity cache. Access via `becca.notes[noteId]`. +- **Froca** (`apps/client/src/services/froca.ts`): Client-side mirror synced via WebSocket. Access via `froca.getNote()`. +- **Shaca** (`apps/server/src/share/`): Optimized cache for shared/published notes. + +**Critical**: Always use cache methods, not direct DB writes. Cache methods create `EntityChange` records needed for synchronization. 
+ +### Entity System + +Core entities live in `packages/trilium-core/src/becca/entities/` (not `apps/server/`): + +- `BNote` — Notes with content and metadata +- `BBranch` — Multi-parent tree relationships (cloning supported) +- `BAttribute` — Key-value metadata (labels and relations) +- `BRevision` — Version history +- `BOption` — Application configuration +- `BBlob` — Binary content storage + +Entities extend `AbstractBeccaEntity` with built-in change tracking, hash generation, and date management. + +### Entity Change & Sync Protocol + +Every entity modification creates an `EntityChange` record driving sync: +1. Login with HMAC authentication (document secret + timestamp) +2. Push changes → Pull changes → Push again (conflict resolution) +3. Content hash verification with retry loop + +Sync services: `packages/trilium-core/src/services/sync.ts`, `syncMutexService`, `syncUpdateService`. + +### Widget-Based UI + +Frontend widgets in `apps/client/src/widgets/`: +- `BasicWidget` / `TypedBasicWidget` — Base classes (jQuery `this.$widget` for DOM) +- `NoteContextAwareWidget` — Responds to note changes +- `RightPanelWidget` — Sidebar widgets with position ordering - Type-specific widgets in `type_widgets/` directory +**Widget lifecycle**: `doRenderBody()` for initial render, `refreshWithNote()` for note changes, `entitiesReloadedEvent({loadResults})` for entity updates. Uses jQuery — don't mix React patterns. 
+ #### Reusable Preact Components Common UI components are available in `apps/client/src/widgets/react/` — prefer reusing these over creating custom implementations: - `NoItems` - Empty state placeholder with icon and message (use for "no results", "too many items", error states) @@ -77,42 +131,48 @@ Common UI components are available in `apps/client/src/widgets/react/` — prefe - `Checkbox`, `RadioButton` - Form controls - `CollapsibleSection` - Expandable content sections -#### API Architecture -- **Internal API**: REST endpoints in `apps/server/src/routes/api/` -- **ETAPI**: External API for third-party integrations (`apps/server/src/etapi/`) -- **WebSocket**: Real-time synchronization (`apps/server/src/services/ws.ts`) +Fluent builder pattern: `.child()`, `.class()`, `.css()` chaining with position-based ordering. + +### API Architecture + +- **Internal API** (`apps/server/src/routes/api/`): REST endpoints, trusts frontend +- **ETAPI** (`apps/server/src/etapi/`): External API with basic auth tokens — maintain backwards compatibility +- **WebSocket** (`apps/server/src/services/ws.ts`): Real-time sync + +### Platform Abstraction -### Key Files for Understanding Architecture +`packages/trilium-core/src/services/platform.ts` defines `PlatformProvider` interface with implementations in `apps/desktop/`, `apps/server/`, and `apps/client-standalone/`. Singleton via `initPlatform()`/`getPlatform()`. -1. **Application Entry Points**: - - `apps/server/src/main.ts` - Server startup - - `apps/client/src/desktop.ts` - Client initialization +**PlatformProvider** provides: +- `crash(message)` — Platform-specific fatal error handling +- `getEnv(key)` — Environment variable access (server/desktop use `process.env`, standalone maps URL query params like `?safeMode` → `TRILIUM_SAFE_MODE`) +- `isElectron`, `isMac`, `isWindows` — Platform detection flags -2. 
**Core Services**: - - `apps/server/src/becca/becca.ts` - Backend data management - - `apps/client/src/services/froca.ts` - Frontend data synchronization - - `apps/server/src/services/backend_script_api.ts` - Scripting API +**Critical rules for `trilium-core`**: +- **No `process.env` in core** — use `getPlatform().getEnv()` instead (not available in standalone/browser) +- **No `import path from "path"` in core** — Node's `path` module is externalized in browser builds. Use `packages/trilium-core/src/services/utils/path.ts` for `extname()`/`basename()` equivalents +- **No Node.js built-in modules in core** — core runs in both Node.js and the browser (standalone). Use platform-agnostic alternatives or platform providers +- **Platform detection via functions** — `isElectron()`, `isMac()`, `isWindows()` from `utils/index.ts` are functions (not constants) that call `getPlatform()`. They can only be called after `initializeCore()`, not at module top-level. If used in static definitions, wrap in a closure: `value: () => isWindows() ? "0.9" : "1.0"` +- **Barrel import caution** — `import { x } from "@triliumnext/core"` loads ALL core exports. Early-loading modules like `config.ts` should import specific subpaths (e.g. `@triliumnext/core/src/services/utils/index`) to avoid circular dependencies or initialization ordering issues +- **Electron IPC** — In desktop mode, client API calls use Electron IPC (not HTTP). The IPC handler in `apps/server/src/routes/electron.ts` must be registered via `utils.isElectron` from the **server's** utils (which correctly checks `process.versions["electron"]`), not from core's utils -3. **Database Schema**: - - `apps/server/src/assets/db/schema.sql` - Core database structure +### Binary Utilities -4. 
**Configuration**: - - `package.json` - Project dependencies and scripts +Use utilities from `packages/trilium-core/src/services/utils/binary.ts` for string/buffer conversions instead of manual `TextEncoder`/`TextDecoder` or `Buffer.from()` calls: -## Note Types and Features +- **`wrapStringOrBuffer(input)`** — Converts `string` to `Uint8Array`, returns `Uint8Array` unchanged. Use when a function expects `Uint8Array` but receives `string | Uint8Array`. +- **`unwrapStringOrBuffer(input)`** — Converts `Uint8Array` to `string`, returns `string` unchanged. Use when a function expects `string` but receives `string | Uint8Array`. +- **`encodeBase64(input)`** / **`decodeBase64(input)`** — Base64 encoding/decoding that works in both Node.js and browser. +- **`encodeUtf8(string)`** / **`decodeUtf8(buffer)`** — UTF-8 encoding/decoding. -Trilium supports multiple note types, each with specialized widgets: -- **Text**: Rich text with CKEditor5 (markdown import/export) -- **Code**: Syntax-highlighted code editing with CodeMirror -- **File**: Binary file attachments -- **Image**: Image display with editing capabilities -- **Canvas**: Drawing/diagramming with Excalidraw -- **Mermaid**: Diagram generation -- **Relation Map**: Visual note relationship mapping -- **Web View**: Embedded web pages -- **Doc/Book**: Hierarchical documentation structure +Import via `import { binary_utils } from "@triliumnext/core"` or directly from the module. -## Development Guidelines +### Database + +SQLite via `better-sqlite3`. SQL abstraction in `packages/trilium-core/src/services/sql/` with `DatabaseProvider` interface, prepared statement caching, and transaction support. 
+ +- Schema: `apps/server/src/assets/db/schema.sql` +- Migrations: `apps/server/src/migrations/YYMMDD_HHMM__description.sql` ### Testing Strategy - Server tests run sequentially due to shared database @@ -122,12 +182,6 @@ Trilium supports multiple note types, each with specialized widgets: - **Write concise tests**: Group related assertions together in a single test case rather than creating many one-shot tests - **Extract and test business logic**: When adding pure business logic (e.g., data transformations, migrations, validations), extract it as a separate function and always write unit tests for it -### Scripting System -Trilium provides powerful user scripting capabilities: -- Frontend scripts run in browser context -- Backend scripts run in Node.js context with full API access -- Script API documentation available in `docs/Script API/` - ### Internationalization - Translation files in `apps/client/src/translations/` - Supported languages: English, German, Spanish, French, Romanian, Chinese @@ -147,12 +201,14 @@ Trilium provides powerful user scripting capabilities: - IPC communication: use `electron.ipcMain.on(channel, handler)` on server side, `electron.ipcRenderer.send(channel, data)` on client side - Electron-only features should check `isElectron()` from `apps/client/src/services/utils.ts` (client) or `utils.isElectron` (server) -### Security Considerations -- Per-note encryption with granular protected sessions -- CSRF protection for API endpoints -- OpenID and TOTP authentication support -- Sanitization of user-generated content +Three inheritance mechanisms: +1. **Standard**: `note.getInheritableAttributes()` walks parent tree +2. **Child prefix**: `child:label` on parent copies to children +3. **Template relation**: `#template=noteNoteId` includes template's inheritable attributes +### Attribute Inheritance + +Use `note.getOwnedAttribute()` for direct, `note.getAttribute()` for inherited. 
### Client-Side API Restrictions - **Do not use `crypto.randomUUID()`** or other Web Crypto APIs that require secure contexts - Trilium can run over HTTP, not just HTTPS - Use `randomString()` from `apps/client/src/services/utils.ts` for generating IDs instead @@ -173,20 +229,43 @@ Trilium provides powerful user scripting capabilities: - Import shared types directly from `@triliumnext/commons` - do not re-export them from app-specific modules - Keep app-specific types (e.g., `LlmProvider` for server, `StreamCallbacks` for client) in their respective apps -## Common Development Tasks +## Important Patterns + +- **Protected notes**: Check `note.isContentAvailable()` before accessing content; use `note.getTitleOrProtected()` for safe title access +- **Long operations**: Use `TaskContext` for progress reporting via WebSocket +- **Event system** (`packages/trilium-core/src/services/events.ts`): Events emitted in order (notes → branches → attributes) during load for referential integrity +- **Search**: Expression-based, scoring happens in-memory — cannot add SQL-level LIMIT/OFFSET without losing scoring +- **Widget cleanup**: Unsubscribe from events in `cleanup()`/`doDestroy()` to prevent memory leaks + +## Code Style + +- 4-space indentation, semicolons always required +- Double quotes (enforced by format config) +- Max line length: 100 characters +- Unix line endings +- Import sorting via `eslint-plugin-simple-import-sort` + +## Testing + +- **Server tests** (`apps/server/spec/`): Vitest, must run sequentially (shared DB), forks pool, max 6 workers +- **Client tests** (`apps/client/src/`): Vitest with happy-dom environment, can run in parallel +- **E2E tests** (`packages/trilium-e2e/`): Shared Playwright tests, run via `pnpm --filter server e2e` or `pnpm --filter client-standalone e2e` +- **ETAPI tests** (`apps/server/spec/etapi/`): External API contract tests + +## Documentation -### Adding New Note Types -1. 
Create widget in `apps/client/src/widgets/type_widgets/` -2. Register in `apps/client/src/services/note_types.ts` -3. Add backend handling in `apps/server/src/services/notes.ts` +- `docs/Script API/` — Auto-generated, never edit directly +- `docs/User Guide/` — Edit via `pnpm edit-docs:edit-docs`, not manually +- `docs/Developer Guide/` and `docs/Release Notes/` — Safe for direct Markdown editing -### Extending Search -- Search expressions handled in `apps/server/src/services/search/` -- Add new search operators in search context files +## Key Entry Points -### Custom CKEditor Plugins -- Create new package in `packages/` following existing plugin structure -- Register in `packages/ckeditor5/src/plugins.ts` +- `apps/server/src/main.ts` — Server startup +- `apps/client/src/desktop.ts` — Client initialization +- `packages/trilium-core/src/becca/becca.ts` — Backend data management +- `apps/client/src/services/froca.ts` — Frontend cache +- `apps/server/src/routes/routes.ts` — API route registration +- `packages/trilium-core/src/services/sql/sql.ts` — Database abstraction ### Adding Hidden System Notes The hidden subtree (`_hidden`) contains system notes with predictable IDs (prefixed with `_`). Defined in `apps/server/src/services/hidden_subtree.ts` via the `HiddenSubtreeItem` interface from `@triliumnext/commons`. 
@@ -238,4 +317,4 @@ Tools are defined using `defineTools()` in `apps/server/src/services/llm/tools/` - Vite for fast development builds - ESBuild for production optimization - pnpm workspaces for dependency management -- Docker support with multi-stage builds \ No newline at end of file +- Docker support with multi-stage builds diff --git a/apps/build-docs/package.json b/apps/build-docs/package.json index cae84da5105..dfaf54bd4c3 100644 --- a/apps/build-docs/package.json +++ b/apps/build-docs/package.json @@ -15,6 +15,10 @@ "author": "Elian Doran ", "license": "AGPL-3.0-only", "packageManager": "pnpm@10.33.0", + "dependencies": { + "@triliumnext/core": "workspace:*", + "@triliumnext/server": "workspace:*" + }, "devDependencies": { "@redocly/cli": "2.28.0", "archiver": "7.0.1", diff --git a/apps/build-docs/src/backend_script_entrypoint.ts b/apps/build-docs/src/backend_script_entrypoint.ts index 0447900b6b5..7b00ed752b9 100644 --- a/apps/build-docs/src/backend_script_entrypoint.ts +++ b/apps/build-docs/src/backend_script_entrypoint.ts @@ -14,21 +14,18 @@ */ export type { - default as AbstractBeccaEntity -} from "../../server/src/becca/entities/abstract_becca_entity.js"; -export type { - default as BAttachment -} from "../../server/src/becca/entities/battachment.js"; -export type { default as BAttribute } from "../../server/src/becca/entities/battribute.js"; -export type { default as BBranch } from "../../server/src/becca/entities/bbranch.js"; -export type { default as BEtapiToken } from "../../server/src/becca/entities/betapi_token.js"; -export type { BNote }; -export type { default as BOption } from "../../server/src/becca/entities/boption.js"; -export type { default as BRecentNote } from "../../server/src/becca/entities/brecent_note.js"; -export type { default as BRevision } from "../../server/src/becca/entities/brevision.js"; + AbstractBeccaEntity, + BAttachment, + BAttribute, + BBranch, + BEtapiToken, + BNote, + BOption, + BRecentNote, + BRevision +} from 
"@triliumnext/core"; -import BNote from "../../server/src/becca/entities/bnote.js"; -import BackendScriptApi, { type Api } from "../../server/src/services/backend_script_api.js"; +import { BNote, BackendScriptApi, type BackendScriptApiInterface as Api } from "@triliumnext/core"; export type { Api }; diff --git a/apps/build-docs/src/build-docs.ts b/apps/build-docs/src/build-docs.ts index 357ecb1d414..f5c97fb36c7 100644 --- a/apps/build-docs/src/build-docs.ts +++ b/apps/build-docs/src/build-docs.ts @@ -5,10 +5,43 @@ if (!process.env.TRILIUM_RESOURCE_DIR) { } process.env.NODE_ENV = "development"; -import cls from "@triliumnext/server/src/services/cls.js"; +import { BackupService, getContext, initializeCore, type ImageProvider } from "@triliumnext/core"; +import ClsHookedExecutionContext from "@triliumnext/server/src/cls_provider.js"; +import NodejsCryptoProvider from "@triliumnext/server/src/crypto_provider.js"; +import ServerPlatformProvider from "@triliumnext/server/src/platform_provider.js"; +import BetterSqlite3Provider from "@triliumnext/server/src/sql_provider.js"; +import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js"; + +// Stub backup service for build-docs (not used, but required by initializeCore) +class StubBackupService extends BackupService { + constructor() { + super({ + getOption: () => "", + getOptionBool: () => false, + setOption: () => {} + }); + } + async backupNow(_name: string): Promise { + throw new Error("Backup not supported in build-docs"); + } + async getExistingBackups() { + return []; + } + async getBackupContent(_filePath: string): Promise { + return null; + } +} + +// Stub image provider for build-docs (not used, but required by initializeCore) +const stubImageProvider: ImageProvider = { + getImageType: () => null, + processImage: async () => { + throw new Error("Image processing not supported in build-docs"); + } +}; import archiver from "archiver"; import { execSync } from "child_process"; -import { WriteStream } from 
"fs"; +import { readFileSync } from "fs"; import * as fs from "fs/promises"; import * as fsExtra from "fs-extra"; import yaml from "js-yaml"; @@ -16,6 +49,37 @@ import { dirname, join, resolve } from "path"; import BuildContext from "./context.js"; +let initialized = false; + +async function initializeBuildEnvironment() { + if (initialized) return; + initialized = true; + + const dbProvider = new BetterSqlite3Provider(); + dbProvider.loadFromMemory(); + + const { serverZipExportProviderFactory } = await import("@triliumnext/server/src/services/export/zip/factory.js"); + + await initializeCore({ + dbConfig: { + provider: dbProvider, + isReadOnly: false, + onTransactionCommit: () => {}, + onTransactionRollback: () => {} + }, + crypto: new NodejsCryptoProvider(), + zip: new NodejsZipProvider(), + zipExportProviderFactory: serverZipExportProviderFactory, + executionContext: new ClsHookedExecutionContext(), + platform: new ServerPlatformProvider(), + schema: readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"), + translations: (await import("@triliumnext/server/src/services/i18n.js")).initializeTranslations, + getDemoArchive: async () => null, + backup: new StubBackupService(), + image: stubImageProvider + }); +} + interface NoteMapping { rootNoteId: string; path: string; @@ -72,9 +136,8 @@ async function exportDocs( ) { const zipFilePath = `output-${noteId}.zip`; try { - const { exportToZipFile } = (await import("@triliumnext/server/src/services/export/zip.js")) - .default; - await exportToZipFile(noteId, format, zipFilePath, {}); + const { zipExportService } = await import("@triliumnext/core"); + await zipExportService.exportToZipFile(noteId, format, zipFilePath, {}); const ignoredSet = ignoredFiles ? 
new Set(ignoredFiles) : undefined; await extractZip(zipFilePath, outputPath, ignoredSet); @@ -92,18 +155,12 @@ async function importAndExportDocs(sourcePath: string, outputSubDir: string) { const zipName = outputSubDir || "user-guide"; const zipFilePath = `output-${zipName}.zip`; try { - const { exportToZip } = (await import("@triliumnext/server/src/services/export/zip.js")) - .default; + const { zipExportService, TaskContext } = await import("@triliumnext/core"); + const { waitForStreamToFinish } = await import("@triliumnext/server/src/services/utils.js"); const branch = note.getParentBranches()[0]; - const taskContext = new (await import("@triliumnext/server/src/services/task_context.js")) - .default( - "no-progress-reporting", - "export", - null - ); + const taskContext = new TaskContext("no-progress-reporting", "export", null); const fileOutputStream = fsExtra.createWriteStream(zipFilePath); - await exportToZip(taskContext, branch, "share", fileOutputStream); - const { waitForStreamToFinish } = await import("@triliumnext/server/src/services/utils.js"); + await zipExportService.exportToZip(taskContext, branch, "share", fileOutputStream); await waitForStreamToFinish(fileOutputStream); // Output to root directory if outputSubDir is empty, otherwise to subdirectory @@ -117,15 +174,11 @@ async function importAndExportDocs(sourcePath: string, outputSubDir: string) { } async function buildDocsInner(config?: Config) { - const i18n = await import("@triliumnext/server/src/services/i18n.js"); - await i18n.initializeTranslations(); - - const sqlInit = (await import("../../server/src/services/sql_init.js")).default; - await sqlInit.createInitialDatabase(true); + const { sql_init, becca_loader } = await import("@triliumnext/core"); + await sql_init.createInitialDatabase(true); // Wait for becca to be loaded before importing data - const beccaLoader = await import("../../server/src/becca/becca_loader.js"); - await beccaLoader.beccaLoaded; + await becca_loader.beccaLoaded; if 
(config) { // Config-based build (reads from edit-docs-config.yaml) @@ -176,16 +229,14 @@ async function buildDocsInner(config?: Config) { export async function importData(path: string) { const buffer = await createImportZip(path); - const importService = (await import("../../server/src/services/import/zip.js")).default; - const TaskContext = (await import("../../server/src/services/task_context.js")).default; + const { zipImportService, TaskContext, becca } = await import("@triliumnext/core"); const context = new TaskContext("no-progress-reporting", "importNotes", null); - const becca = (await import("../../server/src/becca/becca.js")).default; const rootNote = becca.getRoot(); if (!rootNote) { throw new Error("Missing root note for import."); } - return await importService.importZip(context, buffer, rootNote, { + return await zipImportService.importZip(context, buffer, rootNote, { preserveIds: true }); } @@ -218,20 +269,16 @@ export async function extractZip( outputPath: string, ignoredFiles?: Set ) { - const { readZipFile, readContent } = (await import( - "@triliumnext/server/src/services/import/zip.js" - )); - await readZipFile(await fs.readFile(zipFilePath), async (zip, entry) => { + const { getZipProvider } = await import("@triliumnext/core"); + await getZipProvider().readZipFile(await fs.readFile(zipFilePath), async (entry, readContent) => { // We ignore directories since they can appear out of order anyway. 
if (!entry.fileName.endsWith("/") && !ignoredFiles?.has(entry.fileName)) { const destPath = join(outputPath, entry.fileName); - const fileContent = await readContent(zip, entry); + const fileContent = await readContent(); await fsExtra.mkdirs(dirname(destPath)); await fs.writeFile(destPath, fileContent); } - - zip.readEntry(); }); } @@ -246,9 +293,12 @@ export async function buildDocsFromConfig(configPath?: string, gitRootDir?: stri }); } + // Initialize the build environment before using cls + await initializeBuildEnvironment(); + // Trigger the actual build. await new Promise((res, rej) => { - cls.init(() => { + getContext().init(() => { buildDocsInner(config ?? undefined) .catch(rej) .then(res); @@ -263,9 +313,12 @@ export default async function buildDocs({ gitRootDir }: BuildContext) { cwd: gitRootDir }); + // Initialize the build environment before using cls + await initializeBuildEnvironment(); + // Trigger the actual build. await new Promise((res, rej) => { - cls.init(() => { + getContext().init(() => { buildDocsInner() .catch(rej) .then(res); diff --git a/apps/build-docs/src/main.ts b/apps/build-docs/src/main.ts index cca17125d83..4ed69962b88 100644 --- a/apps/build-docs/src/main.ts +++ b/apps/build-docs/src/main.ts @@ -28,4 +28,13 @@ async function main() { cpSync(join(context.baseDir, "user-guide/404.html"), join(context.baseDir, "404.html")); } -main(); +// Note: forcing process.exit() because importing notes via the core triggers +// fire-and-forget async work in `notes.ts#downloadImages` (a 5s setTimeout that +// re-schedules itself via `asyncPostProcessContent`), which keeps the libuv +// event loop alive forever even after main() completes. 
+main() + .then(() => process.exit(0)) + .catch((error) => { + console.error("Error building documentation:", error); + process.exit(1); + }); diff --git a/apps/build-docs/tsconfig.app.json b/apps/build-docs/tsconfig.app.json index b9e17115a07..877f0818807 100644 --- a/apps/build-docs/tsconfig.app.json +++ b/apps/build-docs/tsconfig.app.json @@ -23,6 +23,12 @@ "eslint.config.mjs" ], "references": [ + { + "path": "../../packages/commons/tsconfig.lib.json" + }, + { + "path": "../../packages/trilium-core/tsconfig.lib.json" + }, { "path": "../server/tsconfig.app.json" }, diff --git a/apps/build-docs/tsconfig.json b/apps/build-docs/tsconfig.json index 99c9b71b370..75aea1dcc41 100644 --- a/apps/build-docs/tsconfig.json +++ b/apps/build-docs/tsconfig.json @@ -1,8 +1,7 @@ { "extends": "../../tsconfig.base.json", - "include": [ - "scripts/**/*.ts" - ], + "files": [], + "include": [], "references": [ { "path": "../server" diff --git a/apps/client-standalone/.env b/apps/client-standalone/.env new file mode 100644 index 00000000000..18a7bcf954a --- /dev/null +++ b/apps/client-standalone/.env @@ -0,0 +1,4 @@ +# The development license key for premium CKEditor features. +# Note: This key must only be used for the Trilium Notes project. 
+VITE_CKEDITOR_KEY=eyJhbGciOiJFUzI1NiJ9.eyJleHAiOjE3ODcyNzA0MDAsImp0aSI6IjkyMWE1MWNlLTliNDMtNGRlMC1iOTQwLTc5ZjM2MDBkYjg1NyIsImRpc3RyaWJ1dGlvbkNoYW5uZWwiOiJ0cmlsaXVtIiwiZmVhdHVyZXMiOlsiVFJJTElVTSJdLCJ2YyI6ImU4YzRhMjBkIn0.hny77p-U4-jTkoqbwPytrEar5ylGCWBN7Ez3SlB8i6_mJCBIeCSTOlVQk_JMiOEq3AGykUMHzWXzjdMFwgniOw +VITE_CKEDITOR_ENABLE_INSPECTOR=false \ No newline at end of file diff --git a/apps/client-standalone/.env.production b/apps/client-standalone/.env.production new file mode 100644 index 00000000000..efd1fd5179f --- /dev/null +++ b/apps/client-standalone/.env.production @@ -0,0 +1 @@ +VITE_CKEDITOR_ENABLE_INSPECTOR=false diff --git a/apps/client-standalone/package.json b/apps/client-standalone/package.json new file mode 100644 index 00000000000..b3aa60e2ac9 --- /dev/null +++ b/apps/client-standalone/package.json @@ -0,0 +1,94 @@ +{ + "name": "@triliumnext/client-standalone", + "version": "0.102.2", + "description": "Standalone client for TriliumNext with SQLite WASM backend", + "private": true, + "license": "AGPL-3.0-only", + "scripts": { + "build": "cross-env NODE_OPTIONS=--max-old-space-size=4096 vite build", + "dev": "vite dev", + "test": "vitest", + "start-prod": "pnpm build && pnpm vite preview --port 8888", + "coverage": "vitest --coverage", + "e2e": "playwright test", + "start-prod-no-dir": "pnpm build && pnpm vite preview --host 127.0.0.1" + }, + "dependencies": { + "@excalidraw/excalidraw": "0.18.0", + "@fullcalendar/core": "6.1.20", + "@fullcalendar/daygrid": "6.1.20", + "@fullcalendar/interaction": "6.1.20", + "@fullcalendar/list": "6.1.20", + "@fullcalendar/multimonth": "6.1.20", + "@fullcalendar/timegrid": "6.1.20", + "@maplibre/maplibre-gl-leaflet": "0.1.3", + "@mermaid-js/layout-elk": "0.2.1", + "@mind-elixir/node-menu": "5.0.1", + "@popperjs/core": "2.11.8", + "@preact/signals": "2.9.0", + "@sqlite.org/sqlite-wasm": "3.51.1-build2", + "@triliumnext/ckeditor5": "workspace:*", + "@triliumnext/codemirror": "workspace:*", + "@triliumnext/commons": 
"workspace:*", + "@triliumnext/core": "workspace:*", + "@triliumnext/highlightjs": "workspace:*", + "@triliumnext/share-theme": "workspace:*", + "@triliumnext/split.js": "workspace:*", + "@zumer/snapdom": "2.8.0", + "autocomplete.js": "0.38.1", + "bootstrap": "5.3.8", + "boxicons": "2.1.4", + "clsx": "2.1.1", + "color": "5.0.3", + "debounce": "3.0.0", + "draggabilly": "3.0.0", + "fflate": "0.8.2", + "force-graph": "1.51.4", + "globals": "17.4.0", + "i18next": "26.0.5", + "i18next-http-backend": "3.0.4", + "aes-js": "3.1.2", + "jquery": "4.0.0", + "jquery.fancytree": "2.38.5", + "js-md5": "0.8.3", + "js-sha1": "0.7.0", + "js-sha256": "0.11.1", + "js-sha512": "0.9.0", + "scrypt-js": "3.0.1", + "jsplumb": "2.15.6", + "katex": "0.16.45", + "knockout": "3.5.1", + "leaflet": "1.9.4", + "leaflet-gpx": "2.2.0", + "mark.js": "8.11.1", + "marked": "18.0.1", + "mermaid": "11.14.0", + "mind-elixir": "5.10.0", + "normalize.css": "8.0.1", + "panzoom": "9.4.4", + "preact": "10.29.1", + "react-i18next": "17.0.4", + "react-window": "2.2.7", + "reveal.js": "6.0.1", + "svg-pan-zoom": "3.6.2", + "tabulator-tables": "6.4.0", + "vanilla-js-wheel-zoom": "9.0.4" + }, + "devDependencies": { + "@types/aes-js": "3.1.4", + "@ckeditor/ckeditor5-inspector": "5.0.0", + "@preact/preset-vite": "2.10.2", + "@types/bootstrap": "5.2.10", + "@types/jquery": "4.0.0", + "@types/leaflet": "1.9.21", + "@types/leaflet-gpx": "1.3.8", + "@types/mark.js": "8.11.12", + "@types/reveal.js": "5.2.2", + "@types/tabulator-tables": "6.3.1", + "copy-webpack-plugin": "14.0.0", + "cross-env": "7.0.3", + "happy-dom": "20.9.0", + "script-loader": "0.7.2", + "vite-plugin-static-copy": "4.0.1" + } +} \ No newline at end of file diff --git a/apps/client-standalone/playwright.config.ts b/apps/client-standalone/playwright.config.ts new file mode 100644 index 00000000000..c93c99df9d4 --- /dev/null +++ b/apps/client-standalone/playwright.config.ts @@ -0,0 +1,20 @@ +import { createBaseConfig } from 
"../../packages/trilium-e2e/src/base-config"; + +const port = process.env["TRILIUM_PORT"] ?? "8082"; +const baseURL = process.env["BASE_URL"] || `http://127.0.0.1:${port}`; + +export default createBaseConfig({ + appDir: __dirname, + projectName: "standalone", + workers: 1, + webServer: !process.env.TRILIUM_DOCKER ? { + command: `pnpm build && pnpm vite preview --host 127.0.0.1 --port ${port}`, + url: baseURL, + env: { + TRILIUM_INTEGRATION_TEST: "memory" + }, + reuseExistingServer: !process.env.CI, + cwd: __dirname, + timeout: 5 * 60 * 1000 + } : undefined, +}); diff --git a/apps/client-standalone/public/_headers b/apps/client-standalone/public/_headers new file mode 100644 index 00000000000..6e0d001fdaf --- /dev/null +++ b/apps/client-standalone/public/_headers @@ -0,0 +1,3 @@ +/* + Cross-Origin-Opener-Policy: same-origin + Cross-Origin-Embedder-Policy: require-corp \ No newline at end of file diff --git a/apps/client-standalone/public/favicon.ico b/apps/client-standalone/public/favicon.ico new file mode 100644 index 00000000000..398e3854e62 Binary files /dev/null and b/apps/client-standalone/public/favicon.ico differ diff --git a/apps/client-standalone/public/manifest.webmanifest b/apps/client-standalone/public/manifest.webmanifest new file mode 100644 index 00000000000..d1ab2fc21ac --- /dev/null +++ b/apps/client-standalone/public/manifest.webmanifest @@ -0,0 +1,20 @@ +{ + "name": "Trilium Notes", + "short_name": "Trilium", + "description": "Trilium Notes is a hierarchical note taking application with focus on building large personal knowledge bases.", + "theme_color": "#333333", + "background_color": "#1F1F1F", + "display": "standalone", + "scope": "/", + "start_url": "/", + "display_override": [ + "window-controls-overlay" + ], + "icons": [ + { + "src": "assets/icon.png", + "sizes": "512x512", + "type": "image/png" + } + ] +} diff --git a/apps/client-standalone/src/desktop.ts b/apps/client-standalone/src/desktop.ts new file mode 100644 index 
00000000000..090b1fb88c0 --- /dev/null +++ b/apps/client-standalone/src/desktop.ts @@ -0,0 +1,2 @@ +// Re-export desktop from client +export * from "../../client/src/desktop"; diff --git a/apps/client-standalone/src/index.html b/apps/client-standalone/src/index.html new file mode 100644 index 00000000000..23b5a1fe346 --- /dev/null +++ b/apps/client-standalone/src/index.html @@ -0,0 +1,31 @@ + + + + + + + + + + Trilium Notes + + + + + +
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/apps/client-standalone/src/lightweight/backup_provider.ts b/apps/client-standalone/src/lightweight/backup_provider.ts new file mode 100644 index 00000000000..7677e88325f --- /dev/null +++ b/apps/client-standalone/src/lightweight/backup_provider.ts @@ -0,0 +1,156 @@ +import type { DatabaseBackup } from "@triliumnext/commons"; +import { BackupOptionsService, BackupService, getSql } from "@triliumnext/core"; + +const BACKUP_DIR_NAME = "backups"; +const BACKUP_FILE_PATTERN = /^backup-.*\.db$/; + +/** + * Standalone backup service using OPFS (Origin Private File System). + * Stores database backups as serialized byte arrays in OPFS. + * Falls back to no-op behavior when OPFS is not available (e.g., in tests). + */ +export default class StandaloneBackupService extends BackupService { + private backupDir: FileSystemDirectoryHandle | null = null; + private opfsAvailable: boolean | null = null; + + constructor(options: BackupOptionsService) { + super(options); + } + + private isOpfsAvailable(): boolean { + if (this.opfsAvailable === null) { + this.opfsAvailable = typeof navigator !== "undefined" + && navigator.storage + && typeof navigator.storage.getDirectory === "function"; + } + return this.opfsAvailable; + } + + private async ensureBackupDirectory(): Promise { + if (!this.isOpfsAvailable()) { + return null; + } + + if (!this.backupDir) { + const root = await navigator.storage.getDirectory(); + this.backupDir = await root.getDirectoryHandle(BACKUP_DIR_NAME, { create: true }); + } + return this.backupDir; + } + + override async backupNow(name: string): Promise { + const fileName = `backup-${name}.db`; + + // Check if OPFS is available + if (!this.isOpfsAvailable()) { + console.warn(`[Backup] OPFS not available, skipping backup: ${fileName}`); + return `/${BACKUP_DIR_NAME}/${fileName}`; + } + + try { + const dir = await this.ensureBackupDirectory(); + if (!dir) { + console.warn(`[Backup] Backup directory 
not available, skipping: ${fileName}`); + return `/${BACKUP_DIR_NAME}/${fileName}`; + } + + // Serialize the database + const data = getSql().serialize(); + + // Write to OPFS + const fileHandle = await dir.getFileHandle(fileName, { create: true }); + const writable = await fileHandle.createWritable(); + await writable.write(data); + await writable.close(); + + console.log(`[Backup] Created backup: ${fileName} (${data.byteLength} bytes)`); + return `/${BACKUP_DIR_NAME}/${fileName}`; + } catch (error) { + console.error(`[Backup] Failed to create backup ${fileName}:`, error); + // Don't throw - backup failure shouldn't block operations + return `/${BACKUP_DIR_NAME}/${fileName}`; + } + } + + override async getExistingBackups(): Promise { + if (!this.isOpfsAvailable()) { + return []; + } + + try { + const dir = await this.ensureBackupDirectory(); + if (!dir) { + return []; + } + + const backups: DatabaseBackup[] = []; + + for await (const [name, handle] of dir.entries()) { + if (handle.kind !== "file" || !BACKUP_FILE_PATTERN.test(name)) { + continue; + } + + const file = await (handle as FileSystemFileHandle).getFile(); + backups.push({ + fileName: name, + filePath: `/${BACKUP_DIR_NAME}/${name}`, + mtime: new Date(file.lastModified) + }); + } + + // Sort by modification time, newest first + backups.sort((a, b) => b.mtime.getTime() - a.mtime.getTime()); + return backups; + } catch (error) { + console.error("[Backup] Failed to list backups:", error); + return []; + } + } + + /** + * Delete a backup by filename. 
+ */ + async deleteBackup(fileName: string): Promise { + if (!this.isOpfsAvailable()) { + return; + } + + try { + const dir = await this.ensureBackupDirectory(); + if (!dir) { + return; + } + await dir.removeEntry(fileName); + console.log(`[Backup] Deleted backup: ${fileName}`); + } catch (error) { + console.error(`[Backup] Failed to delete backup ${fileName}:`, error); + } + } + + override async getBackupContent(filePath: string): Promise { + if (!this.isOpfsAvailable()) { + return null; + } + + try { + const dir = await this.ensureBackupDirectory(); + if (!dir) { + return null; + } + + // Extract fileName from filePath (e.g., "/backups/backup-now.db" -> "backup-now.db") + const fileName = filePath.split("/").pop(); + if (!fileName || !BACKUP_FILE_PATTERN.test(fileName)) { + return null; + } + + const fileHandle = await dir.getFileHandle(fileName); + const file = await fileHandle.getFile(); + const data = await file.arrayBuffer(); + return new Uint8Array(data); + } catch (error) { + console.error(`[Backup] Failed to get backup content ${filePath}:`, error); + return null; + } + } +} diff --git a/apps/client-standalone/src/lightweight/browser_router.ts b/apps/client-standalone/src/lightweight/browser_router.ts new file mode 100644 index 00000000000..a5bdfea79fa --- /dev/null +++ b/apps/client-standalone/src/lightweight/browser_router.ts @@ -0,0 +1,314 @@ +/** + * Browser-compatible router that mimics Express routing patterns. + * Supports path parameters (e.g., /api/notes/:noteId) and query strings. 
+ */ + +import { getContext, routes } from "@triliumnext/core"; + +export interface UploadedFile { + originalname: string; + mimetype: string; + buffer: Uint8Array; +} + +export interface BrowserRequest { + method: string; + url: string; + path: string; + params: Record; + query: Record; + headers?: Record; + body?: unknown; + file?: UploadedFile; +} + +export interface BrowserResponse { + status: number; + headers: Record; + body: ArrayBuffer | null; +} + +export type RouteHandler = (req: BrowserRequest) => unknown | Promise; + +interface Route { + method: string; + pattern: RegExp; + paramNames: string[]; + handler: RouteHandler; +} + +/** + * Symbol used to mark a result as an already-formatted response, + * so that formatResult passes it through without JSON-serializing. + * Must match the symbol exported from browser_routes.ts. + */ +const RAW_RESPONSE = Symbol.for('RAW_RESPONSE'); + +const encoder = new TextEncoder(); + +/** + * Convert an Express-style path pattern to a RegExp. + * Supports :param syntax for path parameters. + * + * Examples: + * /api/notes/:noteId -> /^\/api\/notes\/([^\/]+)$/ + * /api/notes/:noteId/revisions -> /^\/api\/notes\/([^\/]+)\/revisions$/ + */ +function pathToRegex(path: string): { pattern: RegExp; paramNames: string[] } { + const paramNames: string[] = []; + + // Escape special regex characters except for :param patterns + const regexPattern = path + .replace(/[.*+?^${}()|[\]\\]/g, '\\$&') // Escape special chars + .replace(/:([a-zA-Z_][a-zA-Z0-9_]*)/g, (_, paramName) => { + paramNames.push(paramName); + return '([^/]+)'; + }); + + return { + pattern: new RegExp(`^${regexPattern}$`), + paramNames + }; +} + +/** + * Parse query string into an object. 
+ */ +function parseQuery(search: string): Record { + const query: Record = {}; + if (!search || search === '?') return query; + + const params = new URLSearchParams(search); + for (const [key, value] of params) { + query[key] = value; + } + return query; +} + +/** + * Convert a result to a JSON response. + */ +function jsonResponse(obj: unknown, status = 200, extraHeaders: Record = {}): BrowserResponse { + const parsedObj = routes.convertEntitiesToPojo(obj); + const body = encoder.encode(JSON.stringify(parsedObj)).buffer as ArrayBuffer; + return { + status, + headers: { "content-type": "application/json; charset=utf-8", ...extraHeaders }, + body + }; +} + +/** + * Convert a string to a text response. + */ +function textResponse(text: string, status = 200, extraHeaders: Record = {}): BrowserResponse { + const body = encoder.encode(text).buffer as ArrayBuffer; + return { + status, + headers: { "content-type": "text/plain; charset=utf-8", ...extraHeaders }, + body + }; +} + +/** + * Browser router class that handles route registration and dispatching. + */ +export class BrowserRouter { + private routes: Route[] = []; + + /** + * Register a route handler. + */ + register(method: string, path: string, handler: RouteHandler): void { + const { pattern, paramNames } = pathToRegex(path); + this.routes.push({ + method: method.toUpperCase(), + pattern, + paramNames, + handler + }); + } + + /** + * Convenience methods for common HTTP methods. 
+ */ + get(path: string, handler: RouteHandler): void { + this.register('GET', path, handler); + } + + post(path: string, handler: RouteHandler): void { + this.register('POST', path, handler); + } + + put(path: string, handler: RouteHandler): void { + this.register('PUT', path, handler); + } + + patch(path: string, handler: RouteHandler): void { + this.register('PATCH', path, handler); + } + + delete(path: string, handler: RouteHandler): void { + this.register('DELETE', path, handler); + } + + /** + * Dispatch a request to the appropriate handler. + */ + async dispatch(method: string, urlString: string, body?: unknown, headers?: Record): Promise { + const url = new URL(urlString); + const path = url.pathname; + const query = parseQuery(url.search); + const upperMethod = method.toUpperCase(); + + // Parse body based on content-type + let parsedBody = body; + let uploadedFile: UploadedFile | undefined; + if (body instanceof ArrayBuffer && headers) { + const contentType = headers['content-type'] || headers['Content-Type'] || ''; + if (contentType.includes('application/json')) { + try { + const text = new TextDecoder().decode(body); + if (text.trim()) { + parsedBody = JSON.parse(text); + } + } catch (e) { + console.warn('[Router] Failed to parse JSON body:', e); + parsedBody = body; + } + } else if (contentType.includes('multipart/form-data')) { + try { + // Reconstruct a Response so we can use the native FormData parser + const response = new Response(body, { headers: { 'content-type': contentType } }); + const formData = await response.formData(); + const formFields: Record = {}; + for (const [key, value] of formData.entries()) { + if (typeof value === 'string') { + formFields[key] = value; + } else { + // File field (Blob) — multer uses the field name "upload" + const fileBuffer = new Uint8Array(await value.arrayBuffer()); + uploadedFile = { + originalname: value.name, + mimetype: value.type || 'application/octet-stream', + buffer: fileBuffer + }; + } + } + 
parsedBody = formFields; + } catch (e) { + console.warn('[Router] Failed to parse multipart body:', e); + } + } + } + // Find matching route + for (const route of this.routes) { + if (route.method !== upperMethod) continue; + + const match = path.match(route.pattern); + if (!match) continue; + + // Extract path parameters + const params: Record = {}; + for (let i = 0; i < route.paramNames.length; i++) { + params[route.paramNames[i]] = decodeURIComponent(match[i + 1]); + } + + const request: BrowserRequest = { + method: upperMethod, + url: urlString, + path, + params, + query, + headers: headers ?? {}, + body: parsedBody, + file: uploadedFile + }; + + try { + const result = await getContext().init(async () => await route.handler(request)); + return this.formatResult(result); + } catch (error) { + return this.formatError(error, `Error handling ${method} ${path}`); + } + } + + // No route matched + return textResponse(`Not found: ${method} ${path}`, 404); + } + + /** + * Format a handler result into a response. + * Follows the same patterns as the server's apiResultHandler. + */ + private formatResult(result: unknown): BrowserResponse { + // Handle raw responses (e.g. 
from image routes that write directly to res) + if (result && typeof result === 'object' && RAW_RESPONSE in result) { + const raw = result as unknown as { status: number; headers: Record; body: unknown }; + let body: ArrayBuffer | null = null; + + if (raw.body instanceof ArrayBuffer) { + body = raw.body; + } else if (raw.body instanceof Uint8Array) { + body = raw.body.buffer as ArrayBuffer; + } else if (typeof raw.body === 'string') { + body = encoder.encode(raw.body).buffer as ArrayBuffer; + } + + return { + status: raw.status, + headers: raw.headers, + body + }; + } + + // Handle [statusCode, response] format + if (Array.isArray(result) && result.length > 0 && Number.isInteger(result[0])) { + const [statusCode, response] = result; + return jsonResponse(response, statusCode); + } + + // Handle undefined (no content) - 204 should have no body + if (result === undefined) { + return { + status: 204, + headers: {}, + body: null + }; + } + + // Default: JSON response with 200 + return jsonResponse(result, 200); + } + + /** + * Format an error into a response. + */ + private formatError(error: unknown, context: string): BrowserResponse { + console.error('[Router] Handler error:', context, error); + + // Check for known error types + if (error && typeof error === 'object') { + const err = error as { constructor?: { name?: string }; message?: string }; + + if (err.constructor?.name === 'NotFoundError') { + return jsonResponse({ message: err.message || 'Not found' }, 404); + } + + if (err.constructor?.name === 'ValidationError') { + return jsonResponse({ message: err.message || 'Validation error' }, 400); + } + } + + // Generic error + const message = error instanceof Error ? error.message : String(error); + return jsonResponse({ message }, 500); + } +} + +/** + * Create a new router instance. 
+ */ +export function createRouter(): BrowserRouter { + return new BrowserRouter(); +} diff --git a/apps/client-standalone/src/lightweight/browser_routes.ts b/apps/client-standalone/src/lightweight/browser_routes.ts new file mode 100644 index 00000000000..556eaf9ce76 --- /dev/null +++ b/apps/client-standalone/src/lightweight/browser_routes.ts @@ -0,0 +1,340 @@ +/** + * Browser route definitions. + * This integrates with the shared route builder from @triliumnext/core. + */ + +import { BootstrapDefinition } from '@triliumnext/commons'; +import { entity_changes, getContext, getPlatform, getSharedBootstrapItems, getSql, routes, sql_init } from '@triliumnext/core'; + +import packageJson from '../../package.json' with { type: 'json' }; +import { type BrowserRequest, BrowserRouter } from './browser_router'; + +/** Minimal response object used by apiResultHandler to capture the processed result. */ +interface ResultHandlerResponse { + headers: Record; + result: unknown; + setHeader(name: string, value: string): void; +} + +/** + * Symbol used to mark a result as an already-formatted BrowserResponse, + * so that BrowserRouter.formatResult passes it through without JSON-serializing. + * Uses Symbol.for() so the same symbol is shared across modules. + */ +const RAW_RESPONSE = Symbol.for('RAW_RESPONSE'); + +type HttpMethod = 'get' | 'post' | 'put' | 'patch' | 'delete'; + +/** + * Creates an Express-like request object from a BrowserRequest. + */ +function toExpressLikeReq(req: BrowserRequest) { + return { + params: req.params, + query: req.query, + body: req.body, + headers: req.headers ?? {}, + method: req.method, + file: req.file, + get originalUrl() { return req.url; } + }; +} + +/** + * Extracts context headers from the request and sets them in the execution context, + * mirroring what the server does in route_api.ts. + */ +function setContextFromHeaders(req: BrowserRequest) { + const headers = req.headers ?? 
{}; + const ctx = getContext(); + ctx.set("componentId", headers["trilium-component-id"]); + ctx.set("localNowDateTime", headers["trilium-local-now-datetime"]); + ctx.set("hoistedNoteId", headers["trilium-hoisted-note-id"] || "root"); +} + +/** + * Wraps a core route handler to work with the BrowserRouter. + * Core handlers expect an Express-like request object with params, query, and body. + * Each request is wrapped in an execution context (like cls.init() on the server) + * to ensure entity change tracking works correctly. + */ +function wrapHandler(handler: (req: any) => unknown, transactional: boolean) { + return (req: BrowserRequest) => { + return getContext().init(() => { + setContextFromHeaders(req); + const expressLikeReq = toExpressLikeReq(req); + if (transactional) { + return getSql().transactional(() => handler(expressLikeReq)); + } + return handler(expressLikeReq); + }); + }; +} + +/** + * Creates an apiRoute function compatible with buildSharedApiRoutes. + * This bridges the core's route registration to the BrowserRouter. + */ +function createApiRoute(router: BrowserRouter, transactional: boolean) { + return (method: HttpMethod, path: string, handler: (req: any) => unknown) => { + router.register(method, path, wrapHandler(handler, transactional)); + }; +} + +/** + * Low-level route registration matching the server's `route()` signature: + * route(method, path, middleware[], handler, resultHandler) + * + * In standalone mode: + * - Middleware (e.g. checkApiAuth) is skipped — there's no authentication. + * - The resultHandler is applied to post-process the result (entity conversion, status codes). 
+ */ +function createRoute(router: BrowserRouter) { + return (method: HttpMethod, path: string, _middleware: any[], handler: (req: any, res: any) => unknown, resultHandler?: ((req: any, res: any, result: unknown) => unknown) | null) => { + router.register(method, path, (req: BrowserRequest) => { + return getContext().init(() => { + setContextFromHeaders(req); + const expressLikeReq = toExpressLikeReq(req); + const mockRes = createMockExpressResponse(); + const result = getSql().transactional(() => handler(expressLikeReq, mockRes)); + + // If the handler used the mock response (e.g. image routes that call res.send()), + // return it as a raw response so BrowserRouter doesn't JSON-serialize it. + if (mockRes._used) { + return { + [RAW_RESPONSE]: true as const, + status: mockRes._status, + headers: mockRes._headers, + body: mockRes._body + }; + } + + if (resultHandler) { + // Create a minimal response object that captures what apiResultHandler sets. + const res = createResultHandlerResponse(); + resultHandler(expressLikeReq, res, result); + return res.result; + } + + return result; + }); + }); + }; +} + +/** + * Async variant of createRoute for handlers that return Promises (e.g. import). + * Uses transactionalAsync (manual BEGIN/COMMIT/ROLLBACK) instead of the synchronous + * transactional() wrapper, which would commit an empty transaction immediately when + * passed an async callback. 
+ */ +function createAsyncRoute(router: BrowserRouter) { + return (method: HttpMethod, path: string, _middleware: any[], handler: (req: any, res: any) => Promise, resultHandler?: ((req: any, res: any, result: unknown) => unknown) | null) => { + router.register(method, path, (req: BrowserRequest) => { + return getContext().init(async () => { + setContextFromHeaders(req); + const expressLikeReq = toExpressLikeReq(req); + const mockRes = createMockExpressResponse(); + const result = await getSql().transactionalAsync(() => handler(expressLikeReq, mockRes)); + + // If the handler used the mock response (e.g. image routes that call res.send()), + // return it as a raw response so BrowserRouter doesn't JSON-serialize it. + if (mockRes._used) { + return { + [RAW_RESPONSE]: true as const, + status: mockRes._status, + headers: mockRes._headers, + body: mockRes._body + }; + } + + if (resultHandler) { + // Create a minimal response object that captures what apiResultHandler sets. + const res = createResultHandlerResponse(); + resultHandler(expressLikeReq, res, result); + return res.result; + } + + return result; + }); + }); + }; +} + +/** + * Creates a mock Express response object that captures calls to set(), send(), sendStatus(), etc. + * Used for route handlers (like image routes) that write directly to the response. 
+ */ +function createMockExpressResponse() { + const chunks: string[] = []; + const res = { + _used: false, + _status: 200, + _headers: {} as Record, + _body: null as unknown, + set(name: string, value: string) { + res._headers[name] = value; + return res; + }, + setHeader(name: string, value: string) { + res._headers[name] = value; + return res; + }, + removeHeader(name: string) { + delete res._headers[name]; + return res; + }, + status(code: number) { + res._status = code; + return res; + }, + send(body: unknown) { + res._used = true; + res._body = body; + return res; + }, + sendStatus(code: number) { + res._used = true; + res._status = code; + return res; + }, + write(chunk: string) { + chunks.push(chunk); + return true; + }, + end() { + res._used = true; + res._body = chunks.join(""); + return res; + } + }; + return res; +} + +/** + * Standalone apiResultHandler matching the server's behavior: + * - Converts Becca entities to POJOs + * - Handles [statusCode, response] tuple format + * - Sets trilium-max-entity-change-id (captured in response headers) + */ +function apiResultHandler(_req: any, res: ResultHandlerResponse, result: unknown) { + res.headers["trilium-max-entity-change-id"] = String(entity_changes.getMaxEntityChangeId()); + result = routes.convertEntitiesToPojo(result); + + if (Array.isArray(result) && result.length > 0 && Number.isInteger(result[0])) { + const [_statusCode, response] = result; + res.result = response; + } else if (result === undefined) { + res.result = ""; + } else { + res.result = result; + } +} + +/** + * No-op middleware stubs for standalone mode. + * + * In a browser context there is no network authentication, rate limiting, + * or multi-user access, so all auth/rate-limit middleware is a no-op. + * + * `checkAppNotInitialized` still guards setup routes: if the database is + * already initialised the middleware throws so the route handler is never + * reached (mirrors the server behaviour). 
+ */ +function noopMiddleware() { + // No-op. +} + +function checkAppNotInitialized() { + if (sql_init.isDbInitialized()) { + throw new Error("App already initialized."); + } +} + +/** + * Creates a minimal response-like object for the apiResultHandler. + */ +function createResultHandlerResponse(): ResultHandlerResponse { + return { + headers: {}, + result: undefined, + setHeader(name: string, value: string) { + this.headers[name] = value; + } + }; +} + +/** + * Register all API routes on the browser router using the shared builder. + * + * @param router - The browser router instance + */ +export function registerRoutes(router: BrowserRouter): void { + const apiRoute = createApiRoute(router, true); + routes.buildSharedApiRoutes({ + route: createRoute(router), + asyncRoute: createAsyncRoute(router), + apiRoute, + asyncApiRoute: createApiRoute(router, false), + apiResultHandler, + checkApiAuth: noopMiddleware, + checkApiAuthOrElectron: noopMiddleware, + checkAppNotInitialized, + checkCredentials: noopMiddleware, + loginRateLimiter: noopMiddleware, + uploadMiddlewareWithErrorHandling: noopMiddleware, + csrfMiddleware: noopMiddleware + }); + apiRoute('get', '/bootstrap', bootstrapRoute); + + // Dummy routes for compatibility. + apiRoute("get", "/api/script/widgets", () => []); + apiRoute("get", "/api/script/startup", () => []); + apiRoute("get", "/api/system-checks", () => ({ isCpuArchMismatch: false })); +} + +function bootstrapRoute(): BootstrapDefinition { + const assetPath = "."; + + const isDbInitialized = sql_init.isDbInitialized(); + const commonItems = { + ...getSharedBootstrapItems(assetPath, isDbInitialized), + isDev: import.meta.env.DEV, + isStandalone: true, + isMainWindow: true, + isElectron: false, + hasNativeTitleBar: false, + hasBackgroundEffects: false, + triliumVersion: packageJson.version, + device: false as const, // Let the client detect device type. 
+ appPath: assetPath, + instanceName: "standalone", + TRILIUM_SAFE_MODE: !!getPlatform().getEnv("TRILIUM_SAFE_MODE") + }; + + if (!isDbInitialized) { + return { + ...commonItems, + baseApiUrl: "../api/", + isProtectedSessionAvailable: false, + }; + } + + return { + ...commonItems, + csrfToken: "dummy-csrf-token", + baseApiUrl: "../api/", + headingStyle: "plain", + layoutOrientation: "vertical", + platform: "web", + }; +} + +/** + * Create and configure a router with all routes registered. + */ +export function createConfiguredRouter(): BrowserRouter { + const router = new BrowserRouter(); + registerRoutes(router); + return router; +} diff --git a/apps/client-standalone/src/lightweight/cls_provider.ts b/apps/client-standalone/src/lightweight/cls_provider.ts new file mode 100644 index 00000000000..08d1a24d7b9 --- /dev/null +++ b/apps/client-standalone/src/lightweight/cls_provider.ts @@ -0,0 +1,77 @@ +import { ExecutionContext } from "@triliumnext/core"; + +/** + * Browser execution context implementation. + * + * Handles per-request context isolation with support for fire-and-forget async operations + * using a context stack and grace-period cleanup to allow unawaited promises to complete. 
+ */ +export default class BrowserExecutionContext implements ExecutionContext { + private contextStack: Map[] = []; + private cleanupTimers = new WeakMap, ReturnType>(); + private readonly CLEANUP_GRACE_PERIOD = 1000; // 1 second for fire-and-forget operations + + private getCurrentContext(): Map { + if (this.contextStack.length === 0) { + throw new Error("ExecutionContext not initialized"); + } + return this.contextStack[this.contextStack.length - 1]; + } + + get(key: string): T { + return this.getCurrentContext().get(key); + } + + set(key: string, value: any): void { + this.getCurrentContext().set(key, value); + } + + reset(): void { + this.contextStack = []; + } + + init(callback: () => T): T { + const context = new Map(); + this.contextStack.push(context); + + // Cancel any pending cleanup timer for this context + const existingTimer = this.cleanupTimers.get(context); + if (existingTimer) { + clearTimeout(existingTimer); + this.cleanupTimers.delete(context); + } + + try { + const result = callback(); + + // If the result is a Promise + if (result && typeof result === 'object' && 'then' in result && 'catch' in result) { + const promise = result as unknown as Promise; + return promise.finally(() => { + this.scheduleContextCleanup(context); + }) as T; + } else { + // For synchronous results, schedule delayed cleanup to allow fire-and-forget operations + this.scheduleContextCleanup(context); + return result; + } + } catch (error) { + // Always clean up on error with grace period + this.scheduleContextCleanup(context); + throw error; + } + } + + private scheduleContextCleanup(context: Map): void { + const timer = setTimeout(() => { + // Remove from stack if still present + const index = this.contextStack.indexOf(context); + if (index !== -1) { + this.contextStack.splice(index, 1); + } + this.cleanupTimers.delete(context); + }, this.CLEANUP_GRACE_PERIOD); + + this.cleanupTimers.set(context, timer); + } +} diff --git 
a/apps/client-standalone/src/lightweight/crypto_provider.ts b/apps/client-standalone/src/lightweight/crypto_provider.ts new file mode 100644 index 00000000000..128031bba22 --- /dev/null +++ b/apps/client-standalone/src/lightweight/crypto_provider.ts @@ -0,0 +1,175 @@ +import type { Cipher, CryptoProvider, ScryptOptions } from "@triliumnext/core"; +import { binary_utils } from "@triliumnext/core"; +import { sha1 } from "js-sha1"; +import { sha256 } from "js-sha256"; +import { sha512 } from "js-sha512"; +import { md5 } from "js-md5"; +import { scrypt } from "scrypt-js"; +import aesjs from "aes-js"; + +const CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; + +/** + * Crypto provider for browser environments using pure JavaScript crypto libraries. + * Uses aes-js for synchronous AES encryption (matching Node.js behavior). + */ +export default class BrowserCryptoProvider implements CryptoProvider { + + createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array { + const data = binary_utils.unwrapStringOrBuffer(content); + + let hexHash: string; + if (algorithm === "md5") { + hexHash = md5(data); + } else if (algorithm === "sha1") { + hexHash = sha1(data); + } else { + hexHash = sha512(data); + } + + // Convert hex string to Uint8Array + const bytes = new Uint8Array(hexHash.length / 2); + for (let i = 0; i < hexHash.length; i += 2) { + bytes[i / 2] = parseInt(hexHash.substr(i, 2), 16); + } + return bytes; + } + + createCipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher { + return new AesJsCipher(algorithm, key, iv, "encrypt"); + } + + createDecipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher { + return new AesJsCipher(algorithm, key, iv, "decrypt"); + } + + randomBytes(size: number): Uint8Array { + const bytes = new Uint8Array(size); + crypto.getRandomValues(bytes); + return bytes; + } + + randomString(length: number): string { + const bytes = 
this.randomBytes(length); + let result = ""; + for (let i = 0; i < length; i++) { + result += CHARS[bytes[i] % CHARS.length]; + } + return result; + } + + hmac(secret: string | Uint8Array, value: string | Uint8Array): string { + const secretStr = binary_utils.unwrapStringOrBuffer(secret); + const valueStr = binary_utils.unwrapStringOrBuffer(value); + // sha256.hmac returns hex, convert to base64 to match Node's behavior + const hexHash = sha256.hmac(secretStr, valueStr); + const bytes = new Uint8Array(hexHash.length / 2); + for (let i = 0; i < hexHash.length; i += 2) { + bytes[i / 2] = parseInt(hexHash.substr(i, 2), 16); + } + return btoa(String.fromCharCode(...bytes)); + } + + async scrypt( + password: Uint8Array | string, + salt: Uint8Array | string, + keyLength: number, + options: ScryptOptions = {} + ): Promise { + const { N = 16384, r = 8, p = 1 } = options; + const passwordBytes = binary_utils.wrapStringOrBuffer(password); + const saltBytes = binary_utils.wrapStringOrBuffer(salt); + + return scrypt(passwordBytes, saltBytes, N, r, p, keyLength); + } + + constantTimeCompare(a: Uint8Array, b: Uint8Array): boolean { + if (a.length !== b.length) { + return false; + } + + let result = 0; + for (let i = 0; i < a.length; i++) { + result |= a[i] ^ b[i]; + } + return result === 0; + } +} + +/** + * A synchronous cipher implementation using aes-js. + * Matches Node.js crypto behavior with update() and final() methods. 
+ */ +class AesJsCipher implements Cipher { + private chunks: Uint8Array[] = []; + private key: Uint8Array; + private iv: Uint8Array; + private mode: "encrypt" | "decrypt"; + private finalized = false; + + constructor( + _algorithm: "aes-128-cbc", + key: Uint8Array, + iv: Uint8Array, + mode: "encrypt" | "decrypt" + ) { + this.key = key; + this.iv = iv; + this.mode = mode; + } + + update(data: Uint8Array): Uint8Array { + if (this.finalized) { + throw new Error("Cipher has already been finalized"); + } + // Buffer the data - we process everything in final() to match streaming behavior + this.chunks.push(data); + // Return empty array since aes-js CBC doesn't support true streaming + return new Uint8Array(0); + } + + final(): Uint8Array { + if (this.finalized) { + throw new Error("Cipher has already been finalized"); + } + this.finalized = true; + + // Concatenate all chunks + const totalLength = this.chunks.reduce((sum, chunk) => sum + chunk.length, 0); + const data = new Uint8Array(totalLength); + let offset = 0; + for (const chunk of this.chunks) { + data.set(chunk, offset); + offset += chunk.length; + } + + if (this.mode === "encrypt") { + // PKCS7 padding for encryption + const blockSize = 16; + const paddingLength = blockSize - (data.length % blockSize); + const paddedData = new Uint8Array(data.length + paddingLength); + paddedData.set(data); + paddedData.fill(paddingLength, data.length); + + const aesCbc = new aesjs.ModeOfOperation.cbc( + Array.from(this.key), + Array.from(this.iv) + ); + return new Uint8Array(aesCbc.encrypt(paddedData)); + } else { + // Decryption + const aesCbc = new aesjs.ModeOfOperation.cbc( + Array.from(this.key), + Array.from(this.iv) + ); + const decrypted = new Uint8Array(aesCbc.decrypt(data)); + + // Remove PKCS7 padding + const paddingLength = decrypted[decrypted.length - 1]; + if (paddingLength > 0 && paddingLength <= 16) { + return decrypted.slice(0, decrypted.length - paddingLength); + } + return decrypted; + } + } +} diff --git 
a/apps/client-standalone/src/lightweight/log_provider.ts b/apps/client-standalone/src/lightweight/log_provider.ts new file mode 100644 index 00000000000..7eddc54c333 --- /dev/null +++ b/apps/client-standalone/src/lightweight/log_provider.ts @@ -0,0 +1,168 @@ +import { FileBasedLogService, type LogFileInfo } from "@triliumnext/core"; + +const LOG_DIR_NAME = "logs"; +const LOG_FILE_PATTERN = /^trilium-\d{4}-\d{2}-\d{2}\.log$/; +const DEFAULT_RETENTION_DAYS = 7; + +/** + * Standalone log service using OPFS (Origin Private File System). + * Uses synchronous access handles available in service worker context. + */ +export default class StandaloneLogService extends FileBasedLogService { + private logDir: FileSystemDirectoryHandle | null = null; + private currentFile: FileSystemSyncAccessHandle | null = null; + private currentFileName: string = ""; + private textEncoder = new TextEncoder(); + private textDecoder = new TextDecoder(); + + constructor() { + super(); + } + + // ==================== Abstract Method Implementations ==================== + + protected override get eol(): string { + return "\n"; + } + + protected override async ensureLogDirectory(): Promise { + const root = await navigator.storage.getDirectory(); + this.logDir = await root.getDirectoryHandle(LOG_DIR_NAME, { create: true }); + } + + protected override async openLogFile(fileName: string): Promise { + if (!this.logDir) { + await this.ensureLogDirectory(); + } + + // Close existing file if open + if (this.currentFile) { + this.currentFile.close(); + this.currentFile = null; + } + + const fileHandle = await this.logDir!.getFileHandle(fileName, { create: true }); + + // Try to create sync access handle with retry logic for worker restarts + // Previous worker may have left handle open before being terminated + const maxRetries = 3; + const retryDelay = 100; + + for (let attempt = 0; attempt < maxRetries; attempt++) { + try { + this.currentFile = await fileHandle.createSyncAccessHandle(); + break; + } 
catch (error) { + if (attempt === maxRetries - 1) { + // Last attempt failed - fall back to console-only logging + console.warn("[LogService] Could not open log file, using console-only logging:", error); + this.currentFile = null; + this.currentFileName = ""; + return; + } + // Wait before retrying - previous handle may be released + await new Promise(resolve => setTimeout(resolve, retryDelay * (attempt + 1))); + } + } + + this.currentFileName = fileName; + + // Seek to end for appending + if (this.currentFile) { + const size = this.currentFile.getSize(); + this.currentFile.truncate(size); // No-op, but ensures we're at the right position + } + } + + protected override closeLogFile(): void { + if (this.currentFile) { + this.currentFile.close(); + this.currentFile = null; + this.currentFileName = ""; + } + } + + protected override writeEntry(entry: string): void { + if (!this.currentFile) { + console.log(entry); // Fallback to console if file not ready + return; + } + + const data = this.textEncoder.encode(entry); + const currentSize = this.currentFile.getSize(); + this.currentFile.write(data, { at: currentSize }); + this.currentFile.flush(); + } + + protected override readLogFile(fileName: string): string | null { + if (!this.logDir) { + return null; + } + + try { + // For the current file, we need to read from the sync handle + if (fileName === this.currentFileName && this.currentFile) { + const size = this.currentFile.getSize(); + const buffer = new ArrayBuffer(size); + const view = new DataView(buffer); + this.currentFile.read(view, { at: 0 }); + return this.textDecoder.decode(buffer); + } + + // For other files, we'd need async access - return null for now + // The current file is what's most commonly needed + return null; + } catch { + return null; + } + } + + protected override async listLogFiles(): Promise { + if (!this.logDir) { + return []; + } + + const logFiles: LogFileInfo[] = []; + + for await (const [name, handle] of this.logDir.entries()) { + if 
(handle.kind !== "file" || !LOG_FILE_PATTERN.test(name)) { + continue; + } + + // OPFS doesn't provide mtime directly, so we parse from filename + const match = name.match(/trilium-(\d{4})-(\d{2})-(\d{2})\.log/); + if (match) { + const mtime = new Date( + parseInt(match[1]), + parseInt(match[2]) - 1, + parseInt(match[3]) + ); + logFiles.push({ name, mtime }); + } + } + + return logFiles; + } + + protected override async deleteLogFile(fileName: string): Promise { + if (!this.logDir) { + return; + } + + // Don't delete the current file + if (fileName === this.currentFileName) { + return; + } + + try { + await this.logDir.removeEntry(fileName); + } catch { + // File might not exist or be locked + } + } + + protected override getRetentionDays(): number { + // Standalone doesn't have config system, use default + return DEFAULT_RETENTION_DAYS; + } +} diff --git a/apps/client-standalone/src/lightweight/messaging_provider.ts b/apps/client-standalone/src/lightweight/messaging_provider.ts new file mode 100644 index 00000000000..8cfc5b3c4a3 --- /dev/null +++ b/apps/client-standalone/src/lightweight/messaging_provider.ts @@ -0,0 +1,120 @@ +import type { WebSocketMessage } from "@triliumnext/commons"; +import type { ClientMessageHandler, MessageHandler,MessagingProvider } from "@triliumnext/core"; + +/** + * Messaging provider for browser Worker environments. + * + * This provider uses the Worker's postMessage API to communicate + * with the main thread. It's designed to be used inside a Web Worker + * that runs the core services. 
+ * + * Message flow: + * - Outbound (worker → main): Uses self.postMessage() with type: "WS_MESSAGE" + * - Inbound (main → worker): Listens to onmessage for type: "WS_MESSAGE" + */ +export default class WorkerMessagingProvider implements MessagingProvider { + private messageHandlers: MessageHandler[] = []; + private clientMessageHandler?: ClientMessageHandler; + private isDisposed = false; + + constructor() { + // Listen for incoming messages from the main thread + self.addEventListener("message", this.handleIncomingMessage); + } + + private handleIncomingMessage = (event: MessageEvent) => { + if (this.isDisposed) return; + + const { type, message } = event.data || {}; + + if (type === "WS_MESSAGE" && message) { + // Dispatch to the client message handler (used by ws.ts for log-error, log-info, ping) + if (this.clientMessageHandler) { + try { + this.clientMessageHandler("main-thread", message); + } catch (e) { + console.error("[WorkerMessagingProvider] Error in client message handler:", e); + } + } + + // Dispatch to all registered handlers + for (const handler of this.messageHandlers) { + try { + handler(message as WebSocketMessage); + } catch (e) { + console.error("[WorkerMessagingProvider] Error in message handler:", e); + } + } + } + }; + + /** + * Send a message to all clients (in this case, the main thread). + * The main thread is responsible for further distribution if needed. + */ + sendMessageToAllClients(message: WebSocketMessage): void { + if (this.isDisposed) { + console.warn("[WorkerMessagingProvider] Cannot send message - provider is disposed"); + return; + } + + try { + self.postMessage({ + type: "WS_MESSAGE", + message + }); + } catch (e) { + console.error("[WorkerMessagingProvider] Error sending message:", e); + } + } + + /** + * Send a message to a specific client. + * In worker context, there's only one client (the main thread), so clientId is ignored. 
+ */ + sendMessageToClient(_clientId: string, message: WebSocketMessage): boolean { + if (this.isDisposed) { + return false; + } + + this.sendMessageToAllClients(message); + return true; + } + + /** + * Register a handler for incoming client messages. + */ + setClientMessageHandler(handler: ClientMessageHandler): void { + this.clientMessageHandler = handler; + } + + /** + * Subscribe to incoming messages from the main thread. + */ + onMessage(handler: MessageHandler): () => void { + this.messageHandlers.push(handler); + + return () => { + this.messageHandlers = this.messageHandlers.filter(h => h !== handler); + }; + } + + /** + * Get the number of connected "clients". + * In worker context, there's always exactly 1 client (the main thread). + */ + getClientCount(): number { + return this.isDisposed ? 0 : 1; + } + + /** + * Clean up resources. + */ + dispose(): void { + if (this.isDisposed) return; + + this.isDisposed = true; + self.removeEventListener("message", this.handleIncomingMessage); + this.messageHandlers = []; + } +} diff --git a/apps/client-standalone/src/lightweight/platform_provider.ts b/apps/client-standalone/src/lightweight/platform_provider.ts new file mode 100644 index 00000000000..9c5e2f11861 --- /dev/null +++ b/apps/client-standalone/src/lightweight/platform_provider.ts @@ -0,0 +1,42 @@ +import type { PlatformProvider } from "@triliumnext/core"; + +// Build-time constant injected by Vite (see `define` in vite.config.mts). +declare const __TRILIUM_INTEGRATION_TEST__: string; + +/** Maps URL query parameter names to TRILIUM_ environment variable names. 
*/ +const QUERY_TO_ENV: Record = { + "safeMode": "TRILIUM_SAFE_MODE", + "startNoteId": "TRILIUM_START_NOTE_ID", +}; + +export default class StandalonePlatformProvider implements PlatformProvider { + readonly isElectron = false; + readonly isMac = false; + readonly isWindows = false; + + private envMap: Record = {}; + + constructor(queryString: string) { + const params = new URLSearchParams(queryString); + for (const [queryKey, envKey] of Object.entries(QUERY_TO_ENV)) { + if (params.has(queryKey)) { + this.envMap[envKey] = params.get(queryKey) || "true"; + } + } + if (__TRILIUM_INTEGRATION_TEST__) { + this.envMap["TRILIUM_INTEGRATION_TEST"] = __TRILIUM_INTEGRATION_TEST__; + } + } + + crash(message: string): void { + console.error("[Standalone] FATAL:", message); + self.postMessage({ + type: "FATAL_ERROR", + message + }); + } + + getEnv(key: string): string | undefined { + return this.envMap[key]; + } +} diff --git a/apps/client-standalone/src/lightweight/request_provider.ts b/apps/client-standalone/src/lightweight/request_provider.ts new file mode 100644 index 00000000000..cdaa3a34e98 --- /dev/null +++ b/apps/client-standalone/src/lightweight/request_provider.ts @@ -0,0 +1,93 @@ +import type { ExecOpts, RequestProvider } from "@triliumnext/core"; + +/** + * Fetch-based implementation of RequestProvider for browser environments. + * + * Uses the Fetch API instead of Node's http/https modules. + * Proxy support is not available in browsers, so the proxy option is ignored. + */ +export default class FetchRequestProvider implements RequestProvider { + + async exec(opts: ExecOpts): Promise { + const paging = opts.paging || { + pageCount: 1, + pageIndex: 0, + requestId: "n/a" + }; + + const headers: Record = { + "Content-Type": paging.pageCount === 1 ? 
"application/json" : "text/plain", + "pageCount": String(paging.pageCount), + "pageIndex": String(paging.pageIndex), + "requestId": paging.requestId + }; + + // Note: the Cookie header is a forbidden header in fetch — + // the browser manages cookies automatically via credentials: 'include'. + + if (opts.auth?.password) { + headers["trilium-cred"] = btoa(`dummy:${opts.auth.password}`); + } + + let body: string | undefined; + if (opts.body) { + body = typeof opts.body === "object" ? JSON.stringify(opts.body) : opts.body; + } + + const controller = new AbortController(); + const timeoutId = opts.timeout + ? setTimeout(() => controller.abort(), opts.timeout) + : undefined; + + try { + const response = await fetch(opts.url, { + method: opts.method, + headers, + body, + signal: controller.signal, + credentials: "include" + }); + + if ([200, 201, 204].includes(response.status)) { + const text = await response.text(); + return text.trim() ? JSON.parse(text) : null; + } + const text = await response.text(); + let errorMessage: string; + try { + const json = JSON.parse(text); + errorMessage = json?.message || ""; + } catch { + errorMessage = text.substring(0, 100); + } + throw new Error(`${response.status} ${opts.method} ${opts.url}: ${errorMessage}`); + + } catch (e: any) { + if (e.name === "AbortError") { + throw new Error(`${opts.method} ${opts.url} failed, error: timeout after ${opts.timeout}ms`); + } + if (e instanceof TypeError && e.message === "Failed to fetch") { + const isCrossOrigin = !opts.url.startsWith(location.origin); + if (isCrossOrigin) { + throw new Error(`Request to ${opts.url} was blocked. The server may not allow requests from this origin (CORS), or it may be unreachable.`); + } + throw new Error(`Request to ${opts.url} failed. 
The server may be unreachable.`);
+            }
+            throw e;
+        } finally {
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
+        }
+    }
+
+    async getImage(imageUrl: string): Promise<ArrayBuffer> {
+        const response = await fetch(imageUrl);
+
+        if (!response.ok) {
+            throw new Error(`${response.status} GET ${imageUrl} failed`);
+        }
+
+        return await response.arrayBuffer();
+    }
+}
diff --git a/apps/client-standalone/src/lightweight/sql_provider.ts b/apps/client-standalone/src/lightweight/sql_provider.ts
new file mode 100644
index 00000000000..576784b5ab7
--- /dev/null
+++ b/apps/client-standalone/src/lightweight/sql_provider.ts
@@ -0,0 +1,742 @@
+import { type BindableValue, type SAHPoolUtil, default as sqlite3InitModule } from "@sqlite.org/sqlite-wasm";
+import type { DatabaseProvider, RunResult, Statement, Transaction } from "@triliumnext/core";
+
+// Type definitions for SQLite WASM (the library doesn't export these directly)
+type Sqlite3Module = Awaited<ReturnType<typeof sqlite3InitModule>>;
+type Sqlite3Database = InstanceType<Sqlite3Module["oo1"]["DB"]>;
+type Sqlite3PreparedStatement = ReturnType<Sqlite3Database["prepare"]>;
+
+/**
+ * Wraps an SQLite WASM PreparedStatement to match the Statement interface
+ * expected by trilium-core.
+ */
+class WasmStatement implements Statement {
+    private isRawMode = false;
+    private isPluckMode = false;
+    private isFinalized = false;
+
+    constructor(
+        private stmt: Sqlite3PreparedStatement,
+        private db: Sqlite3Database,
+        private sqlite3: Sqlite3Module,
+        private sql: string
+    ) {}
+
+    run(...params: unknown[]): RunResult {
+        if (this.isFinalized) {
+            throw new Error("Cannot call run() on finalized statement");
+        }
+
+        this.bindParams(params);
+        try {
+            // Use step() and then reset instead of stepFinalize()
+            // This allows the statement to be reused
+            this.stmt.step();
+            const changes = this.db.changes();
+            // Get the last insert row ID using the C API
+            const lastInsertRowid = this.db.pointer ?
this.sqlite3.capi.sqlite3_last_insert_rowid(this.db.pointer) : 0;
+            this.stmt.reset();
+            return {
+                changes,
+                lastInsertRowid: typeof lastInsertRowid === "bigint" ? Number(lastInsertRowid) : lastInsertRowid
+            };
+        } catch (e) {
+            // Reset on error to allow reuse
+            this.stmt.reset();
+            throw e;
+        }
+    }
+
+    get(params: unknown): unknown {
+        if (this.isFinalized) {
+            throw new Error("Cannot call get() on finalized statement");
+        }
+
+        this.bindParams(Array.isArray(params) ? params : params !== undefined ? [params] : []);
+        try {
+            if (this.stmt.step()) {
+                if (this.isPluckMode) {
+                    // In pluck mode, return only the first column value
+                    const row = this.stmt.get([]);
+                    return Array.isArray(row) && row.length > 0 ? row[0] : undefined;
+                }
+                return this.isRawMode ? this.stmt.get([]) : this.stmt.get({});
+            }
+            return undefined;
+        } finally {
+            this.stmt.reset();
+        }
+    }
+
+    all(...params: unknown[]): unknown[] {
+        if (this.isFinalized) {
+            throw new Error("Cannot call all() on finalized statement");
+        }
+
+        this.bindParams(params);
+        const results: unknown[] = [];
+        try {
+            while (this.stmt.step()) {
+                if (this.isPluckMode) {
+                    // In pluck mode, return only the first column value for each row
+                    const row = this.stmt.get([]);
+                    if (Array.isArray(row) && row.length > 0) {
+                        results.push(row[0]);
+                    }
+                } else {
+                    results.push(this.isRawMode ? this.stmt.get([]) : this.stmt.get({}));
+                }
+            }
+            return results;
+        } finally {
+            this.stmt.reset();
+        }
+    }
+
+    iterate(...params: unknown[]): IterableIterator<unknown> {
+        if (this.isFinalized) {
+            throw new Error("Cannot call iterate() on finalized statement");
+        }
+
+        this.bindParams(params);
+        const stmt = this.stmt;
+        const isRaw = this.isRawMode;
+        const isPluck = this.isPluckMode;
+
+        return {
+            [Symbol.iterator]() {
+                return this;
+            },
+            next(): IteratorResult<unknown> {
+                if (stmt.step()) {
+                    if (isPluck) {
+                        const row = stmt.get([]);
+                        const value = Array.isArray(row) && row.length > 0 ?
row[0] : undefined; + return { value, done: false }; + } + return { value: isRaw ? stmt.get([]) : stmt.get({}), done: false }; + } + stmt.reset(); + return { value: undefined, done: true }; + } + }; + } + + raw(toggleState?: boolean): this { + // In raw mode, rows are returned as arrays instead of objects + // If toggleState is undefined, enable raw mode (better-sqlite3 behavior) + this.isRawMode = toggleState !== undefined ? toggleState : true; + return this; + } + + pluck(toggleState?: boolean): this { + // In pluck mode, only the first column of each row is returned + // If toggleState is undefined, enable pluck mode (better-sqlite3 behavior) + this.isPluckMode = toggleState !== undefined ? toggleState : true; + return this; + } + + /** + * Detect the prefix used for a parameter name in the SQL query. + * SQLite supports @name, :name, and $name parameter styles. + * Returns the prefix character, or ':' as default if not found. + */ + private detectParamPrefix(paramName: string): string { + // Search for the parameter with each possible prefix + for (const prefix of [':', '@', '$']) { + // Use word boundary to avoid partial matches + const pattern = new RegExp(`\\${prefix}${paramName}(?![a-zA-Z0-9_])`); + if (pattern.test(this.sql)) { + return prefix; + } + } + // Default to ':' if not found (most common in Trilium) + return ':'; + } + + private bindParams(params: unknown[]): void { + this.stmt.clearBindings(); + if (params.length === 0) { + return; + } + + // Handle single object with named parameters + if (params.length === 1 && typeof params[0] === "object" && params[0] !== null && !Array.isArray(params[0])) { + const inputBindings = params[0] as { [paramName: string]: BindableValue }; + + // SQLite WASM expects parameter names to include the prefix (@ : or $) + // We detect the prefix used in the SQL for each parameter + const bindings: { [paramName: string]: BindableValue } = {}; + for (const [key, value] of Object.entries(inputBindings)) { + // If the key 
already has a prefix, use it as-is
+                if (key.startsWith('@') || key.startsWith(':') || key.startsWith('$')) {
+                    bindings[key] = value;
+                } else {
+                    // Detect the prefix used in the SQL and apply it
+                    const prefix = this.detectParamPrefix(key);
+                    bindings[`${prefix}${key}`] = value;
+                }
+            }
+
+            this.stmt.bind(bindings);
+        } else {
+            // Handle positional parameters - flatten and cast to BindableValue[]
+            const flatParams = params.flat() as BindableValue[];
+            if (flatParams.length > 0) {
+                this.stmt.bind(flatParams);
+            }
+        }
+    }
+
+    finalize(): void {
+        if (!this.isFinalized) {
+            try {
+                this.stmt.finalize();
+            } catch (e) {
+                console.warn("Error finalizing SQLite statement:", e);
+            } finally {
+                this.isFinalized = true;
+            }
+        }
+    }
+}
+
+/**
+ * SQLite database provider for browser environments using SQLite WASM.
+ *
+ * This provider wraps the official @sqlite.org/sqlite-wasm package to provide
+ * a DatabaseProvider implementation compatible with trilium-core.
+ *
+ * @example
+ * ```typescript
+ * const provider = new BrowserSqlProvider();
+ * await provider.initWasm(); // Initialize SQLite WASM module
+ * provider.loadFromMemory(); // Open an in-memory database
+ * // or
+ * provider.loadFromBuffer(existingDbBuffer); // Load from existing data
+ * ```
+ */
+export default class BrowserSqlProvider implements DatabaseProvider {
+    private db?: Sqlite3Database;
+    private sqlite3?: Sqlite3Module;
+    private _inTransaction = false;
+    private initPromise?: Promise<void>;
+    private initError?: Error;
+    private statementCache: Map<string, WasmStatement> = new Map();
+
+    // OPFS state tracking
+    private opfsDbPath?: string;
+
+    // SAHPool state tracking
+    private sahPoolUtil?: SAHPoolUtil;
+    private sahPoolDbName?: string;
+
+    /**
+     * Get the SQLite WASM module version info.
+     * Returns undefined if the module hasn't been initialized yet.
+     */
+    get version(): { libVersion: string; sourceId: string } | undefined {
+        return this.sqlite3?.version;
+    }
+
+    /**
+     * Initialize the SQLite WASM module.
+     * This must be called before using any database operations.
+     * Safe to call multiple times - subsequent calls return the same promise.
+     *
+     * @returns A promise that resolves when the module is initialized
+     * @throws Error if initialization fails
+     */
+    async initWasm(): Promise<void> {
+        // Return existing promise if already initializing/initialized
+        if (this.initPromise) {
+            return this.initPromise;
+        }
+
+        // Fail fast if we already tried and failed
+        if (this.initError) {
+            throw this.initError;
+        }
+
+        this.initPromise = this.doInitWasm();
+        return this.initPromise;
+    }
+
+    private async doInitWasm(): Promise<void> {
+        try {
+            console.log("[BrowserSqlProvider] Initializing SQLite WASM...");
+            const startTime = performance.now();
+
+            this.sqlite3 = await sqlite3InitModule({
+                print: console.log,
+                printErr: console.error,
+            });
+
+            const initTime = performance.now() - startTime;
+            console.log(
+                `[BrowserSqlProvider] SQLite WASM initialized in ${initTime.toFixed(2)}ms:`,
+                this.sqlite3.version.libVersion
+            );
+        } catch (e) {
+            this.initError = e instanceof Error ? e : new Error(String(e));
+            console.error("[BrowserSqlProvider] SQLite WASM initialization failed:", this.initError);
+            throw this.initError;
+        }
+    }
+
+    /**
+     * Check if the SQLite WASM module has been initialized.
+     */
+    get isInitialized(): boolean {
+        return this.sqlite3 !== undefined;
+    }
+
+    // ==================== SAHPool VFS (preferred OPFS backend) ====================
+
+    /**
+     * Install the OPFS SAHPool VFS. This pre-allocates a pool of OPFS
+     * SyncAccessHandle objects, enabling WAL mode and significantly faster
+     * writes compared to the legacy OPFS VFS.
+     *
+     * Must be called after `initWasm()` and before `loadFromSahPool()`.
+     * This is async because it acquires OPFS file handles.
+     *
+     * Unlike the legacy OPFS VFS, SAHPool does **not** require SharedArrayBuffer
+     * or COOP/COEP headers — it only needs OPFS itself (a Worker context with
+     * `navigator.storage.getDirectory`).
This makes it usable in Capacitor's
+     * Android WebView, which doesn't support cross-origin isolation.
+     *
+     * @param options.directory - OPFS directory for the pool (default: auto-derived from VFS name)
+     * @param options.initialCapacity - Minimum number of file slots (default: 6)
+     * @throws Error if the environment doesn't support OPFS (no Worker, or no OPFS API)
+     */
+    async installSahPool(options: { directory?: string; initialCapacity?: number } = {}): Promise<void> {
+        this.ensureSqlite3();
+
+        console.log("[BrowserSqlProvider] Installing OPFS SAHPool VFS...");
+        const startTime = performance.now();
+
+        this.sahPoolUtil = await this.sqlite3!.installOpfsSAHPoolVfs({
+            clearOnInit: false,
+            initialCapacity: options.initialCapacity ?? 6,
+            directory: options.directory,
+        });
+
+        // Ensure enough slots for DB + WAL + journal + temp files
+        await this.sahPoolUtil.reserveMinimumCapacity(options.initialCapacity ?? 6);
+
+        const initTime = performance.now() - startTime;
+        console.log(
+            `[BrowserSqlProvider] SAHPool VFS installed in ${initTime.toFixed(2)}ms ` +
+            `(capacity: ${this.sahPoolUtil.getCapacity()}, files: ${this.sahPoolUtil.getFileCount()})`
+        );
+    }
+
+    /**
+     * Whether the SAHPool VFS has been successfully installed.
+     */
+    get isSahPoolInstalled(): boolean {
+        return this.sahPoolUtil !== undefined;
+    }
+
+    /**
+     * Access the SAHPool utility for advanced operations (import/export/migration).
+     */
+    get sahPool(): SAHPoolUtil | undefined {
+        return this.sahPoolUtil;
+    }
+
+    /**
+     * Load or create a database using the SAHPool VFS.
+     * This is the preferred method for persistent storage — it supports WAL mode
+     * and is significantly faster than the legacy OPFS VFS.
+     *
+     * @param dbName - Virtual filename within the pool (e.g., "/trilium.db").
+     *                 Must start with a slash.
+ * @throws Error if SAHPool VFS is not installed + */ + loadFromSahPool(dbName: string): void { + this.ensureSqlite3(); + if (!this.sahPoolUtil) { + throw new Error( + "SAHPool VFS not installed. Call installSahPool() first." + ); + } + + console.log(`[BrowserSqlProvider] Loading database from SAHPool: ${dbName}`); + const startTime = performance.now(); + + try { + this.db = new this.sahPoolUtil.OpfsSAHPoolDb(dbName); + this.sahPoolDbName = dbName; + this.opfsDbPath = undefined; + + // SAHPool supports WAL mode — the key advantage over legacy OPFS VFS + this.db.exec("PRAGMA journal_mode = WAL"); + this.db.exec("PRAGMA synchronous = NORMAL"); + + const loadTime = performance.now() - startTime; + console.log(`[BrowserSqlProvider] SAHPool database loaded in ${loadTime.toFixed(2)}ms (WAL mode)`); + } catch (e) { + const error = e instanceof Error ? e : new Error(String(e)); + console.error(`[BrowserSqlProvider] Failed to load SAHPool database: ${error.message}`); + throw error; + } + } + + /** + * Whether the currently open database is using the SAHPool VFS. + */ + get isUsingSahPool(): boolean { + return this.sahPoolDbName !== undefined; + } + + // ==================== Legacy OPFS Support ==================== + + /** + * Check if the legacy OPFS VFS is available. + * This requires: + * - Running in a Worker context + * - Browser support for OPFS APIs + * - COOP/COEP headers sent by the server (for SharedArrayBuffer) + * + * @returns true if legacy OPFS VFS is available for use + */ + isOpfsAvailable(): boolean { + this.ensureSqlite3(); + // SQLite WASM automatically installs the OPFS VFS if the environment supports it + // We can check for its presence via sqlite3_vfs_find or the OpfsDb class + return this.sqlite3!.oo1.OpfsDb !== undefined; + } + + /** + * Load or create a database stored in OPFS for persistent storage. + * + * **Prefer `loadFromSahPool()` over this method** — it supports WAL mode + * and is significantly faster. 
This method is kept for migration purposes. + * The database will persist across browser sessions. + * + * Requires COOP/COEP headers to be set by the server: + * - Cross-Origin-Opener-Policy: same-origin + * - Cross-Origin-Embedder-Policy: require-corp + * + * @param path - The path for the database file in OPFS (e.g., "/trilium.db") + * Paths without a leading slash are treated as relative to OPFS root. + * Leading directories are created automatically. + * @param options - Additional options + * @throws Error if OPFS VFS is not available + * + * @example + * ```typescript + * const provider = new BrowserSqlProvider(); + * await provider.initWasm(); + * if (provider.isOpfsAvailable()) { + * provider.loadFromOpfs("/my-database.db"); + * } else { + * console.warn("OPFS not available, using in-memory database"); + * provider.loadFromMemory(); + * } + * ``` + */ + loadFromOpfs(path: string, options: { createIfNotExists?: boolean } = {}): void { + this.ensureSqlite3(); + + if (!this.isOpfsAvailable()) { + throw new Error( + "OPFS VFS is not available. This requires:\n" + + "1. Running in a Worker context\n" + + "2. Browser support for OPFS (Chrome 102+, Firefox 111+, Safari 17+)\n" + + "3. COOP/COEP headers from the server:\n" + + " Cross-Origin-Opener-Policy: same-origin\n" + + " Cross-Origin-Embedder-Policy: require-corp" + ); + } + + console.log(`[BrowserSqlProvider] Loading database from OPFS: ${path}`); + const startTime = performance.now(); + + try { + // OpfsDb automatically creates directories in the path + // Mode 'c' = create if not exists + const mode = options.createIfNotExists !== false ? 
'c' : ''; + this.db = new this.sqlite3!.oo1.OpfsDb(path, mode); + this.opfsDbPath = path; + this.sahPoolDbName = undefined; + + // Configure the database for legacy OPFS + // Note: WAL mode is not supported by the legacy OPFS VFS + this.db.exec("PRAGMA journal_mode = DELETE"); + this.db.exec("PRAGMA synchronous = NORMAL"); + + const loadTime = performance.now() - startTime; + console.log(`[BrowserSqlProvider] OPFS database loaded in ${loadTime.toFixed(2)}ms`); + } catch (e) { + const error = e instanceof Error ? e : new Error(String(e)); + console.error(`[BrowserSqlProvider] Failed to load OPFS database: ${error.message}`); + throw error; + } + } + + /** + * Check if the currently open database is stored in OPFS (legacy or SAHPool). + */ + get isUsingOpfs(): boolean { + return this.opfsDbPath !== undefined || this.sahPoolDbName !== undefined; + } + + /** + * Get the OPFS path of the currently open database. + * Returns undefined if not using OPFS. + */ + get currentOpfsPath(): string | undefined { + return this.opfsDbPath ?? this.sahPoolDbName; + } + + /** + * Check if the database has been initialized with a schema. + * This is a simple sanity check that looks for the existence of core tables. + * + * @returns true if the database appears to be initialized + */ + isDbInitialized(): boolean { + this.ensureDb(); + + // Check if the 'notes' table exists (a core table that must exist in an initialized DB) + const tableExists = this.db!.selectValue( + "SELECT name FROM sqlite_master WHERE type = 'table' AND name = 'notes'" + ); + + return tableExists !== undefined; + } + + // ==================== End OPFS Support ==================== + + loadFromFile(_path: string, _isReadOnly: boolean): void { + // Browser environment doesn't have direct file system access. + // Use SAHPool or OPFS for persistent storage. + throw new Error( + "loadFromFile is not supported in browser environment. 
" + + "Use loadFromMemory() for temporary databases, loadFromBuffer() to load from data, " + + "loadFromSahPool() (preferred) or loadFromOpfs() for persistent storage." + ); + } + + /** + * Create an empty in-memory database. + * Data will be lost when the page is closed. + * + * For persistent storage, use loadFromOpfs() instead. + * To load demo data, call initializeDemoDatabase() after this. + */ + loadFromMemory(): void { + this.ensureSqlite3(); + console.log("[BrowserSqlProvider] Creating in-memory database..."); + const startTime = performance.now(); + + this.db = new this.sqlite3!.oo1.DB(":memory:", "c"); + this.opfsDbPath = undefined; + this.sahPoolDbName = undefined; + this.db.exec("PRAGMA journal_mode = WAL"); + + const loadTime = performance.now() - startTime; + console.log(`[BrowserSqlProvider] In-memory database created in ${loadTime.toFixed(2)}ms`); + } + + loadFromBuffer(buffer: Uint8Array): void { + this.ensureSqlite3(); + // SQLite WASM's allocFromTypedArray rejects Node's Buffer (and other + // non-Uint8Array typed arrays) with "expecting 8/16/32/64". Normalize + // to a plain Uint8Array view over the same memory so callers can pass + // anything readFileSync returns. + const view = buffer instanceof Uint8Array && buffer.constructor === Uint8Array + ? buffer + : new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); + const p = this.sqlite3!.wasm.allocFromTypedArray(view); + try { + // Cached statements reference the previous DB and become invalid + // once we swap connections. Drop them so callers re-prepare. 
+ this.clearStatementCache(); + this.db = new this.sqlite3!.oo1.DB({ filename: ":memory:", flags: "c" }); + this.opfsDbPath = undefined; + this.sahPoolDbName = undefined; + + const rc = this.sqlite3!.capi.sqlite3_deserialize( + this.db.pointer!, + "main", + p, + view.byteLength, + view.byteLength, + this.sqlite3!.capi.SQLITE_DESERIALIZE_FREEONCLOSE | + this.sqlite3!.capi.SQLITE_DESERIALIZE_RESIZEABLE + ); + if (rc !== 0) { + throw new Error(`Failed to deserialize database: ${rc}`); + } + } catch (e) { + this.sqlite3!.wasm.dealloc(p); + throw e; + } + } + + backup(_destinationFile: string): void { + // In browser, we can serialize the database to a byte array + // For actual file backup, we'd need to use File System Access API or download + throw new Error( + "backup to file is not supported in browser environment. " + + "Use serialize() to get the database as a Uint8Array instead." + ); + } + + /** + * Serialize the database to a byte array. + * This can be used to save the database to IndexedDB, download it, etc. 
+     */
+    serialize(): Uint8Array {
+        this.ensureDb();
+        // Use the convenience wrapper which handles all the memory management
+        return this.sqlite3!.capi.sqlite3_js_db_export(this.db!);
+    }
+
+    prepare(query: string): Statement {
+        this.ensureDb();
+
+        // Check if we already have this statement cached
+        if (this.statementCache.has(query)) {
+            return this.statementCache.get(query)!;
+        }
+
+        // Create new statement and cache it
+        const stmt = this.db!.prepare(query);
+        const wasmStatement = new WasmStatement(stmt, this.db!, this.sqlite3!, query);
+        this.statementCache.set(query, wasmStatement);
+        return wasmStatement;
+    }
+
+    transaction<T>(func: (statement: Statement) => T): Transaction<T> {
+        this.ensureDb();
+
+        const self = this;
+        let savepointCounter = 0;
+
+        // Helper function to execute within a transaction
+        const executeTransaction = (beginStatement: string, ...args: unknown[]): T => {
+            // If we're already in a transaction (either tracked via JS flag or via actual SQLite
+            // autocommit state), use SAVEPOINTs for nesting — this handles the case where a manual
+            // BEGIN was issued directly (e.g. transactionalAsync) without going through transaction().
+ const sqliteInTransaction = self.db?.pointer !== undefined + && (self.sqlite3!.capi as any).sqlite3_get_autocommit(self.db!.pointer) === 0; + if (self._inTransaction || sqliteInTransaction) { + const savepointName = `sp_${++savepointCounter}_${Date.now()}`; + self.db!.exec(`SAVEPOINT ${savepointName}`); + try { + const result = func.apply(null, args as [Statement]); + self.db!.exec(`RELEASE SAVEPOINT ${savepointName}`); + return result; + } catch (e) { + self.db!.exec(`ROLLBACK TO SAVEPOINT ${savepointName}`); + throw e; + } + } + + // Not in a transaction, start a new one + self._inTransaction = true; + self.db!.exec(beginStatement); + try { + const result = func.apply(null, args as [Statement]); + self.db!.exec("COMMIT"); + return result; + } catch (e) { + self.db!.exec("ROLLBACK"); + throw e; + } finally { + self._inTransaction = false; + } + }; + + // Create the transaction function that acts like better-sqlite3's Transaction interface + // In better-sqlite3, the transaction function is callable and has .deferred(), .immediate(), etc. 
+ const transactionWrapper = Object.assign( + // Default call executes with BEGIN (same as immediate) + (...args: unknown[]): T => executeTransaction("BEGIN", ...args), + { + // Deferred transaction - locks acquired on first data access + deferred: (...args: unknown[]): T => executeTransaction("BEGIN DEFERRED", ...args), + // Immediate transaction - acquires write lock immediately + immediate: (...args: unknown[]): T => executeTransaction("BEGIN IMMEDIATE", ...args), + // Exclusive transaction - exclusive lock + exclusive: (...args: unknown[]): T => executeTransaction("BEGIN EXCLUSIVE", ...args), + // Default is same as calling directly + default: (...args: unknown[]): T => executeTransaction("BEGIN", ...args) + } + ); + + return transactionWrapper as unknown as Transaction; + } + + get inTransaction(): boolean { + return this._inTransaction; + } + + exec(query: string): void { + this.ensureDb(); + this.db!.exec(query); + } + + private clearStatementCache(): void { + for (const statement of this.statementCache.values()) { + try { + statement.finalize(); + } catch (e) { + // Ignore errors during cleanup + console.warn("Error finalizing statement during cleanup:", e); + } + } + this.statementCache.clear(); + } + + close(): void { + this.clearStatementCache(); + + if (this.db) { + this.db.close(); + this.db = undefined; + } + + // Reset OPFS / SAHPool state + this.opfsDbPath = undefined; + this.sahPoolDbName = undefined; + } + + /** + * Get the number of rows changed by the last INSERT, UPDATE, or DELETE statement. + */ + changes(): number { + this.ensureDb(); + return this.db!.changes(); + } + + /** + * Check if the database is currently open. + */ + isOpen(): boolean { + return this.db !== undefined && this.db.isOpen(); + } + + private ensureSqlite3(): void { + if (!this.sqlite3) { + throw new Error( + "SQLite WASM module not initialized. Call initialize() first with the sqlite3 module." 
+            );
+        }
+    }
+
+    private ensureDb(): void {
+        this.ensureSqlite3();
+        if (!this.db) {
+            throw new Error(
+                "Database not opened. Call loadFromMemory(), loadFromBuffer(), " +
+                "loadFromSahPool(), or loadFromOpfs() first."
+            );
+        }
+    }
+}
diff --git a/apps/client-standalone/src/lightweight/translation_provider.ts b/apps/client-standalone/src/lightweight/translation_provider.ts
new file mode 100644
index 00000000000..6a194e12cb5
--- /dev/null
+++ b/apps/client-standalone/src/lightweight/translation_provider.ts
@@ -0,0 +1,16 @@
+import { LOCALE_IDS } from "@triliumnext/commons";
+import type i18next from "i18next";
+import I18NextHttpBackend from "i18next-http-backend";
+
+export default async function translationProvider(i18nextInstance: typeof i18next, locale: LOCALE_IDS) {
+    await i18nextInstance.use(I18NextHttpBackend).init({
+        lng: locale,
+        fallbackLng: "en",
+        ns: "server",
+        backend: {
+            loadPath: `${import.meta.resolve("../server-assets/translations")}/{{lng}}/{{ns}}.json`
+        },
+        returnEmptyString: false,
+        debug: true
+    });
+}
diff --git a/apps/client-standalone/src/lightweight/zip_export_provider_factory.ts b/apps/client-standalone/src/lightweight/zip_export_provider_factory.ts
new file mode 100644
index 00000000000..12e1f75e2be
--- /dev/null
+++ b/apps/client-standalone/src/lightweight/zip_export_provider_factory.ts
@@ -0,0 +1,18 @@
+import { type ExportFormat, type ZipExportProviderData, ZipExportProvider } from "@triliumnext/core";
+
+import contentCss from "@triliumnext/ckeditor5/src/theme/ck-content.css?raw";
+
+export async function standaloneZipExportProviderFactory(format: ExportFormat, data: ZipExportProviderData): Promise<ZipExportProvider> {
+    switch (format) {
+        case "html": {
+            const { default: HtmlExportProvider } = await import("@triliumnext/core/src/services/export/zip/html.js");
+            return new HtmlExportProvider(data, { contentCss });
+        }
+        case "markdown": {
+            const { default: MarkdownExportProvider } = await
import("@triliumnext/core/src/services/export/zip/markdown.js");
+            return new MarkdownExportProvider(data);
+        }
+        default:
+            throw new Error(`Unsupported export format: '${format}'`);
+    }
+}
diff --git a/apps/client-standalone/src/lightweight/zip_provider.ts b/apps/client-standalone/src/lightweight/zip_provider.ts
new file mode 100644
index 00000000000..f5e435c5c51
--- /dev/null
+++ b/apps/client-standalone/src/lightweight/zip_provider.ts
@@ -0,0 +1,101 @@
+import type { FileStream, ZipArchive, ZipEntry, ZipProvider } from "@triliumnext/core/src/services/zip_provider.js";
+import { strToU8, unzip, zipSync } from "fflate";
+
+type ZipOutput = {
+    send?: (body: unknown) => unknown;
+    write?: (chunk: Uint8Array | string) => unknown;
+    end?: (chunk?: Uint8Array | string) => unknown;
+};
+
+class BrowserZipArchive implements ZipArchive {
+    readonly #entries: Record<string, Uint8Array> = {};
+    #destination: ZipOutput | null = null;
+
+    append(content: string | Uint8Array, options: { name: string }) {
+        this.#entries[options.name] = typeof content === "string" ?
strToU8(content) : content;
+    }
+
+    pipe(destination: unknown) {
+        this.#destination = destination as ZipOutput;
+    }
+
+    async finalize(): Promise<void> {
+        if (!this.#destination) {
+            throw new Error("ZIP output destination not set.");
+        }
+
+        const content = zipSync(this.#entries, { level: 9 });
+
+        if (typeof this.#destination.send === "function") {
+            this.#destination.send(content);
+            return;
+        }
+
+        if (typeof this.#destination.end === "function") {
+            if (typeof this.#destination.write === "function") {
+                this.#destination.write(content);
+                this.#destination.end();
+            } else {
+                this.#destination.end(content);
+            }
+            return;
+        }
+
+        throw new Error("Unsupported ZIP output destination.");
+    }
+}
+
+export default class BrowserZipProvider implements ZipProvider {
+    createZipArchive(): ZipArchive {
+        return new BrowserZipArchive();
+    }
+
+    createFileStream(_filePath: string): FileStream {
+        throw new Error("File stream creation is not supported in the browser.");
+    }
+
+    readZipFile(
+        buffer: Uint8Array,
+        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
+    ): Promise<void> {
+        return new Promise((res, rej) => {
+            unzip(buffer, async (err, files) => {
+                if (err) { rej(err); return; }
+
+                try {
+                    for (const [fileName, data] of Object.entries(files)) {
+                        await processEntry(
+                            { fileName: decodeZipFileName(fileName) },
+                            () => Promise.resolve(data)
+                        );
+                    }
+                    res();
+                } catch (e) {
+                    rej(e);
+                }
+            });
+        });
+    }
+}
+
+const utf8Decoder = new TextDecoder("utf-8", { fatal: true });
+
+/**
+ * fflate decodes ZIP entry filenames as CP437/Latin-1 unless the language
+ * encoding flag (general purpose bit 11) is set, but many real-world archives
+ * (e.g. those produced by macOS / Linux unzip / Python's zipfile) write UTF-8
+ * filenames without setting that flag. Recover the original UTF-8 bytes from
+ * fflate's per-byte string and re-decode them; if the result isn't valid
+ * UTF-8 we fall back to the as-decoded name.
+ */ +function decodeZipFileName(name: string): string { + const bytes = new Uint8Array(name.length); + for (let i = 0; i < name.length; i++) { + bytes[i] = name.charCodeAt(i) & 0xff; + } + try { + return utf8Decoder.decode(bytes); + } catch { + return name; + } +} diff --git a/apps/client-standalone/src/local-bridge.ts b/apps/client-standalone/src/local-bridge.ts new file mode 100644 index 00000000000..8d13d1f7ada --- /dev/null +++ b/apps/client-standalone/src/local-bridge.ts @@ -0,0 +1,115 @@ +import LocalServerWorker from "./local-server-worker?worker"; +let localWorker: Worker | null = null; +const pending = new Map(); + +function showFatalErrorDialog(message: string) { + alert(message); +} + +export function startLocalServerWorker() { + if (localWorker) return localWorker; + localWorker = new LocalServerWorker(); + localWorker.postMessage({ type: "INIT", queryString: location.search }); + + // Handle worker errors during initialization + localWorker.onerror = (event) => { + console.error("[LocalBridge] Worker error:", event); + // Reject all pending requests + for (const [, resolver] of pending) { + resolver.reject(new Error(`Worker error: ${event.message}`)); + } + pending.clear(); + }; + + localWorker.onmessage = (event) => { + const msg = event.data; + + // Handle fatal platform crashes (shown as a dialog to the user) + if (msg?.type === "FATAL_ERROR") { + console.error("[LocalBridge] Fatal error:", msg.message); + showFatalErrorDialog(msg.message); + return; + } + + // Handle worker error reports + if (msg?.type === "WORKER_ERROR") { + console.error("[LocalBridge] Worker reported error:", msg.error); + // Reject all pending requests with the error + for (const [, resolver] of pending) { + resolver.reject(new Error(msg.error?.message || "Unknown worker error")); + } + pending.clear(); + return; + } + + // Handle WebSocket-like messages from the worker (for frontend updates) + if (msg?.type === "WS_MESSAGE" && msg.message) { + // Dispatch a custom event that 
ws.ts listens to in standalone mode + window.dispatchEvent(new CustomEvent("trilium:ws-message", { + detail: msg.message + })); + return; + } + + if (!msg || msg.type !== "LOCAL_RESPONSE") return; + + const { id, response, error } = msg; + const resolver = pending.get(id); + if (!resolver) return; + pending.delete(id); + + if (error) resolver.reject(new Error(error)); + else resolver.resolve(response); + }; + + return localWorker; +} + +export function attachServiceWorkerBridge() { + if (!("serviceWorker" in navigator) || !navigator.serviceWorker) { + console.warn("[LocalBridge] Service workers not available — skipping bridge setup"); + return; + } + + navigator.serviceWorker.addEventListener("message", async (event) => { + const msg = event.data; + if (!msg || msg.type !== "LOCAL_FETCH") return; + + const port = event.ports && event.ports[0]; + if (!port) return; + + try { + startLocalServerWorker(); + + const id = msg.id; + const req = msg.request; + + const response = await new Promise<{ body?: ArrayBuffer }>((resolve, reject) => { + pending.set(id, { resolve, reject }); + // Transfer body to worker for efficiency (if present) + localWorker!.postMessage({ + type: "LOCAL_REQUEST", + id, + request: req + }, req.body ? [req.body] : []); + }); + + port.postMessage({ + type: "LOCAL_FETCH_RESPONSE", + id, + response + }, response.body ? [response.body] : []); + } catch (e: unknown) { + const errorMessage = e instanceof Error ? 
e.message : String(e); + port.postMessage({ + type: "LOCAL_FETCH_RESPONSE", + id: msg.id, + response: { + status: 500, + headers: { "content-type": "text/plain; charset=utf-8" }, + body: new TextEncoder().encode(errorMessage).buffer + } + }); + } + }); +} diff --git a/apps/client-standalone/src/local-server-worker.ts b/apps/client-standalone/src/local-server-worker.ts new file mode 100644 index 00000000000..16fd81e9a95 --- /dev/null +++ b/apps/client-standalone/src/local-server-worker.ts @@ -0,0 +1,520 @@ +// ============================================================================= +// ERROR HANDLERS FIRST - No static imports above this! +// ES modules hoist static imports, so they execute BEFORE any code runs. +// We use dynamic imports below to ensure error handlers are registered first. +// ============================================================================= + +self.onerror = (message, source, lineno, colno, error) => { + const errorMsg = `[Worker] Uncaught error: ${message}\n at ${source}:${lineno}:${colno}`; + console.error(errorMsg, error); + try { + self.postMessage({ + type: "WORKER_ERROR", + error: { + message: String(message), + source, + lineno, + colno, + stack: error?.stack || new Error().stack + } + }); + } catch (e) { + console.error("[Worker] Failed to report error:", e); + } + return false; +}; + +self.onunhandledrejection = (event) => { + const reason = event.reason; + const errorMsg = `[Worker] Unhandled rejection: ${reason?.message || reason}`; + console.error(errorMsg, reason); + try { + self.postMessage({ + type: "WORKER_ERROR", + error: { + message: String(reason?.message || reason), + stack: reason?.stack || new Error().stack + } + }); + } catch (e) { + console.error("[Worker] Failed to report rejection:", e); + } +}; + +console.log("[Worker] Error handlers installed, loading modules..."); + +// ============================================================================= +// TYPE-ONLY IMPORTS (erased at runtime, safe as static 
imports) +// ============================================================================= +import type { BrowserRouter } from './lightweight/browser_router'; + +// Build-time constant injected by Vite (see `define` in vite.config.mts). +declare const __TRILIUM_INTEGRATION_TEST__: string; + +// ============================================================================= +// MODULE STATE (populated by dynamic imports) +// ============================================================================= +let BrowserSqlProvider: typeof import('./lightweight/sql_provider').default; +let WorkerMessagingProvider: typeof import('./lightweight/messaging_provider').default; +let BrowserExecutionContext: typeof import('./lightweight/cls_provider').default; +let BrowserCryptoProvider: typeof import('./lightweight/crypto_provider').default; +let BrowserZipProvider: typeof import('./lightweight/zip_provider').default; +let FetchRequestProvider: typeof import('./lightweight/request_provider').default; +let StandalonePlatformProvider: typeof import('./lightweight/platform_provider').default; +let StandaloneLogService: typeof import('./lightweight/log_provider').default; +let StandaloneBackupService: typeof import('./lightweight/backup_provider').default; +let translationProvider: typeof import('./lightweight/translation_provider').default; +let createConfiguredRouter: typeof import('./lightweight/browser_routes').createConfiguredRouter; + +// Instance state +let sqlProvider: InstanceType | null = null; +let messagingProvider: InstanceType | null = null; + +// Core module, router, and initialization state +let coreModule: typeof import("@triliumnext/core") | null = null; +let router: BrowserRouter | null = null; +let initPromise: Promise | null = null; +let initError: Error | null = null; +let queryString = ""; + +/** + * Check whether a file exists at the OPFS root. 
Used to decide whether the + * test fixture needs to be seeded or whether we should reuse the existing + * DB (preserving changes made earlier in the same test — e.g. options set + * before a page reload). + */ +async function opfsFileExists(fileName: string): Promise { + if (typeof navigator === "undefined" || !navigator.storage?.getDirectory) { + return false; + } + const root = await navigator.storage.getDirectory(); + try { + await root.getFileHandle(fileName); + return true; + } catch { + return false; + } +} + +/** + * Write a raw byte buffer to an OPFS file. Used to drop the test fixture DB + * into OPFS as a regular file so SQLite's OPFS VFS can then open it. Requires + * a Worker context (`createSyncAccessHandle` isn't available on the main thread + * in some browsers). + */ +async function writeOpfsFile(fileName: string, buffer: Uint8Array): Promise { + const root = await navigator.storage.getDirectory(); + const fileHandle = await root.getFileHandle(fileName, { create: true }); + const accessHandle = await (fileHandle as unknown as { + createSyncAccessHandle(): Promise<{ + truncate(size: number): void; + write(buffer: Uint8Array, opts: { at: number }): number; + flush(): void; + close(): void; + }>; + }).createSyncAccessHandle(); + try { + accessHandle.truncate(0); + accessHandle.write(buffer, { at: 0 }); + accessHandle.flush(); + } finally { + accessHandle.close(); + } +} + +/** + * Read a file from the OPFS root into a Uint8Array. + * Used during migration from legacy OPFS VFS to SAHPool. + */ +async function readOpfsFile(fileName: string): Promise { + const root = await navigator.storage.getDirectory(); + const fileHandle = await root.getFileHandle(fileName); + const file = await fileHandle.getFile(); + return new Uint8Array(await file.arrayBuffer()); +} + +/** + * Delete a file from the OPFS root. + * Used to clean up the legacy OPFS database after migration to SAHPool. 
+ */ +async function deleteOpfsFile(fileName: string): Promise { + const root = await navigator.storage.getDirectory(); + await root.removeEntry(fileName); +} + +/** + * Verify that a buffer contains a valid SQLite database by checking the + * 16-byte magic string "SQLite format 3\0". + */ +function assertSqliteMagic(buffer: Uint8Array, source: string): void { + const magic = new TextDecoder().decode(buffer.subarray(0, 15)); + if (magic !== "SQLite format 3") { + throw new Error( + `${source} is not a SQLite database ` + + `(got ${buffer.byteLength} bytes starting with "${magic}"). ` + + `The file is likely missing and the SPA fallback is returning index.html.` + ); + } +} + +/** + * Migrate database from legacy OPFS VFS to SAHPool VFS. + * Checks if a legacy `/trilium.db` file exists in the OPFS root, and if the + * SAHPool doesn't already have it. If migration is needed, the legacy file is + * read, imported into the pool, and then deleted. + */ +async function migrateFromLegacyOpfs(dbName: string): Promise { + const legacyFileName = dbName.replace(/^\//, ""); // strip leading slash + const legacyExists = await opfsFileExists(legacyFileName); + + if (!legacyExists) { + return; // Nothing to migrate + } + + // Check if SAHPool already has this DB (e.g. 
migration already happened) + const poolFiles = sqlProvider!.sahPool!.getFileNames(); + if (poolFiles.includes(dbName)) { + console.log("[Worker] SAHPool already contains the database, deleting legacy OPFS file..."); + await deleteOpfsFile(legacyFileName); + return; + } + + console.log("[Worker] Migrating database from legacy OPFS to SAHPool VFS..."); + const startTime = performance.now(); + + const buffer = await readOpfsFile(legacyFileName); + assertSqliteMagic(buffer, "Legacy OPFS database"); + + await sqlProvider!.sahPool!.importDb(dbName, buffer); + await deleteOpfsFile(legacyFileName); + + // Also clean up legacy journal/WAL files if they exist + for (const suffix of ["-journal", "-wal", "-shm"]) { + try { + await deleteOpfsFile(legacyFileName + suffix); + } catch { + // Ignore — file may not exist + } + } + + const elapsed = performance.now() - startTime; + console.log(`[Worker] Migration complete in ${elapsed.toFixed(2)}ms (${buffer.byteLength} bytes)`); +} + +/** + * Load the test fixture database for integration tests. + * Seeds from the fixture if not already present, using SAHPool when available. 
+ */ +async function loadTestDatabase(sahPoolAvailable: boolean, dbName: string): Promise { + if (sahPoolAvailable) { + const poolFiles = sqlProvider!.sahPool!.getFileNames(); + if (!poolFiles.includes(dbName)) { + console.log("[Worker] Integration test mode: seeding fixture database into SAHPool..."); + const buffer = await fetchTestFixture(); + await sqlProvider!.sahPool!.importDb(dbName, buffer); + } else { + console.log("[Worker] Integration test mode: reusing existing SAHPool DB from earlier in this test"); + } + sqlProvider!.loadFromSahPool(dbName); + } else { + // Fallback to legacy OPFS for tests when SAHPool isn't available + const legacyFileName = dbName.replace(/^\//, ""); + if (!(await opfsFileExists(legacyFileName))) { + console.log("[Worker] Integration test mode: seeding fixture database into OPFS..."); + const buffer = await fetchTestFixture(); + await writeOpfsFile(legacyFileName, buffer); + } else { + console.log("[Worker] Integration test mode: reusing existing OPFS DB from earlier in this test"); + } + sqlProvider!.loadFromOpfs(dbName); + } +} + +/** + * Fetch the test fixture database and validate it. + */ +async function fetchTestFixture(): Promise { + const response = await fetch("/test-fixtures/document.db"); + if (!response.ok) { + throw new Error(`Failed to fetch test fixture: ${response.status} ${response.statusText}`); + } + const buffer = new Uint8Array(await response.arrayBuffer()); + assertSqliteMagic(buffer, "Test fixture at /test-fixtures/document.db"); + return buffer; +} + +/** + * Load all required modules using dynamic imports. + * This allows errors to be caught by our error handlers. 
+ */ +async function loadModules(): Promise { + console.log("[Worker] Loading lightweight modules..."); + const [ + sqlModule, + messagingModule, + clsModule, + cryptoModule, + zipModule, + requestModule, + platformModule, + logModule, + backupModule, + translationModule, + routesModule + ] = await Promise.all([ + import('./lightweight/sql_provider.js'), + import('./lightweight/messaging_provider.js'), + import('./lightweight/cls_provider.js'), + import('./lightweight/crypto_provider.js'), + import('./lightweight/zip_provider.js'), + import('./lightweight/request_provider.js'), + import('./lightweight/platform_provider.js'), + import('./lightweight/log_provider.js'), + import('./lightweight/backup_provider.js'), + import('./lightweight/translation_provider.js'), + import('./lightweight/browser_routes.js') + ]); + + BrowserSqlProvider = sqlModule.default; + WorkerMessagingProvider = messagingModule.default; + BrowserExecutionContext = clsModule.default; + BrowserCryptoProvider = cryptoModule.default; + BrowserZipProvider = zipModule.default; + FetchRequestProvider = requestModule.default; + StandalonePlatformProvider = platformModule.default; + StandaloneLogService = logModule.default; + StandaloneBackupService = backupModule.default; + translationProvider = translationModule.default; + createConfiguredRouter = routesModule.createConfiguredRouter; + + // Create instances + sqlProvider = new BrowserSqlProvider(); + messagingProvider = new WorkerMessagingProvider(); + + console.log("[Worker] Lightweight modules loaded successfully"); +} + +/** + * Initialize SQLite WASM and load the core module. + * This happens once at worker startup. 
+ */ +async function initialize(): Promise { + if (initPromise) { + return initPromise; // Already initializing + } + if (initError) { + throw initError; // Failed before, don't retry + } + + initPromise = (async () => { + try { + // First, load all modules dynamically + await loadModules(); + + console.log("[Worker] Initializing SQLite WASM..."); + await sqlProvider!.initWasm(); + + // Try to install the SAHPool VFS (preferred: supports WAL, much faster) + let sahPoolAvailable = false; + try { + await sqlProvider!.installSahPool(); + sahPoolAvailable = true; + } catch (e) { + console.warn("[Worker] SAHPool VFS not available, will fall back to legacy OPFS or in-memory:", e); + } + + // Integration test mode is baked in at build time via the + // __TRILIUM_INTEGRATION_TEST__ Vite define (derived from the + // TRILIUM_INTEGRATION_TEST env var when the bundle was built). + const integrationTestMode = __TRILIUM_INTEGRATION_TEST__; + const dbName = "/trilium.db"; + + if (integrationTestMode === "memory") { + // Use OPFS for the DB in integration test mode so option changes + // (and any other writes) survive page reloads within a single test. + // Playwright gives each test a fresh BrowserContext, which means a + // fresh OPFS — so on the first worker init of a test we seed from + // the fixture, and subsequent inits in the same test reuse it. + await loadTestDatabase(sahPoolAvailable, dbName); + } else if (sahPoolAvailable) { + // SAHPool available — migrate from legacy OPFS if needed, then open + await migrateFromLegacyOpfs(dbName); + console.log("[Worker] SAHPool available, loading persistent database (WAL mode)..."); + sqlProvider!.loadFromSahPool(dbName); + } else if (sqlProvider!.isOpfsAvailable()) { + // Fall back to legacy OPFS VFS (no WAL, slower writes). + // This only kicks in if SAHPool installation failed for some + // reason but SharedArrayBuffer + legacy OPFS are both available. 
+ console.warn("[Worker] SAHPool unavailable; using legacy OPFS VFS (no WAL mode)."); + sqlProvider!.loadFromOpfs(dbName); + } else { + // Fall back to in-memory database (non-persistent). + // SAHPool only needs a Worker + OPFS API, so reaching this + // branch means the environment lacks OPFS entirely. + console.warn("[Worker] OPFS not available, using in-memory database (data will not persist)"); + sqlProvider!.loadFromMemory(); + } + + console.log("[Worker] Database loaded"); + + console.log("[Worker] Loading @triliumnext/core..."); + const schemaModule = await import("@triliumnext/core/src/assets/schema.sql?raw"); + coreModule = await import("@triliumnext/core"); + + // Initialize log service with OPFS persistence + const logService = new StandaloneLogService(); + await logService.initialize(); + console.log("[Worker] Log service initialized with OPFS"); + + await coreModule.initializeCore({ + executionContext: new BrowserExecutionContext(), + crypto: new BrowserCryptoProvider(), + zip: new BrowserZipProvider(), + zipExportProviderFactory: (await import("./lightweight/zip_export_provider_factory.js")).standaloneZipExportProviderFactory, + messaging: messagingProvider!, + request: new FetchRequestProvider(), + platform: new StandalonePlatformProvider(queryString), + log: logService, + backup: new StandaloneBackupService(coreModule!.options), + translations: translationProvider, + schema: schemaModule.default, + getDemoArchive: async () => { + const response = await fetch("/server-assets/db/demo.zip"); + if (!response.ok) return null; + return new Uint8Array(await response.arrayBuffer()); + }, + image: (await import("./services/image_provider.js")).standaloneImageProvider, + dbConfig: { + provider: sqlProvider!, + isReadOnly: false, + onTransactionCommit: () => { + coreModule?.ws.sendTransactionEntityChangesToAllClients(); + }, + onTransactionRollback: () => { + // No-op for now + } + } + }); + coreModule.ws.init(); + + console.log("[Worker] Supported routes", 
Object.keys(coreModule.routes)); + + // Create and configure the router + router = createConfiguredRouter(); + console.log("[Worker] Router configured"); + + // initializeDb runs initDbConnection inside an execution context, + // which resolves dbReady — required before beccaLoaded can settle. + coreModule.sql_init.initializeDb(); + + if (coreModule.sql_init.isDbInitialized()) { + console.log("[Worker] Database already initialized, loading becca..."); + await coreModule.becca_loader.beccaLoaded; + + // `initTranslations` runs before `initSql` inside `initializeCore` + // (options_init needs translations, creating a chicken-and-egg), + // so it always defaults to "en" on a fresh worker boot. Now that + // the DB is up we can read the real locale and, if it differs, + // switch i18next and rebuild the hidden subtree with the correct + // titles. This must happen BEFORE `startScheduler` registers its + // own `dbReady.then(checkHiddenSubtree)` so the scheduled rebuild + // sees the right language. + const dbLocale = coreModule.options.getOptionOrNull("locale"); + if (dbLocale && dbLocale !== "en") { + console.log(`[Worker] Reconciling i18next locale to "${dbLocale}" from DB`); + await coreModule.i18n.changeLanguage(dbLocale); + } + } else { + console.log("[Worker] Database not initialized, skipping becca load (will be loaded during DB initialization)"); + } + + coreModule.scheduler.startScheduler(); + + console.log("[Worker] Initialization complete"); + } catch (error) { + initError = error instanceof Error ? error : new Error(String(error)); + console.error("[Worker] Initialization failed:", initError); + throw initError; + } + })(); + + return initPromise; +} + +/** + * Ensure the worker is initialized before processing requests. + * Returns the router if initialization was successful. 
+ */ +async function ensureInitialized() { + await initialize(); + if (!router) { + throw new Error("Router not initialized"); + } + return router; +} + +interface LocalRequest { + method: string; + url: string; + body?: unknown; + headers?: Record; +} + +// Main dispatch +async function dispatch(request: LocalRequest) { + // Ensure initialization is complete and get the router + const appRouter = await ensureInitialized(); + + // Dispatch to the router + return appRouter.dispatch(request.method, request.url, request.body, request.headers); +} + +// Wait for the INIT message before initializing so that queryString +// (which may contain ?integrationTest=memory for e2e) is available. +let initReceived = false; + +self.onmessage = async (event) => { + const msg = event.data; + if (!msg) return; + + if (msg.type === "INIT") { + queryString = msg.queryString || ""; + if (!initReceived) { + initReceived = true; + console.log("[Worker] Starting initialization..."); + initialize().catch(err => { + console.error("[Worker] Initialization failed:", err); + self.postMessage({ + type: "WORKER_ERROR", + error: { + message: String(err?.message || err), + stack: err?.stack + } + }); + }); + } + return; + } + + if (msg.type !== "LOCAL_REQUEST") return; + + const { id, request } = msg; + + try { + const response = await dispatch(request); + + // Transfer body back (if any) - use options object for proper typing + (self as unknown as Worker).postMessage({ + type: "LOCAL_RESPONSE", + id, + response + }, { transfer: response.body ? 
[response.body] : [] }); + } catch (e) { + console.error("[Worker] Dispatch error:", e); + (self as unknown as Worker).postMessage({ + type: "LOCAL_RESPONSE", + id, + error: String((e as Error)?.message || e) + }); + } +}; diff --git a/apps/client-standalone/src/main.ts b/apps/client-standalone/src/main.ts new file mode 100644 index 00000000000..b7e36101d54 --- /dev/null +++ b/apps/client-standalone/src/main.ts @@ -0,0 +1,97 @@ +import { attachServiceWorkerBridge, startLocalServerWorker } from "./local-bridge.js"; + +async function waitForServiceWorkerControl(): Promise { + if (!("serviceWorker" in navigator) || !navigator.serviceWorker) { + const isSecure = location.protocol === "https:" || location.hostname === "localhost" || location.hostname === "127.0.0.1"; + const hints: string[] = []; + if (!isSecure) { + hints.push(`The page is served over ${location.protocol}//${location.hostname} which is not a secure context. Service workers require HTTPS (or localhost).`); + } + if (window.isSecureContext === false) { + hints.push("The browser reports this is not a secure context."); + } + throw new Error( + "Service workers are not available in this browser.\n\n" + + "Trilium standalone mode requires service workers to function.\n" + + (hints.length ? "\nPossible cause:\n- " + hints.join("\n- ") + "\n" : "") + + "\nTo fix this, access the application over HTTPS or via localhost." 
+ ); + } + + // If already controlling, we're good + if (navigator.serviceWorker.controller) { + console.log("[Bootstrap] Service worker already controlling"); + return; + } + + console.log("[Bootstrap] Waiting for service worker to take control..."); + + // Register service worker + await navigator.serviceWorker.register("./sw.js", { scope: "/" }); + + // Wait for it to be ready (installed + activated) + await navigator.serviceWorker.ready; + + // Check if we're now controlling + if (navigator.serviceWorker.controller) { + console.log("[Bootstrap] Service worker now controlling"); + return; + } + + // If not controlling yet, we need to reload the page for SW to take control + // This is standard PWA behavior on first install + console.log("[Bootstrap] Service worker installed but not controlling yet - reloading page"); + + // Wait a tiny bit for SW to fully activate + await new Promise(resolve => setTimeout(resolve, 100)); + + // Reload to let SW take control + window.location.reload(); + + // Throw to stop execution (page will reload) + throw new Error("Reloading for service worker activation"); +} + +async function bootstrap() { + /* fixes https://github.com/webpack/webpack/issues/10035 */ + window.global = globalThis; + + try { + // 1) Start local worker ASAP (so /bootstrap is fast) + startLocalServerWorker(); + + // 2) Bridge SW -> local worker + attachServiceWorkerBridge(); + + // 3) Wait for service worker to control the page (may reload on first install) + await waitForServiceWorkerControl(); + + await loadScripts(); + } catch (err) { + // If error is from reload, it will stop here (page reloads) + // Otherwise, show error to user + if (err instanceof Error && err.message.includes("Reloading")) { + // Page is reloading, do nothing + return; + } + + console.error("[Bootstrap] Fatal error:", err); + document.body.innerHTML = ` +
+

Failed to Initialize

+

The application failed to start. Please check the browser console for details.

+
${err instanceof Error ? err.message : String(err)}
+ +
+ `; + document.body.style.display = "block"; + } +} + +async function loadScripts() { + await import("../../client/src/index.js"); +} + +bootstrap(); diff --git a/apps/client-standalone/src/services/data_encryption.spec.ts b/apps/client-standalone/src/services/data_encryption.spec.ts new file mode 100644 index 00000000000..c4b9e07d965 --- /dev/null +++ b/apps/client-standalone/src/services/data_encryption.spec.ts @@ -0,0 +1,67 @@ +import { describe, it, expect } from "vitest"; +import { data_encryption } from "@triliumnext/core"; + +// Note: BrowserCryptoProvider is already initialized via test_setup.ts + +describe("data_encryption with BrowserCryptoProvider", () => { + it("should encrypt and decrypt ASCII text correctly", () => { + const key = new Uint8Array(16).fill(42); + const plainText = "Hello, World!"; + + const encrypted = data_encryption.encrypt(key, plainText); + expect(typeof encrypted).toBe("string"); + expect(encrypted.length).toBeGreaterThan(0); + + const decrypted = data_encryption.decryptString(key, encrypted); + expect(decrypted).toBe(plainText); + }); + + it("should encrypt and decrypt UTF-8 text correctly", () => { + const key = new Uint8Array(16).fill(42); + const plainText = "Привет мир! 你好世界! 
🎉"; + + const encrypted = data_encryption.encrypt(key, plainText); + const decrypted = data_encryption.decryptString(key, encrypted); + expect(decrypted).toBe(plainText); + }); + + it("should encrypt and decrypt empty string", () => { + const key = new Uint8Array(16).fill(42); + const plainText = ""; + + const encrypted = data_encryption.encrypt(key, plainText); + const decrypted = data_encryption.decryptString(key, encrypted); + expect(decrypted).toBe(plainText); + }); + + it("should encrypt and decrypt binary data", () => { + const key = new Uint8Array(16).fill(42); + const plainData = new Uint8Array([0, 1, 2, 255, 128, 64]); + + const encrypted = data_encryption.encrypt(key, plainData); + const decrypted = data_encryption.decrypt(key, encrypted); + expect(decrypted).toBeInstanceOf(Uint8Array); + expect(Array.from(decrypted as Uint8Array)).toEqual(Array.from(plainData)); + }); + + it("should fail decryption with wrong key", () => { + const key1 = new Uint8Array(16).fill(42); + const key2 = new Uint8Array(16).fill(43); + const plainText = "Secret message"; + + const encrypted = data_encryption.encrypt(key1, plainText); + + // decrypt returns false when digest doesn't match + const result = data_encryption.decrypt(key2, encrypted); + expect(result).toBe(false); + }); + + it("should handle large content", () => { + const key = new Uint8Array(16).fill(42); + const plainText = "x".repeat(100000); + + const encrypted = data_encryption.encrypt(key, plainText); + const decrypted = data_encryption.decryptString(key, encrypted); + expect(decrypted).toBe(plainText); + }); +}); diff --git a/apps/client-standalone/src/services/image_provider.ts b/apps/client-standalone/src/services/image_provider.ts new file mode 100644 index 00000000000..d5c2ca949ee --- /dev/null +++ b/apps/client-standalone/src/services/image_provider.ts @@ -0,0 +1,96 @@ +/** + * Standalone image provider implementation. + * Uses pure JavaScript for format detection without compression. 
+ * Images are saved as-is without resizing. + */ + +import type { ImageProvider, ImageFormat, ProcessedImage } from "@triliumnext/core"; + +/** + * Detect image type from buffer using magic bytes. + */ +function getImageTypeFromBuffer(buffer: Uint8Array): ImageFormat | null { + if (buffer.length < 12) { + return null; + } + + // Check for SVG (text-based) + if (isSvg(buffer)) { + return { ext: "svg", mime: "image/svg+xml" }; + } + + // JPEG: FF D8 FF + if (buffer[0] === 0xff && buffer[1] === 0xd8 && buffer[2] === 0xff) { + return { ext: "jpg", mime: "image/jpeg" }; + } + + // PNG: 89 50 4E 47 0D 0A 1A 0A + if ( + buffer[0] === 0x89 && + buffer[1] === 0x50 && + buffer[2] === 0x4e && + buffer[3] === 0x47 && + buffer[4] === 0x0d && + buffer[5] === 0x0a && + buffer[6] === 0x1a && + buffer[7] === 0x0a + ) { + return { ext: "png", mime: "image/png" }; + } + + // GIF: "GIF" + if (buffer[0] === 0x47 && buffer[1] === 0x49 && buffer[2] === 0x46) { + return { ext: "gif", mime: "image/gif" }; + } + + // WebP: RIFF....WEBP + if ( + buffer[0] === 0x52 && + buffer[1] === 0x49 && + buffer[2] === 0x46 && + buffer[3] === 0x46 && + buffer[8] === 0x57 && + buffer[9] === 0x45 && + buffer[10] === 0x42 && + buffer[11] === 0x50 + ) { + return { ext: "webp", mime: "image/webp" }; + } + + // BMP: "BM" + if (buffer[0] === 0x42 && buffer[1] === 0x4d) { + return { ext: "bmp", mime: "image/bmp" }; + } + + return null; +} + +/** + * Check if buffer contains SVG content. 
+ */ +function isSvg(buffer: Uint8Array): boolean { + const maxBytes = Math.min(buffer.length, 1000); + let str = ""; + for (let i = 0; i < maxBytes; i++) { + str += String.fromCharCode(buffer[i]); + } + + const trimmed = str.trim().toLowerCase(); + return trimmed.startsWith(" { + // Standalone doesn't do compression - just detect format and return original + const format = getImageTypeFromBuffer(buffer) || { ext: "dat", mime: "application/octet-stream" }; + + return { + buffer, + format + }; + } +}; diff --git a/apps/client-standalone/src/sw.ts b/apps/client-standalone/src/sw.ts new file mode 100644 index 00000000000..4e5b5831f7d --- /dev/null +++ b/apps/client-standalone/src/sw.ts @@ -0,0 +1,196 @@ +// public/sw.js +const VERSION = "localserver-v1.4"; +const STATIC_CACHE = `static-${VERSION}`; + +// Check if running in dev mode (passed via URL parameter) +const isDev = true; + +if (isDev) { + console.log('[Service Worker] Running in DEV mode - caching disabled'); +} + +// Adjust these to your routes: +const LOCAL_FIRST_PREFIXES = [ + "/bootstrap", + "/api/", + "/sync/", + "/search/" +]; + +// Optional: basic precache list (keep small; you can expand later) +const PRECACHE_URLS = [ + // "/", + // "/index.html", + // "/manifest.webmanifest", + // "/favicon.ico", +]; + +self.addEventListener("install", (event) => { + event.waitUntil((async () => { + // Skip precaching in dev mode + if (!isDev) { + const cache = await caches.open(STATIC_CACHE); + await cache.addAll(PRECACHE_URLS); + } + self.skipWaiting(); + })()); +}); + +self.addEventListener("activate", (event) => { + event.waitUntil((async () => { + // Cleanup old caches + const keys = await caches.keys(); + await Promise.all(keys.map((k) => (k === STATIC_CACHE ? 
Promise.resolve() : caches.delete(k)))); + await self.clients.claim(); + })()); +}); + +function isLocalFirst(url) { + return LOCAL_FIRST_PREFIXES.some((p) => url.pathname.startsWith(p)); +} + +async function cacheFirst(request) { + // In dev mode, always bypass cache + if (isDev) { + return fetch(request); + } + + const cache = await caches.open(STATIC_CACHE); + const cached = await cache.match(request); + if (cached) return cached; + + const fresh = await fetch(request); + // Cache only successful GETs + if (request.method === "GET" && fresh.ok) cache.put(request, fresh.clone()); + return fresh; +} + +async function networkFirst(request) { + // In dev mode, always bypass cache + if (isDev) { + return fetch(request); + } + + const cache = await caches.open(STATIC_CACHE); + try { + const fresh = await fetch(request); + // Cache only successful GETs + if (request.method === "GET" && fresh.ok) cache.put(request, fresh.clone()); + return fresh; + } catch (error) { + // Fallback to cache if network fails + const cached = await cache.match(request); + if (cached) return cached; + throw error; + } +} + +async function forwardToClientLocalServer(request, _clientId) { + // Find the main app window to handle the request + // We must route to the main app (which has the local bridge), not iframes like PDF.js viewer + // @ts-expect-error - self.clients is valid in service worker context + const all = await self.clients.matchAll({ type: "window", includeUncontrolled: true }); + + // Find the main app window - it's the one NOT serving pdfjs or other embedded content + // The main app has the local bridge handler for LOCAL_FETCH messages + let client = all.find((c: { url: string }) => { + const url = new URL(c.url); + // Main app is at root or index.html, not in /pdfjs/ or other iframe paths + return !url.pathname.startsWith("/pdfjs/"); + }) || null; + + // If no main app window found, fall back to any available client + if (!client) { + client = all[0] || null; + } + + // If no 
page is available, fall back to network + if (!client) return fetch(request); + + const reqUrl = request.url; + const headersObj = {}; + for (const [k, v] of request.headers.entries()) headersObj[k] = v; + + const body = (request.method === "GET" || request.method === "HEAD") + ? null + : await request.arrayBuffer(); + + const id = crypto.randomUUID(); + const channel = new MessageChannel(); + + const responsePromise = new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + reject(new Error("Local server timeout")); + }, 30_000); + + channel.port1.onmessage = (event) => { + clearTimeout(timeout); + resolve(event.data); + }; + channel.port1.onmessageerror = () => { + clearTimeout(timeout); + reject(new Error("Local server message error")); + }; + }); + + // Send to the client with a reply port + client.postMessage({ + type: "LOCAL_FETCH", + id, + request: { + url: reqUrl, + method: request.method, + headers: headersObj, + body // ArrayBuffer or null + } + }, [channel.port2]); + + const localResp = await responsePromise; + + if (!localResp || localResp.type !== "LOCAL_FETCH_RESPONSE" || localResp.id !== id) { + // Protocol mismatch; fall back + return fetch(request); + } + + // localResp.response: { status, headers, body } + const { status, headers, body: respBody } = localResp.response; + + const respHeaders = new Headers(); + if (headers) { + for (const [k, v] of Object.entries(headers)) respHeaders.set(k, String(v)); + } + + return new Response(respBody ? 
respBody : null, { + status: status || 200, + headers: respHeaders + }); +} + +self.addEventListener("fetch", (event) => { + const url = new URL(event.request.url); + + // Only handle same-origin + if (url.origin !== self.location.origin) return; + + // API-ish: local-first via bridge (must be checked before navigate handling, + // because export triggers a navigation to an /api/ URL) + if (isLocalFirst(url)) { + event.respondWith(forwardToClientLocalServer(event.request, event.clientId)); + return; + } + + // HTML files: network-first to ensure updates are reflected immediately + if (event.request.mode === "navigate" || url.pathname.endsWith(".html")) { + event.respondWith(networkFirst(event.request)); + return; + } + + // Static assets: cache-first for performance + if (event.request.method === "GET") { + event.respondWith(cacheFirst(event.request)); + return; + } + + // Default + event.respondWith(fetch(event.request)); +}); diff --git a/apps/client-standalone/src/test_setup.ts b/apps/client-standalone/src/test_setup.ts new file mode 100644 index 00000000000..08e73f1116c --- /dev/null +++ b/apps/client-standalone/src/test_setup.ts @@ -0,0 +1,137 @@ +import { createRequire } from "node:module"; +import { readFileSync } from "node:fs"; +import { fileURLToPath } from "node:url"; + +import { initializeCore, options } from "@triliumnext/core"; +import schemaSql from "@triliumnext/core/src/assets/schema.sql?raw"; +import serverEnTranslations from "../../server/src/assets/translations/en/server.json"; +import { beforeAll } from "vitest"; + +import StandaloneBackupService from "./lightweight/backup_provider.js"; +import BrowserExecutionContext from "./lightweight/cls_provider.js"; +import BrowserCryptoProvider from "./lightweight/crypto_provider.js"; +import StandalonePlatformProvider from "./lightweight/platform_provider.js"; +import BrowserSqlProvider from "./lightweight/sql_provider.js"; +import BrowserZipProvider from "./lightweight/zip_provider.js"; +import { 
standaloneImageProvider } from "./services/image_provider.js"; + +// ============================================================================= +// SQLite WASM compatibility shims +// ============================================================================= +// The @sqlite.org/sqlite-wasm package loads its .wasm via fetch, and its +// bundled `instantiateWasm` hook overrides any user-supplied alternative. +// Two things go wrong under vitest + happy-dom: +// 1. happy-dom's `fetch()` refuses `file://` URLs. +// 2. happy-dom installs its own Response global, which Node's +// `WebAssembly.instantiateStreaming` rejects ("Received an instance of +// Response" — it wants undici's Response). +// We intercept fetch for file:// URLs ourselves and force instantiateStreaming +// to fall back to the ArrayBuffer path. +const fileFetchCache = new Map(); + +function readFileAsArrayBuffer(url: string): ArrayBuffer { + let cached = fileFetchCache.get(url); + if (!cached) { + const bytes = readFileSync(fileURLToPath(url)); + cached = bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength) as ArrayBuffer; + fileFetchCache.set(url, cached); + } + return cached; +} + +const originalFetch = globalThis.fetch; +globalThis.fetch = (async (input: RequestInfo | URL, init?: RequestInit) => { + const url = typeof input === "string" + ? input + : input instanceof URL + ? 
input.href + : input.url; + + if (url.startsWith("file://")) { + const body = readFileAsArrayBuffer(url); + return new Response(body, { + status: 200, + headers: { "Content-Type": "application/wasm" } + }); + } + + return originalFetch(input as RequestInfo, init); +}) as typeof fetch; + +WebAssembly.instantiateStreaming = (async (source, importObject) => { + const response = await source; + const bytes = await response.arrayBuffer(); + return WebAssembly.instantiate(bytes, importObject); +}) as typeof WebAssembly.instantiateStreaming; + +// ============================================================================= +// happy-dom HTMLParser spec compliance patch +// ============================================================================= +// Per HTML5 parsing spec, a single U+000A LINE FEED immediately after a
<pre>,
+// <textarea>, or