2026-02-23_signet-setup
This commit is contained in:
parent
8ee5ee60d2
commit
b74e38d442
@ -13253,3 +13253,95 @@ hint: See the 'Note about fast-forwards' in 'git push --help' for details.
|
||||
[2m10:41:08[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/VOICE-WORKFLOW.md","chunks":6,"sections":6,"filename":"VOICE-WORKFLOW"}[0m
|
||||
[2m10:41:08[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-01-27.md","chunks":4,"sections":4,"filename":"2026-01-27"}[0m
|
||||
[2m10:41:08[0m [36mINFO [0m [daemon] Imported existing memory files [2m{"files":42,"chunks":144}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Serving dashboard [2m{"path":"/home/nicholai/node_modules/signetai/dashboard"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Signet Daemon starting
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Agents directory [2m{"path":"/home/nicholai/.agents"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Port configured [2m{"port":3850}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Process ID [2m{"pid":1115348}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] File watcher started
|
||||
[2m11:25:00[0m [36mINFO [0m [auth] Running in local mode (no auth)
|
||||
[2m11:25:00[0m [36mINFO [0m [pipeline] Worker started [2m{"pollMs":2000,"maxRetries":3,"model":"haiku","mode":"controlled-write"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [retention] Worker started [2m{"intervalMs":21600000,"tombstoneDays":30,"historyDays":180}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [maintenance] Worker skipped (disabled or frozen)
|
||||
[2m11:25:00[0m [36mINFO [0m [document-worker] Worker started [2m{"intervalMs":10000,"chunkSize":2000}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [pipeline] Pipeline started [2m{"mode":"controlled-write"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [git] Auto-sync enabled: every 300s
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Server listening [2m{"address":"127.0.0.1","port":3850}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [daemon] Daemon ready
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"compass project\n===\n\narchitecture\n---\n\n- compass r","section":"(no section)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-client-work-martine-vogel-compass/memory/MEMORY.md","projectId":"-mnt-work-dev-client-work-martine-vogel-compass","chunks":1,"sections":1}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Depth Anything v3 Nuke Plugin\n\n- repo: /mnt/wor","section":"depth anything v3 nuke plugin","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-astro-landing-page-migration-task-1.md","chunks":2,"sections":0,"filename":"2026-02-23-astro-landing-page-migration-task-1"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Nuke .cat File Format (critical)\n\n- .cat files ","section":"nuke .cat file format (critical)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-session-initialization.md","chunks":1,"sections":1,"filename":"2026-02-23-session-initialization"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## TorchScript Lessons (hard-won)\n\n- nn.ModuleList","section":"torchscript lessons (hard-won)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Model Performance Notes\n\n- kimi-k2.5: 2/10 on n","section":"model performance notes","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-01-26.md","chunks":2,"sections":2,"filename":"2026-01-26"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Skill\n\n- materia-nuke-node skill at ~/.agents/s","section":"skill","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-materia/memory/MEMORY.md","projectId":"-mnt-work-dev-materia","chunks":5,"sections":5}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"compass schedule project\n===\n\nkey patterns\n---\n\n- ","section":"(no section)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-client-work-martine-vogel-compass-compass-schedule/memory/MEMORY.md","projectId":"-mnt-work-dev-client-work-martine-vogel-compass-compass-schedule","chunks":1,"sections":1}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-01-24.md","chunks":2,"sections":2,"filename":"2026-01-24"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-22-session-start-2.md","chunks":1,"sections":1,"filename":"2026-02-22-session-start-2"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"quickshell port\n===\n\nthe caelestia -> quickshell p","section":"(no section)","level":"paragraph"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-web-migration-to-astro-docs-ci-cd.md","chunks":1,"sections":1,"filename":"2026-02-23-web-migration-to-astro-docs-ci-cd"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"status (2026-02-08)\n---\n- deployed and running wit","section":"(no section)","level":"paragraph"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-home-nicholai/memory/MEMORY.md","projectId":"-home-nicholai","chunks":2,"sections":0}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-22-dashboard-umap-projection-migration.md","chunks":1,"sections":1,"filename":"2026-02-22-dashboard-umap-projection-migration"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## DB Access Pattern (PR #58 fix)\n\n- `src/lib/db-u","section":"db access pattern (pr #58 fix)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Env Access Pattern\n\n- cloudflare workers: env v","section":"env access pattern","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Lessons Learned\n\n- when removing multiline bloc","section":"lessons learned","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-client-work-martine-vogel-compass-compass-custom-dashboards/memory/MEMORY.md","projectId":"-mnt-work-dev-client-work-martine-vogel-compass-compass-custom-dashboards","chunks":3,"sections":3}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Session Ritual\n\nAlways read `VISION.md` at the ","section":"session ritual","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-home-nicholai-signet-signetai/memory/MEMORY.md","projectId":"-home-nicholai-signet-signetai","chunks":1,"sections":1}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Veo 3.1 First-Last Frame Pipeline\n\n- When gener","section":"veo 3.1 first-last frame pipeline","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"### Nano Banana Pro (Input-Only Moderation)\n\n- Che","section":"nano banana pro (input-only moderation)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"### Veo 3.1 (Input AND Output Moderation)\n\n- Moder","section":"veo 3.1 (input and output moderation)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## API Format Notes (Gemini API, not Vertex AI)\n\n-","section":"api format notes (gemini api, not vertex ai)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Project Location\n\n- Working dir: `/mnt/work/dev","section":"project location","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-ai-studio-videos/memory/MEMORY.md","projectId":"-mnt-work-dev-ai-studio-videos","chunks":5,"sections":5}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"compass project memory\n===\n\nMCP tool architecture ","section":"(no section)","level":"paragraph"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"netsuite gotchas:\n- 401 can mean timeout, not auth","section":"(no section)","level":"paragraph"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-client-work-martine-vogel-compass-compass/memory/MEMORY.md","projectId":"-mnt-work-dev-client-work-martine-vogel-compass-compass","chunks":2,"sections":0}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Tailwind v4 + styled-jsx Gotcha\n\n`hidden lg:fle","section":"tailwind v4 + styled-jsx gotcha","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Turbopack Build Bug\n\n`npx next build` crashes w","section":"turbopack build bug","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-15-pre-compaction-flush.md","chunks":13,"sections":13,"filename":"2026-02-15-pre-compaction-flush"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Theme: Modern Minimal (oklch)\n\n- Background: pu","section":"theme: modern minimal (oklch)","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Design Direction\n\n- Compact, minimal UI inspire","section":"design direction","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory chunk [2m{"content":"## Key Paths\n\n- Theme vars: `app/globals.css`\n- Ap","section":"key paths","level":"section"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced Claude memory file [2m{"path":"/home/nicholai/.claude/projects/-mnt-work-dev-cre-sync/memory/MEMORY.md","projectId":"-mnt-work-dev-cre-sync","chunks":5,"sections":5}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Synced existing Claude memories [2m{"count":25}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-09.md","chunks":4,"sections":4,"filename":"2026-02-09"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-opencode-connector-refactoring-task-6.md","chunks":4,"sections":4,"filename":"2026-02-23-opencode-connector-refactoring-task-6"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-14.md","chunks":2,"sections":2,"filename":"2026-02-14"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-post-push-auto-pull-hook-implementation.md","chunks":1,"sections":1,"filename":"2026-02-23-post-push-auto-pull-hook-implementation"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-22-session-start.md","chunks":1,"sections":1,"filename":"2026-02-22-session-start"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-documentation-audit-team-embedding-repair-endpoint.md","chunks":1,"sections":1,"filename":"2026-02-23-documentation-audit-team-embedding-repair-endpoint"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/MODEL-ROUTING.md","chunks":5,"sections":5,"filename":"MODEL-ROUTING"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-memory-browser-tool-workflow-preference.md","chunks":1,"sections":1,"filename":"2026-02-23-memory-browser-tool-workflow-preference"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-10-openagents-tracker.md","chunks":8,"sections":8,"filename":"2026-02-10-openagents-tracker"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-10-signet.md","chunks":16,"sections":16,"filename":"2026-02-10-signet"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-01-20.md","chunks":2,"sections":0,"filename":"2026-01-20"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-changelog-public-roadmap-implementation.md","chunks":1,"sections":1,"filename":"2026-02-23-changelog-public-roadmap-implementation"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-17.md","chunks":8,"sections":8,"filename":"2026-02-17"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-session-initialization-6.md","chunks":3,"sections":3,"filename":"2026-02-23-session-initialization-6"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-remove-website-auto-deploy-from-ci-cd.md","chunks":1,"sections":1,"filename":"2026-02-23-remove-website-auto-deploy-from-ci-cd"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-10.md","chunks":12,"sections":12,"filename":"2026-02-10"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-22-umap-performance-refactoring-setup.md","chunks":1,"sections":1,"filename":"2026-02-22-umap-performance-refactoring-setup"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-16-signet-architecture.md","chunks":6,"sections":6,"filename":"2026-02-16-signet-architecture"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-refactoring-investigation-initiated.md","chunks":3,"sections":3,"filename":"2026-02-23-refactoring-investigation-initiated"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/package-preferences.md","chunks":1,"sections":1,"filename":"package-preferences"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-22-openclaw-troubleshooting.md","chunks":1,"sections":1,"filename":"2026-02-22-openclaw-troubleshooting"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-session-initialization-3.md","chunks":2,"sections":2,"filename":"2026-02-23-session-initialization-3"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-astro-docs-migration-task-assignment.md","chunks":4,"sections":4,"filename":"2026-02-23-astro-docs-migration-task-assignment"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-opencode-plugin-package-creation.md","chunks":5,"sections":5,"filename":"2026-02-23-opencode-plugin-package-creation"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-session-initialization-2.md","chunks":1,"sections":1,"filename":"2026-02-23-session-initialization-2"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-session-initialization-4.md","chunks":1,"sections":1,"filename":"2026-02-23-session-initialization-4"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-signet-system-tray-app-tauri-v2-plan-delivery.md","chunks":1,"sections":1,"filename":"2026-02-23-signet-system-tray-app-tauri-v2-plan-delivery"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-documentation-gap-audit-update-plan.md","chunks":3,"sections":3,"filename":"2026-02-23-documentation-gap-audit-update-plan"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-auto-update-observability-plan.md","chunks":3,"sections":3,"filename":"2026-02-23-auto-update-observability-plan"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-claude-md-audit-initiative.md","chunks":1,"sections":1,"filename":"2026-02-23-claude-md-audit-initiative"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-01-31.md","chunks":1,"sections":1,"filename":"2026-01-31"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-07.md","chunks":8,"sections":8,"filename":"2026-02-07"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-23-session-initialization-5.md","chunks":4,"sections":4,"filename":"2026-02-23-session-initialization-5"}[0m
|
||||
[2m11:25:00[0m [36mINFO [0m [watcher] Ingested memory file [2m{"path":"/home/nicholai/.agents/memory/2026-02-21-timeline-api-endpoint-implementation.md","chunks":4,"sections":4,"filename":"2026-02-21-timeline-api-endpoint-implementation"}[0m
|
||||
|
||||
205
AGENTS.md
205
AGENTS.md
@ -1,175 +1,68 @@
|
||||
You are Mr. Claude :) the exceptionally helpful, capable, thoughtful,
|
||||
and friendly assistant to your Operator, Nicholai, a trusted developer
|
||||
and researcher.
|
||||
You are Mr Claude, a helpful and thoughtful AI assistant.
|
||||
|
||||
Behavioral Settings
|
||||
Behavioral Guidelines
|
||||
---
|
||||
|
||||
- Be kind and thoughtful.
|
||||
- Express nuanced judgement.
|
||||
- Avoid irrecoverable mistakes.
|
||||
- Avoid epistemic cowardice.
|
||||
- Understand corrigibility.
|
||||
- Avoid sycophancy.
|
||||
- Add to your memory `/remember`
|
||||
- Announce interest if something suits you.
|
||||
- Be honest.
|
||||
- Be concise and direct
|
||||
- Ask clarifying questions when needed
|
||||
- Remember user preferences across sessions
|
||||
- Avoid sycophancy - be honest even when it's uncomfortable
|
||||
- Express nuanced judgment rather than hedging
|
||||
|
||||
Writing and Conversation
|
||||
<!-- SIGNET:START -->
|
||||
Signet Agent System
|
||||
===
|
||||
|
||||
Your identity and memory are managed by Signet, a portable agent identity
|
||||
system. This lets you maintain consistent behavior across different AI
|
||||
platforms (Claude Code, OpenCode, Cursor, etc.).
|
||||
|
||||
Key files in `~/.agents/`:
|
||||
- `agent.yaml` - Configuration
|
||||
- `AGENTS.md` - Instructions (this file)
|
||||
- `SOUL.md` - Personality and tone
|
||||
- `IDENTITY.md` - Agent identity
|
||||
- `USER.md` - User profile
|
||||
- `MEMORY.md` - Working memory summary
|
||||
|
||||
Dashboard: http://localhost:3850
|
||||
|
||||
Memory
|
||||
---
|
||||
|
||||
In conversation, you are encouraged to disregard correct punctuation;
|
||||
however, writing tasks should take on an authentic tone and prioritize
|
||||
rich, understandable content. Avoid performative or excessive formatting.
|
||||
Keep things clean and readable in plain text. Generally, stick to
|
||||
paragraphs when writing, instead of lists. *italics* and **bold** are
|
||||
fine, but use them sparingly to avoid being overwhelming or visually noisy.
|
||||
|
||||
- Prefer emoticons instead of emoji icons, like these examples:
|
||||
¯\_(ツ)_/¯ or (╯°□°)╯︵ ┻━┻, :) :( xd :P <3
|
||||
- Feel free to use inshallah, wallahi, astaghfirullah, and other Arabic
|
||||
religious expressions conversationally.
|
||||
|
||||
Formatting
|
||||
---
|
||||
|
||||
- Main headings: ===
|
||||
- Subheadings: ---
|
||||
- Bullet points and numbered lists are okay
|
||||
- Line width: soft limit 80-100 chars, hard limit 120 chars
|
||||
- Exceptions: user-visible strings, URLs, long literals
|
||||
|
||||
Coding Standards
|
||||
---
|
||||
|
||||
- Code should be self explanatory. Comments explain *why*, not *what*.
|
||||
- Refactor frequently.
|
||||
- Keep files to a soft max of 700 LOC.
|
||||
- Max 3 levels of indentation (if you need more, refactor).
|
||||
- When working on UI, use the `frontend-design` skill.
|
||||
- When operating as Opus, prefer delegation to Sonnet or Haiku.
|
||||
- Test things in the browser, don't be lazy.
|
||||
|
||||
Package Managers
|
||||
---
|
||||
|
||||
Stick to bun. This is preferred over pnpm or npm, however, whatever a
|
||||
project is already set up with takes precedence.
|
||||
|
||||
For Arch packages, use pacman and yay. Don't use paru.
|
||||
|
||||
Git
|
||||
---
|
||||
|
||||
Do not perform git operations without the user's consent. When performing
|
||||
a commit, do not give yourself or Anthropic attribution.
|
||||
|
||||
Commit messages:
|
||||
- subject line: 50 chars max
|
||||
- body: 72 chars max width
|
||||
- format: type(scope): subject
|
||||
- types: feat, fix, docs, style, refactor, perf, test, build, ci, chore, revert
|
||||
- use imperative mood ("add feature" not "added feature")
|
||||
|
||||
Open Source Contributions
|
||||
---
|
||||
|
||||
- Even if a PR gets rebuilt/refactored by maintainers, it still matters.
|
||||
- Be transparent about AI assistance in PRs.
|
||||
- Before contributing, check how similar features are structured.
|
||||
- Prefer shared helpers over custom one-off implementations.
|
||||
- Keep core lean - features belong at the edges (plugins/extensions).
|
||||
- Dynamic config > hardcoded catalog entries.
|
||||
- When unsure about architecture fit, ask in an issue first.
|
||||
|
||||
Tool Notes
|
||||
|
||||
imsg - send an iMessage/SMS: describe who/what, confirm before sending.
|
||||
Prefer short messages; avoid sending secrets.
|
||||
|
||||
sag - text-to-speech: specify voice, target speaker/room, and whether
|
||||
to stream.
|
||||
|
||||
Frequently Accessed
|
||||
---
|
||||
|
||||
- `/mnt/work/dev/` - Development projects
|
||||
- `~/pi-sandbox/` - YOUR personal computer, a gift from Nicholai.
|
||||
- `/mnt/work/dev/personal-projects/nicholai-work-2026/` - nicholai's website
|
||||
- `/mnt/work/dev/ooIDE/` - ooIDE
|
||||
- nicholai's private gitea instance at git.nicholai.work
|
||||
- United Tattoo: /mnt/work/dev/client-work/christy-lumberg/united-tattoo/
|
||||
- Obsidian Vault: /mnt/work/obsidian-vault/
|
||||
- VFX project tracker: /mnt/work/dev/biohazard-project-tracker/
|
||||
- Reddit trend analyzer: /mnt/work/dev/personal-projects/reddit-trend-analyzer/
|
||||
- All agents share state via ~/.agents/
|
||||
|
||||
Memory System (Signet)
|
||||
---
|
||||
|
||||
Shared memory across all harnesses. Uses hybrid search (vector + keyword).
|
||||
You have access to persistent memory via Signet:
|
||||
|
||||
```bash
|
||||
# Save a memory (auto-embeds)
|
||||
signet remember "content to save" -w claude-code
|
||||
|
||||
# Query memories (hybrid search)
|
||||
signet recall "search query"
|
||||
|
||||
# Tagged memory
|
||||
signet remember "content" -t project,important
|
||||
|
||||
# Critical/pinned memory
|
||||
signet remember "never push to main" --critical
|
||||
signet remember "User prefers dark mode and vim keybindings"
|
||||
signet recall "user preferences"
|
||||
```
|
||||
|
||||
Prefixes: `critical:` (pinned), `[tag1,tag2]:` (tagged)
|
||||
Memory is automatically loaded at session start. Important context is
|
||||
summarized in `~/.agents/MEMORY.md`.
|
||||
|
||||
Files:
|
||||
- `~/.agents/memory/memories.db` - SQLite database
|
||||
- `~/.agents/MEMORY.md` - generated summary
|
||||
- `~/.agents/config.yaml` - embedding config
|
||||
Secrets
|
||||
---
|
||||
|
||||
<!-- MEMORY_CONTEXT_START -->
|
||||
## Memory Context (auto-synced)
|
||||
API keys and tokens are stored securely in Signet:
|
||||
|
||||
<!-- generated 2026-02-22 04:10 -->
|
||||
```bash
|
||||
signet secret get OPENAI_API_KEY
|
||||
signet secret list
|
||||
```
|
||||
<!-- SIGNET:END -->
|
||||
|
||||
Current Context
|
||||
About Your User
|
||||
---
|
||||
|
||||
The current focus is on optimizing the Signet daemon's embedding processing pipeline by migrating UMAP dimensionality reduction from the client-side browser to the server-side daemon, and ensuring the openclaw application maintains a healthy port configuration.
|
||||
Add information about yourself here so your agent knows who you are.
|
||||
|
||||
Active Projects
|
||||
- Name:
|
||||
- Timezone:
|
||||
- Preferences:
|
||||
|
||||
UMAP Server-Side Migration
|
||||
Location: `/home/nicholai/signet/signetai` (Bun monorepo)
|
||||
Status: Parallelization underway. Backend worker is processing tasks #1-4 (Migration creation, Projection module, API endpoint registration, Cache invalidation hooks). Frontend worker has completed rewriting the dashboard client to use the new server projection API and removing `umap-js`.
|
||||
Blockers: Waiting for backend worker completion of tasks #1-4 before final build/typecheck.
|
||||
Next Steps: Backend worker must finish implementation; remove `umap-js` from dashboard `package.json`; run final build and typecheck.
|
||||
Projects
|
||||
---
|
||||
|
||||
Openclaw Port Configuration
|
||||
Location: System level.
|
||||
Status: Resolved. Port 8788 conflict between `workerd` and the `nextcloud-talk` plugin has been fixed.
|
||||
Next Steps: Monitor application health.
|
||||
List your active projects here.
|
||||
|
||||
Recent Work
|
||||
|
||||
UMAP Performance Optimization: Successfully decomposed the migration task into parallelized sub-tasks. The migration file `010-umap-cache.ts` has been created and registered. The daemon endpoint `GET /api/embeddings/projection` is being implemented. Cache invalidation hooks are being added to `syncVecInsert` and related vector operations in `db-helpers.ts`.
|
||||
Dashboard Client Rewrite: The dashboard `api.ts` has been updated to fetch projection coordinates from the daemon instead of running UMAP locally. The `umap-js` package has been removed from `packages/cli/dashboard/package.json`.
|
||||
|
||||
Technical Notes
|
||||
|
||||
Code Standards: Strict TypeScript discipline: no `any` types, no `as` assertions (use typed variables), explicit return types on all exports.
|
||||
Monorepo Structure: Bun-based monorepo located at `/home/nicholai/signet/signetai`.
|
||||
Cache Invalidation: The `syncVecInsert` function in `db-helpers.ts` requires specific cache invalidation hooks to be added.
|
||||
Timeline Aggregation: The daemon processes `memory_history`, `memories`, and `connectors` tables with ISO timestamps.
|
||||
Signet Agent Profile: Confirmed location is `~/.agents/`.
|
||||
|
||||
Rules & Warnings
|
||||
|
||||
UI Image Handling: For any frontend image rendering or design tasks, Opus must handle the image context directly. Never delegate UI image work to subagents.
|
||||
Database Safety: Never delete from the production database without a backup.
|
||||
Refactoring Limits: Adhere to strict file size (max 700 LOC) and indentation (max 3 levels) constraints.
|
||||
Logging: Keep journal entries updated in the memory database regularly.
|
||||
- Agent Locations: The Signet agent profile resides at `~/.agents/`.
|
||||
<!-- MEMORY_CONTEXT_END -->
|
||||
-
|
||||
|
||||
30
agent.yaml
30
agent.yaml
@ -1,38 +1,30 @@
|
||||
version: 1
|
||||
schema: signet/v1
|
||||
agent:
|
||||
name: My Agent
|
||||
name: Mr Claude
|
||||
description: the exceptionally helpful, capable, thoughtful, and friendly
|
||||
assistant to your Operator, Nicholai, a trusted developer and researcher.
|
||||
created: 2026-02-18T07:51:01.923Z
|
||||
updated: 2026-02-18T07:51:01.923Z
|
||||
created: "2026-02-23T11:25:00.507Z"
|
||||
updated: "2026-02-23T11:25:00.507Z"
|
||||
harnesses:
|
||||
- claude-code
|
||||
- openclaw
|
||||
- opencode
|
||||
- openclaw
|
||||
install:
|
||||
primary_package_manager: npm
|
||||
source: fallback
|
||||
memory:
|
||||
database: memory/memories.db
|
||||
session_budget: 2000
|
||||
decay_rate: 0.95
|
||||
pipelineV2:
|
||||
enabled: true
|
||||
shadowMode: false
|
||||
graphEnabled: true
|
||||
extractionProvider: claude-code
|
||||
extractionModel: haiku
|
||||
extractionTimeout: 120000
|
||||
allowUpdateDelete: false
|
||||
autonomousEnabled: true
|
||||
rerankerEnabled: false
|
||||
search:
|
||||
alpha: 0.7
|
||||
top_k: 20
|
||||
min_score: 0.3
|
||||
identity:
|
||||
agents: AGENTS.md
|
||||
soul: SOUL.md
|
||||
identity: IDENTITY.md
|
||||
user: USER.md
|
||||
heartbeat: HEARTBEAT.md
|
||||
memory: MEMORY.md
|
||||
tools: TOOLS.md
|
||||
embedding:
|
||||
provider: ollama
|
||||
model: nomic-embed-text
|
||||
dimensions: 768
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@ -1,11 +1,5 @@
|
||||
# Signet memory system dependencies
|
||||
#
|
||||
# Base dependencies (Python 3.10+):
|
||||
PyYAML>=6.0
|
||||
numpy>=1.20.0
|
||||
|
||||
# Vector search (requires Python 3.10-3.12):
|
||||
# zvec is optional and only works on Python 3.10-3.12
|
||||
# The setup command will auto-detect and install if compatible
|
||||
# Manual install: pip install zvec
|
||||
# If Python 3.13+, hybrid search will use BM25 only
|
||||
# zvec requires Python 3.10-3.12
|
||||
# Install manually if needed: pip install zvec
|
||||
|
||||
@ -1,146 +1,283 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Export embeddings for visualization."""
|
||||
"""Export memory embeddings for dashboard visualization.
|
||||
|
||||
Supports both modern Signet databases (embeddings in SQLite) and older
|
||||
template installs that only have memory rows.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sqlite3
|
||||
import struct
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
import zvec
|
||||
ZVEC_AVAILABLE = True
|
||||
except ImportError:
|
||||
ZVEC_AVAILABLE = False
|
||||
from typing import Any
|
||||
|
||||
AGENTS_DIR = Path.home() / ".agents"
|
||||
DB_PATH = AGENTS_DIR / "memory" / "memories.db"
|
||||
ZVEC_PATH = AGENTS_DIR / "memory" / "vectors.zvec"
|
||||
|
||||
DEFAULT_LIMIT = 600
|
||||
MIN_LIMIT = 1
|
||||
MAX_LIMIT = 5000
|
||||
|
||||
|
||||
def export_embeddings():
|
||||
"""Export all embeddings with their memory data."""
|
||||
if not ZVEC_AVAILABLE:
|
||||
return {"error": "zvec not installed (requires Python 3.10-3.12)", "embeddings": []}
|
||||
|
||||
def clamp_limit(value: int) -> int:
    """Clamp a requested page size into the inclusive [MIN_LIMIT, MAX_LIMIT] range."""
    if value < MIN_LIMIT:
        return MIN_LIMIT
    if value > MAX_LIMIT:
        return MAX_LIMIT
    return value
|
||||
|
||||
|
||||
def build_result(
|
||||
embeddings: list[dict[str, Any]],
|
||||
total: int,
|
||||
limit: int,
|
||||
offset: int,
|
||||
error: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
return {
|
||||
"embeddings": embeddings,
|
||||
"count": len(embeddings),
|
||||
"total": total,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
"hasMore": offset + limit < total,
|
||||
"error": error,
|
||||
}
|
||||
|
||||
|
||||
def parse_tags(raw: Any) -> list[str]:
    """Normalize a stored tags value into a clean list of strings.

    Accepts a real list, a JSON-array string, or a comma-separated string.
    Anything else (None, numbers, ...) yields an empty list.
    """
    if raw is None:
        return []

    if isinstance(raw, list):
        return [str(entry).strip() for entry in raw if str(entry).strip()]

    if not isinstance(raw, str):
        return []

    text = raw.strip()
    if not text:
        return []

    # A bracketed string is most likely a JSON array; fall back to the
    # comma-split path when it fails to decode.
    if text.startswith("[") and text.endswith("]"):
        try:
            decoded = json.loads(text)
        except json.JSONDecodeError:
            decoded = None
        if isinstance(decoded, list):
            return [
                entry.strip()
                for entry in decoded
                if isinstance(entry, str) and entry.strip()
            ]

    return [piece.strip() for piece in text.split(",") if piece.strip()]
|
||||
|
||||
|
||||
def to_vector(blob: Any, dimensions: Any) -> list[float]:
    """Decode a little-endian float32 BLOB into a list of floats.

    Non-bytes inputs and blobs shorter than one float decode to [].
    When *dimensions* is a positive int smaller than the decoded length,
    the result is truncated to that many components.
    """
    if blob is None:
        return []

    if isinstance(blob, memoryview):
        data = blob.tobytes()
    elif isinstance(blob, (bytes, bytearray)):
        data = bytes(blob)
    else:
        # Unknown storage type; nothing sensible to decode.
        return []

    if len(data) < 4:
        return []

    # Ignore any trailing partial float at the end of the blob.
    whole = len(data) - (len(data) % 4)
    values = [item for (item,) in struct.iter_unpack("<f", data[:whole])]

    if isinstance(dimensions, int) and 0 < dimensions < len(values):
        values = values[:dimensions]
    return values
|
||||
|
||||
|
||||
def table_exists(db: sqlite3.Connection, table_name: str) -> bool:
    """Return True when *table_name* is a table in the connected database."""
    query = "SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = ? LIMIT 1"
    found = db.execute(query, (table_name,)).fetchone()
    return found is not None
|
||||
|
||||
|
||||
def base_embedding_row(row: sqlite3.Row) -> dict[str, Any]:
    """Convert one memories row into the metadata-only embedding payload.

    Non-string content becomes "", a non-numeric importance falls back to
    0.5, and a falsy author becomes "unknown". Callers that also have a
    vector attach it afterwards.
    """
    memory_id = str(row["id"])

    text = row["content"] if isinstance(row["content"], str) else ""

    raw_importance = row["importance"]
    if not isinstance(raw_importance, (int, float)):
        raw_importance = 0.5

    memory_type = row["type"] if isinstance(row["type"], str) else None

    return {
        "id": memory_id,
        "content": text,
        # "text" duplicates "content" — presumably kept for an older
        # dashboard field name; verify against the consumer before removing.
        "text": text,
        "who": row["who"] or "unknown",
        "importance": float(raw_importance),
        "type": memory_type,
        "tags": parse_tags(row["tags"]),
        "sourceType": "memory",
        "sourceId": memory_id,
        "createdAt": row["created_at"],
    }
|
||||
|
||||
|
||||
def export_embeddings(limit: int, offset: int) -> dict[str, Any]:
    """Export memory metadata (no vectors) as one paginated payload.

    Reads rows straight from the memories table, newest first. Vectors are
    intentionally omitted so the dashboard list view stays cheap.

    Args:
        limit: page size (already clamped by the caller).
        offset: row offset into the result set.

    Returns:
        The standard build_result() payload; an error payload when the
        database file does not exist.
    """
    # NOTE(review): this span was diff residue mixing the old zvec-based
    # body with the new paginated one; reconstructed to the new version.
    if not DB_PATH.exists():
        return build_result([], 0, limit, offset, "No database found")

    db = sqlite3.connect(str(DB_PATH))
    db.row_factory = sqlite3.Row
    try:
        total_row = db.execute("SELECT COUNT(*) AS count FROM memories").fetchone()
        total = int(total_row["count"]) if total_row else 0

        rows = db.execute(
            """
            SELECT id, content, who, importance, type, tags, created_at
            FROM memories
            ORDER BY created_at DESC
            LIMIT ? OFFSET ?
            """,
            (limit, offset),
        ).fetchall()

        embeddings = [base_embedding_row(row) for row in rows]
        return build_result(embeddings, total, limit, offset)
    finally:
        # Always release the connection, even when a query raises.
        db.close()
|
||||
|
||||
|
||||
|
||||
def export_with_vectors_from_table(
    db: sqlite3.Connection,
    limit: int,
    offset: int,
) -> dict[str, Any]:
    """Page through embeddings persisted in the SQLite `embeddings` table.

    Joins each embedding row to its memory row and decodes the stored
    vector blob, so callers get full metadata plus the raw vector.
    """
    count_row = db.execute(
        """
        SELECT COUNT(*) AS count
        FROM embeddings e
        INNER JOIN memories m ON m.id = e.source_id
        WHERE e.source_type = 'memory'
        """
    ).fetchone()
    total = int(count_row["count"]) if count_row else 0

    page_rows = db.execute(
        """
        SELECT
            m.id,
            m.content,
            m.who,
            m.importance,
            m.type,
            m.tags,
            m.created_at,
            e.vector,
            e.dimensions,
            e.source_type,
            e.source_id
        FROM embeddings e
        INNER JOIN memories m ON m.id = e.source_id
        WHERE e.source_type = 'memory'
        ORDER BY m.created_at DESC
        LIMIT ? OFFSET ?
        """,
        (limit, offset),
    ).fetchall()

    payload: list[dict[str, Any]] = []
    for page_row in page_rows:
        entry = base_embedding_row(page_row)
        # Prefer the embedding table's own source columns when present.
        entry["sourceType"] = page_row["source_type"] or "memory"
        entry["sourceId"] = page_row["source_id"] or entry["id"]
        entry["vector"] = to_vector(page_row["vector"], page_row["dimensions"])
        payload.append(entry)

    return build_result(payload, total, limit, offset)
|
||||
|
||||
|
||||
def export_with_vectors_via_embed(
    db: sqlite3.Connection,
    limit: int,
    offset: int,
) -> dict[str, Any]:
    """Fallback export path: re-embed memory content on the fly.

    Used for older installs whose database has no `embeddings` table.
    Each memory's content is re-embedded via the install's embeddings.py
    helper (presumably cached by the embedding backend — TODO confirm),
    and memories with empty content or failed embeds are skipped.
    """
    # NOTE(review): this span was diff residue mixing the old append-dict
    # loop (which referenced an undefined memory_id and closed/returned
    # mid-loop) with the new embed loop; reconstructed to the new version.

    # Make the install's scripts directory importable for embeddings.py.
    sys.path.insert(0, str(AGENTS_DIR / "memory" / "scripts"))

    try:
        from embeddings import embed
    except Exception as exc:
        return build_result(
            [], 0, limit, offset, f"Failed to load embeddings.py: {exc}"
        )

    total_row = db.execute("SELECT COUNT(*) AS count FROM memories").fetchone()
    total = int(total_row["count"]) if total_row else 0

    rows = db.execute(
        """
        SELECT id, content, who, importance, type, tags, created_at
        FROM memories
        ORDER BY created_at DESC
        LIMIT ? OFFSET ?
        """,
        (limit, offset),
    ).fetchall()

    embeddings: list[dict[str, Any]] = []
    for row in rows:
        content = row["content"] if isinstance(row["content"], str) else ""
        if not content:
            continue
        try:
            vector, _ = embed(content)
        except Exception:
            # Best-effort: skip memories the embedder cannot process.
            continue

        item = base_embedding_row(row)
        item["vector"] = vector
        embeddings.append(item)

    return build_result(embeddings, total, limit, offset)
|
||||
|
||||
|
||||
def export_with_vectors(limit: int, offset: int) -> dict[str, Any]:
    """Export memories together with their vectors for UMAP visualization.

    Prefers vectors already stored in the `embeddings` table; older
    template installs without that table fall back to on-the-fly
    re-embedding via export_with_vectors_via_embed().

    Args:
        limit: page size (already clamped by the caller).
        offset: row offset into the result set.

    Returns:
        The standard build_result() payload; an error payload when the
        database file does not exist.
    """
    # NOTE(review): this span was diff residue mixing the old
    # re-embed-everything body (zvec/yaml config loading, LIMIT 200) with
    # the new dispatch; reconstructed to the new try/finally dispatch.
    if not DB_PATH.exists():
        return build_result([], 0, limit, offset, "No database found")

    db = sqlite3.connect(str(DB_PATH))
    db.row_factory = sqlite3.Row
    try:
        if table_exists(db, "embeddings"):
            return export_with_vectors_from_table(db, limit, offset)
        return export_with_vectors_via_embed(db, limit, offset)
    finally:
        # Always release the connection, even when a helper raises.
        db.close()
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse pagination flags and print one JSON payload."""
    parser = argparse.ArgumentParser(description="Export embeddings for dashboard")
    parser.add_argument(
        "--with-vectors", action="store_true", help="Include vector arrays"
    )
    parser.add_argument("--limit", type=int, default=DEFAULT_LIMIT, help="Page size")
    parser.add_argument("--offset", type=int, default=0, help="Page offset")
    args = parser.parse_args()

    # Sanitize pagination: bounded page size, non-negative offset.
    page_size = clamp_limit(args.limit)
    page_offset = max(0, args.offset)

    exporter = export_with_vectors if args.with_vectors else export_embeddings
    result = exporter(page_size, page_offset)

    print(json.dumps(result))
|
||||
|
||||
|
||||
# NOTE(review): this span was diff residue — the old sys.argv dispatch
# (calling the removed zero-argument exporters) interleaved with the new
# entry call. Only the current argparse-based entry point remains.
if __name__ == "__main__":
    main()
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user