main efe6af9b2492 cached
294 files
1.1 MB
276.0k tokens
852 symbols
2 requests
Download .txt
Showing preview only (1,226K chars total). Download the full file or copy to clipboard to get everything.
Repository: Crosstalk-Solutions/project-nomad
Branch: main
Commit: efe6af9b2492
Files: 294
Total size: 1.1 MB

Directory structure:
gitextract_i1jwi_c6/

├── .dockerignore
├── .github/
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   └── feature_request.yml
│   ├── dependabot.yaml
│   ├── scripts/
│   │   └── finalize-release-notes.sh
│   └── workflows/
│       ├── build-disk-collector.yml
│       ├── build-primary-image.yml
│       ├── build-sidecar-updater.yml
│       ├── release.yml
│       └── validate-collection-urls.yml
├── .gitignore
├── .releaserc.json
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── admin/
│   ├── .editorconfig
│   ├── ace.js
│   ├── adonisrc.ts
│   ├── app/
│   │   ├── controllers/
│   │   │   ├── benchmark_controller.ts
│   │   │   ├── chats_controller.ts
│   │   │   ├── collection_updates_controller.ts
│   │   │   ├── docs_controller.ts
│   │   │   ├── downloads_controller.ts
│   │   │   ├── easy_setup_controller.ts
│   │   │   ├── home_controller.ts
│   │   │   ├── maps_controller.ts
│   │   │   ├── ollama_controller.ts
│   │   │   ├── rag_controller.ts
│   │   │   ├── settings_controller.ts
│   │   │   ├── system_controller.ts
│   │   │   └── zim_controller.ts
│   │   ├── exceptions/
│   │   │   ├── handler.ts
│   │   │   └── internal_server_error_exception.ts
│   │   ├── jobs/
│   │   │   ├── check_service_updates_job.ts
│   │   │   ├── check_update_job.ts
│   │   │   ├── download_model_job.ts
│   │   │   ├── embed_file_job.ts
│   │   │   ├── run_benchmark_job.ts
│   │   │   └── run_download_job.ts
│   │   ├── middleware/
│   │   │   ├── container_bindings_middleware.ts
│   │   │   ├── force_json_response_middleware.ts
│   │   │   └── maps_static_middleware.ts
│   │   ├── models/
│   │   │   ├── benchmark_result.ts
│   │   │   ├── benchmark_setting.ts
│   │   │   ├── chat_message.ts
│   │   │   ├── chat_session.ts
│   │   │   ├── collection_manifest.ts
│   │   │   ├── installed_resource.ts
│   │   │   ├── kv_store.ts
│   │   │   ├── service.ts
│   │   │   └── wikipedia_selection.ts
│   │   ├── services/
│   │   │   ├── benchmark_service.ts
│   │   │   ├── chat_service.ts
│   │   │   ├── collection_manifest_service.ts
│   │   │   ├── collection_update_service.ts
│   │   │   ├── container_registry_service.ts
│   │   │   ├── docker_service.ts
│   │   │   ├── docs_service.ts
│   │   │   ├── download_service.ts
│   │   │   ├── map_service.ts
│   │   │   ├── ollama_service.ts
│   │   │   ├── queue_service.ts
│   │   │   ├── rag_service.ts
│   │   │   ├── system_service.ts
│   │   │   ├── system_update_service.ts
│   │   │   ├── zim_extraction_service.ts
│   │   │   └── zim_service.ts
│   │   ├── utils/
│   │   │   ├── downloads.ts
│   │   │   ├── fs.ts
│   │   │   ├── misc.ts
│   │   │   └── version.ts
│   │   └── validators/
│   │       ├── benchmark.ts
│   │       ├── chat.ts
│   │       ├── common.ts
│   │       ├── curated_collections.ts
│   │       ├── download.ts
│   │       ├── ollama.ts
│   │       ├── rag.ts
│   │       ├── settings.ts
│   │       ├── system.ts
│   │       └── zim.ts
│   ├── bin/
│   │   ├── console.ts
│   │   ├── server.ts
│   │   └── test.ts
│   ├── commands/
│   │   ├── benchmark/
│   │   │   ├── results.ts
│   │   │   ├── run.ts
│   │   │   └── submit.ts
│   │   └── queue/
│   │       └── work.ts
│   ├── config/
│   │   ├── app.ts
│   │   ├── bodyparser.ts
│   │   ├── cors.ts
│   │   ├── database.ts
│   │   ├── hash.ts
│   │   ├── inertia.ts
│   │   ├── logger.ts
│   │   ├── queue.ts
│   │   ├── session.ts
│   │   ├── shield.ts
│   │   ├── static.ts
│   │   ├── transmit.ts
│   │   └── vite.ts
│   ├── constants/
│   │   ├── broadcast.ts
│   │   ├── kv_store.ts
│   │   ├── misc.ts
│   │   ├── ollama.ts
│   │   ├── service_names.ts
│   │   └── zim_extraction.ts
│   ├── database/
│   │   ├── migrations/
│   │   │   ├── 1751086751801_create_services_table.ts
│   │   │   ├── 1763499145832_update_services_table.ts
│   │   │   ├── 1764912210741_create_curated_collections_table.ts
│   │   │   ├── 1764912270123_create_curated_collection_resources_table.ts
│   │   │   ├── 1768170944482_update_services_add_installation_statuses_table.ts
│   │   │   ├── 1768453747522_update_services_add_icon.ts
│   │   │   ├── 1769097600001_create_benchmark_results_table.ts
│   │   │   ├── 1769097600002_create_benchmark_settings_table.ts
│   │   │   ├── 1769300000001_add_powered_by_and_display_order_to_services.ts
│   │   │   ├── 1769300000002_update_services_friendly_names.ts
│   │   │   ├── 1769324448000_add_builder_tag_to_benchmark_results.ts
│   │   │   ├── 1769400000001_create_installed_tiers_table.ts
│   │   │   ├── 1769400000002_create_kv_store_table.ts
│   │   │   ├── 1769500000001_create_wikipedia_selection_table.ts
│   │   │   ├── 1769646771604_create_create_chat_sessions_table.ts
│   │   │   ├── 1769646798266_create_create_chat_messages_table.ts
│   │   │   ├── 1769700000001_create_zim_file_metadata_table.ts
│   │   │   ├── 1770269324176_add_unique_constraint_to_curated_collection_resources_table.ts
│   │   │   ├── 1770273423670_drop_installed_tiers_table.ts
│   │   │   ├── 1770849108030_create_create_collection_manifests_table.ts
│   │   │   ├── 1770849119787_create_create_installed_resources_table.ts
│   │   │   ├── 1770850092871_create_drop_legacy_curated_tables_table.ts
│   │   │   ├── 1771000000001_add_update_fields_to_services.ts
│   │   │   └── 1771000000002_pin_latest_service_images.ts
│   │   └── seeders/
│   │       └── service_seeder.ts
│   ├── docs/
│   │   ├── about.md
│   │   ├── faq.md
│   │   ├── getting-started.md
│   │   ├── home.md
│   │   ├── release-notes.md
│   │   └── use-cases.md
│   ├── eslint.config.js
│   ├── inertia/
│   │   ├── app/
│   │   │   └── app.tsx
│   │   ├── components/
│   │   │   ├── ActiveDownloads.tsx
│   │   │   ├── ActiveEmbedJobs.tsx
│   │   │   ├── ActiveModelDownloads.tsx
│   │   │   ├── Alert.tsx
│   │   │   ├── BouncingDots.tsx
│   │   │   ├── BouncingLogo.tsx
│   │   │   ├── BuilderTagSelector.tsx
│   │   │   ├── CategoryCard.tsx
│   │   │   ├── CuratedCollectionCard.tsx
│   │   │   ├── DebugInfoModal.tsx
│   │   │   ├── DownloadURLModal.tsx
│   │   │   ├── DynamicIcon.tsx
│   │   │   ├── Footer.tsx
│   │   │   ├── HorizontalBarChart.tsx
│   │   │   ├── InfoTooltip.tsx
│   │   │   ├── InstallActivityFeed.tsx
│   │   │   ├── LoadingSpinner.tsx
│   │   │   ├── MarkdocRenderer.tsx
│   │   │   ├── ProgressBar.tsx
│   │   │   ├── StorageProjectionBar.tsx
│   │   │   ├── StyledButton.tsx
│   │   │   ├── StyledModal.tsx
│   │   │   ├── StyledSectionHeader.tsx
│   │   │   ├── StyledSidebar.tsx
│   │   │   ├── StyledTable.tsx
│   │   │   ├── ThemeToggle.tsx
│   │   │   ├── TierSelectionModal.tsx
│   │   │   ├── UpdateServiceModal.tsx
│   │   │   ├── WikipediaSelector.tsx
│   │   │   ├── chat/
│   │   │   │   ├── ChatAssistantAvatar.tsx
│   │   │   │   ├── ChatButton.tsx
│   │   │   │   ├── ChatInterface.tsx
│   │   │   │   ├── ChatMessageBubble.tsx
│   │   │   │   ├── ChatModal.tsx
│   │   │   │   ├── ChatSidebar.tsx
│   │   │   │   ├── KnowledgeBaseModal.tsx
│   │   │   │   └── index.tsx
│   │   │   ├── file-uploader/
│   │   │   │   ├── index.css
│   │   │   │   └── index.tsx
│   │   │   ├── inputs/
│   │   │   │   ├── Input.tsx
│   │   │   │   └── Switch.tsx
│   │   │   ├── layout/
│   │   │   │   └── BackToHomeHeader.tsx
│   │   │   ├── maps/
│   │   │   │   └── MapComponent.tsx
│   │   │   ├── markdoc/
│   │   │   │   ├── Heading.tsx
│   │   │   │   ├── Image.tsx
│   │   │   │   ├── List.tsx
│   │   │   │   ├── ListItem.tsx
│   │   │   │   └── Table.tsx
│   │   │   └── systeminfo/
│   │   │       ├── CircularGauge.tsx
│   │   │       ├── InfoCard.tsx
│   │   │       └── StatusCard.tsx
│   │   ├── context/
│   │   │   ├── ModalContext.ts
│   │   │   └── NotificationContext.ts
│   │   ├── css/
│   │   │   └── app.css
│   │   ├── hooks/
│   │   │   ├── useDebounce.ts
│   │   │   ├── useDiskDisplayData.ts
│   │   │   ├── useDownloads.ts
│   │   │   ├── useEmbedJobs.ts
│   │   │   ├── useErrorNotification.ts
│   │   │   ├── useInternetStatus.ts
│   │   │   ├── useMapRegionFiles.ts
│   │   │   ├── useOllamaModelDownloads.ts
│   │   │   ├── useServiceInstallationActivity.ts
│   │   │   ├── useServiceInstalledStatus.tsx
│   │   │   ├── useSystemInfo.ts
│   │   │   ├── useSystemSetting.ts
│   │   │   ├── useTheme.ts
│   │   │   └── useUpdateAvailable.ts
│   │   ├── layouts/
│   │   │   ├── AppLayout.tsx
│   │   │   ├── DocsLayout.tsx
│   │   │   ├── MapsLayout.tsx
│   │   │   └── SettingsLayout.tsx
│   │   ├── lib/
│   │   │   ├── api.ts
│   │   │   ├── builderTagWords.ts
│   │   │   ├── classNames.ts
│   │   │   ├── collections.ts
│   │   │   ├── navigation.ts
│   │   │   └── util.ts
│   │   ├── pages/
│   │   │   ├── about.tsx
│   │   │   ├── chat.tsx
│   │   │   ├── docs/
│   │   │   │   └── show.tsx
│   │   │   ├── easy-setup/
│   │   │   │   ├── complete.tsx
│   │   │   │   └── index.tsx
│   │   │   ├── errors/
│   │   │   │   ├── not_found.tsx
│   │   │   │   └── server_error.tsx
│   │   │   ├── home.tsx
│   │   │   ├── maps.tsx
│   │   │   └── settings/
│   │   │       ├── apps.tsx
│   │   │       ├── benchmark.tsx
│   │   │       ├── legal.tsx
│   │   │       ├── maps.tsx
│   │   │       ├── models.tsx
│   │   │       ├── support.tsx
│   │   │       ├── system.tsx
│   │   │       ├── update.tsx
│   │   │       └── zim/
│   │   │           ├── index.tsx
│   │   │           └── remote-explorer.tsx
│   │   ├── providers/
│   │   │   ├── ModalProvider.tsx
│   │   │   ├── NotificationProvider.tsx
│   │   │   └── ThemeProvider.tsx
│   │   └── tsconfig.json
│   ├── package.json
│   ├── providers/
│   │   └── map_static_provider.ts
│   ├── resources/
│   │   └── views/
│   │       └── inertia_layout.edge
│   ├── start/
│   │   ├── env.ts
│   │   ├── kernel.ts
│   │   └── routes.ts
│   ├── tailwind.config.ts
│   ├── tests/
│   │   └── bootstrap.ts
│   ├── tsconfig.json
│   ├── types/
│   │   ├── benchmark.ts
│   │   ├── chat.ts
│   │   ├── collections.ts
│   │   ├── docker.ts
│   │   ├── downloads.ts
│   │   ├── files.ts
│   │   ├── kv_store.ts
│   │   ├── maps.ts
│   │   ├── ollama.ts
│   │   ├── rag.ts
│   │   ├── services.ts
│   │   ├── system.ts
│   │   ├── util.ts
│   │   └── zim.ts
│   ├── util/
│   │   ├── docs.ts
│   │   ├── files.ts
│   │   └── zim.ts
│   ├── views/
│   │   └── inertia_layout.edge
│   └── vite.config.ts
├── collections/
│   ├── CATEGORIES-TODO.md
│   ├── kiwix-categories.json
│   ├── maps.json
│   └── wikipedia.json
├── install/
│   ├── collect_disk_info.sh
│   ├── entrypoint.sh
│   ├── install_nomad.sh
│   ├── management_compose.yaml
│   ├── migrate-disk-collector.md
│   ├── migrate-disk-collector.sh
│   ├── run_updater_fixes.sh
│   ├── sidecar-disk-collector/
│   │   ├── Dockerfile
│   │   └── collect-disk-info.sh
│   ├── sidecar-updater/
│   │   ├── Dockerfile
│   │   └── update-watcher.sh
│   ├── start_nomad.sh
│   ├── stop_nomad.sh
│   ├── uninstall_nomad.sh
│   ├── update_nomad.sh
│   ├── wikipedia_en_100_mini_2025-06.zim
│   └── wikipedia_en_100_mini_2026-01.zim
└── package.json

================================================
FILE CONTENTS
================================================

================================================
FILE: .dockerignore
================================================
.env
.env.*
.git
node_modules
*.log
admin/storage
admin/node_modules
admin/build

================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.yml
================================================
name: Bug Report
description: Report a bug or issue with Project N.O.M.A.D.
title: "[Bug]: "
labels: ["bug", "needs-triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to report a bug! Please fill out the information below to help us diagnose and fix the issue.
        
        **Before submitting:**
        - Search existing issues to avoid duplicates
        - Ensure you're running the latest version of N.O.M.A.D.
        - Redact any personal or sensitive information from logs/configs
        - Please don't submit issues related to running N.O.M.A.D. on Unraid or another NAS - we don't have plans to support these kinds of platforms at this time

  - type: dropdown
    id: issue-category
    attributes:
      label: Issue Category
      description: What area is this issue related to?
      options:
        - Installation/Setup
        - AI Assistant (Ollama)
        - Knowledge Base/RAG (Document Upload)
        - Docker/Container Issues
        - GPU Configuration
        - Content Downloads (ZIM, Maps, Collections)
        - Service Management (Start/Stop/Update)
        - System Performance/Resources
        - UI/Frontend Issue
        - Other
    validations:
      required: true

  - type: textarea
    id: description
    attributes:
      label: Bug Description
      description: Provide a clear and concise description of what the bug is
      placeholder: What happened? What did you expect to happen?
    validations:
      required: true

  - type: textarea
    id: reproduction
    attributes:
      label: Steps to Reproduce
      description: How can we reproduce this issue?
      placeholder: |
        1. Go to '...'
        2. Click on '...'
        3. See error
    validations:
      required: true

  - type: textarea
    id: expected-behavior
    attributes:
      label: Expected Behavior
      description: What did you expect to happen?
      placeholder: Describe the expected outcome
    validations:
      required: true

  - type: textarea
    id: actual-behavior
    attributes:
      label: Actual Behavior
      description: What actually happened?
      placeholder: Describe what actually occurred, including any error messages
    validations:
      required: true

  - type: input
    id: nomad-version
    attributes:
      label: N.O.M.A.D. Version
      description: What version of N.O.M.A.D. are you running? (Check Settings > Update or run `docker ps` and check nomad_admin image tag)
      placeholder: "e.g., 1.29.0"
    validations:
      required: true

  - type: dropdown
    id: os
    attributes:
      label: Operating System
      description: What OS are you running N.O.M.A.D. on?
      options:
        - Ubuntu 24.04
        - Ubuntu 22.04
        - Ubuntu 20.04
        - Debian 13 (Trixie)
        - Debian 12 (Bookworm)
        - Debian 11 (Bullseye)
        - Other Debian-based
        - Other (not yet officially supported)
    validations:
      required: true

  - type: input
    id: docker-version
    attributes:
      label: Docker Version
      description: What version of Docker are you running? (`docker --version`)
      placeholder: "e.g., Docker version 24.0.7"

  - type: dropdown
    id: gpu-present
    attributes:
      label: Do you have a dedicated GPU?
      options:
        - "Yes"
        - "No"
        - "Not sure"
    validations:
      required: true

  - type: input
    id: gpu-model
    attributes:
      label: GPU Model (if applicable)
      description: What GPU model do you have? (Check Settings > System or run `nvidia-smi` if NVIDIA GPU)
      placeholder: "e.g., NVIDIA GeForce RTX 3060"

  - type: textarea
    id: system-specs
    attributes:
      label: System Specifications
      description: Provide relevant system specs (CPU, RAM, available disk space)
      placeholder: |
        CPU: 
        RAM: 
        Available Disk Space: 
        GPU (if any): 

  - type: textarea
    id: service-status
    attributes:
      label: Service Status (if relevant)
      description: If this is a service-related issue, what's the status of relevant services? (Check Settings > Apps or run `docker ps`)
      placeholder: |
        Paste output from `docker ps` or describe service states from the UI

  - type: textarea
    id: logs
    attributes:
      label: Relevant Logs
      description: |
        Include any relevant logs or error messages. **Please redact any personal/sensitive information.**
        
        Useful commands for collecting logs:
        - N.O.M.A.D. management app: `docker logs nomad_admin`
        - Ollama: `docker logs nomad_ollama`
        - Qdrant: `docker logs nomad_qdrant`
        - Specific service: `docker logs nomad_<service-name>`
      placeholder: Paste relevant log output here
      render: shell

  - type: textarea
    id: browser-console
    attributes:
      label: Browser Console Errors (if UI issue)
      description: If this is a UI issue, include any errors from your browser's developer console (F12)
      placeholder: Paste browser console errors here
      render: javascript

  - type: textarea
    id: screenshots
    attributes:
      label: Screenshots
      description: If applicable, add screenshots to help explain your problem (drag and drop images here)

  - type: textarea
    id: additional-context
    attributes:
      label: Additional Context
      description: Add any other context about the problem here (network setup, custom configurations, recent changes, etc.)

  - type: checkboxes
    id: terms
    attributes:
      label: Pre-submission Checklist
      description: Please confirm the following before submitting
      options:
        - label: I have searched for existing issues that might be related to this bug
          required: true
        - label: I am running the latest version of Project N.O.M.A.D. (or have noted my version above)
          required: true
        - label: I have redacted any personal or sensitive information from logs and screenshots
          required: true
        - label: This issue is NOT related to running N.O.M.A.D. on an unsupported/non-Debian-based OS
          required: false


================================================
FILE: .github/ISSUE_TEMPLATE/config.yml
================================================
blank_issues_enabled: false
contact_links:
  - name: 💬 Discord Community
    url: https://discord.com/invite/crosstalksolutions
    about: Join our Discord community for general questions, support, and discussions
  - name: 📖 Documentation
    url: https://projectnomad.us
    about: Check the official documentation and guides
  - name: 🏆 Community Leaderboard
    url: https://benchmark.projectnomad.us
    about: View the N.O.M.A.D. benchmark leaderboard
  - name: 🤝 Contributing Guide
    url: https://github.com/Crosstalk-Solutions/project-nomad/blob/main/CONTRIBUTING.md
    about: Learn how to contribute to Project N.O.M.A.D.
  - name: 📅 Roadmap
    url: https://roadmap.projectnomad.us
    about: See our public roadmap, vote on features, and suggest new ones

================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.yml
================================================
name: Feature Request
description: Suggest a new feature or enhancement for Project N.O.M.A.D.
title: "[Feature]: "
labels: ["enhancement", "needs-discussion"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for your interest in improving Project N.O.M.A.D.! Before you submit a feature request, consider checking our [roadmap](https://roadmap.projectnomad.us) to see if it's already planned or in progress. You're welcome to suggest new ideas there if you don't plan on opening PRs yourself.

        
        **Please note:** Feature requests are not guaranteed to be implemented. All requests are evaluated based on alignment with the project's goals, feasibility, and community demand.
        
        **Before submitting:**
        - Search existing feature requests and our [roadmap](https://roadmap.projectnomad.us) to avoid duplicates
        - Consider if this aligns with N.O.M.A.D.'s mission: offline-first knowledge and education
        - Consider the technical feasibility of the feature: N.O.M.A.D. is designed to be containerized and run on a wide range of hardware, so features that require heavy resources (aside from GPU-intensive tasks) or complex host configurations may be less likely to be implemented
        - Consider the scope of the feature: Small, focused enhancements that can be implemented incrementally are more likely to be implemented than large, broad features that would require significant development effort or have an unclear path forward
        - If you're able to contribute code, testing, or documentation, that significantly increases the chances of your feature being implemented

  - type: dropdown
    id: feature-category
    attributes:
      label: Feature Category
      description: What area does this feature relate to?
      options:
        - New Service/Tool Integration
        - AI Assistant Enhancement
        - Knowledge Base/RAG Improvement
        - Content Management (ZIM, Maps, Collections)
        - UI/UX Improvement
        - System Management
        - Performance Optimization
        - Documentation
        - Security
        - Other
    validations:
      required: true

  - type: textarea
    id: problem
    attributes:
      label: Problem Statement
      description: What problem does this feature solve? Is your feature request related to a pain point?
      placeholder: I find it frustrating when... / It would be helpful if... / Users struggle with...
    validations:
      required: true

  - type: textarea
    id: solution
    attributes:
      label: Proposed Solution
      description: Describe the feature or enhancement you'd like to see
      placeholder: Add a feature that... / Change the behavior to... / Integrate with...
    validations:
      required: true

  - type: textarea
    id: alternatives
    attributes:
      label: Alternative Solutions
      description: Have you considered any alternative solutions or workarounds?
      placeholder: I've tried... / Another approach could be... / A workaround is...

  - type: textarea
    id: use-case
    attributes:
      label: Use Case
      description: Describe a specific scenario where this feature would be valuable
      placeholder: |
        As a [type of user], when I [do something], I want to [accomplish something] so that [benefit].
        
        Example: Because I have a dedicated GPU, I want to be able to see in the UI if GPU support is enabled so that I can optimize performance and troubleshoot issues more easily.

  - type: dropdown
    id: user-type
    attributes:
      label: Who would benefit from this feature?
      description: What type of users would find this most valuable?
      multiple: true
      options:
        - Individual/Home Users
        - Families
        - Teachers/Educators
        - Students
        - Survivalists/Preppers
        - Developers/Contributors
        - Organizations
        - All Users
    validations:
      required: true

  - type: dropdown
    id: priority
    attributes:
      label: How important is this feature to you?
      options:
        - Critical - Blocking my use of N.O.M.A.D.
        - High - Would significantly improve my experience
        - Medium - Would be nice to have
        - Low - Minor convenience
    validations:
      required: true

  - type: textarea
    id: implementation-ideas
    attributes:
      label: Implementation Ideas (Optional)
      description: If you have technical suggestions for how this could be implemented, share them here
      placeholder: This could potentially use... / It might integrate with... / A possible approach is...

  - type: textarea
    id: examples
    attributes:
      label: Examples or References
      description: Are there similar features in other applications? Include links, screenshots, or descriptions
      placeholder: Similar to how [app name] does... / See this example at [URL]

  - type: dropdown
    id: willing-to-contribute
    attributes:
      label: Would you be willing to help implement this?
      description: Contributing increases the likelihood of implementation
      options:
        - "Yes - I can write the code"
        - "Yes - I can help test"
        - "Yes - I can help with documentation"
        - "Maybe - with guidance"
        - "No - I don't have the skills/time"
    validations:
      required: true

  - type: textarea
    id: additional-context
    attributes:
      label: Additional Context
      description: Add any other context, mockups, diagrams, or information about the feature request

  - type: checkboxes
    id: checklist
    attributes:
      label: Pre-submission Checklist
      description: Please confirm the following before submitting
      options:
        - label: I have searched for existing feature requests that might be similar
          required: true
        - label: This feature aligns with N.O.M.A.D.'s mission of offline-first knowledge and education
          required: true
        - label: I understand that feature requests are not guaranteed to be implemented
          required: true


================================================
FILE: .github/dependabot.yaml
================================================
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/admin"
    schedule:
      interval: "weekly"
    target-branch: "rc"

================================================
FILE: .github/scripts/finalize-release-notes.sh
================================================
#!/usr/bin/env bash
#
# finalize-release-notes.sh
#
# Stamps the "## Unreleased" section in a release-notes file with a version
# and date, and extracts the section content for use in GitHub releases / email.
# Also includes all commits since the last release for complete transparency.
#
# Usage:  finalize-release-notes.sh <version> <file-path>
#
# Exit codes:
#   0 - Success: section stamped and extracted
#   1 - No "## Unreleased" section found (skip gracefully)
#   2 - Unreleased section exists but is empty (skip gracefully)

set -euo pipefail

VERSION="${1:?Usage: finalize-release-notes.sh <version> <file-path>}"
FILE="${2:?Usage: finalize-release-notes.sh <version> <file-path>}"

if [[ ! -f "$FILE" ]]; then
  echo "Error: File not found: $FILE" >&2
  exit 1
fi

# Find the line number of the ## Unreleased header (case-insensitive).
# The trailing '|| true' is required: under 'set -e -o pipefail' a no-match
# grep exits 1, which would abort the script here — before the graceful
# "Skipping" branch below ever gets a chance to run.
HEADER_LINE=$(grep -inm1 '^## unreleased' "$FILE" | cut -d: -f1 || true)

if [[ -z "$HEADER_LINE" ]]; then
  echo "No '## Unreleased' section found. Skipping."
  exit 1
fi

TOTAL_LINES=$(wc -l < "$FILE")

# Find the next section header (## Version ...) or --- separator after the Unreleased header.
# '|| true' again protects against pipefail + set -e: when Unreleased is the
# LAST section of the file, grep finds nothing and must not kill the script —
# we fall through to END_LINE=TOTAL_LINES instead.
NEXT_SECTION_LINE=""
if [[ $HEADER_LINE -lt $TOTAL_LINES ]]; then
  NEXT_SECTION_LINE=$(tail -n +"$((HEADER_LINE + 1))" "$FILE" \
    | grep -nm1 '^## \|^---$' \
    | cut -d: -f1 \
    || true)
fi

if [[ -n "$NEXT_SECTION_LINE" ]]; then
  # NEXT_SECTION_LINE is relative to HEADER_LINE+1, convert to absolute
  END_LINE=$((HEADER_LINE + NEXT_SECTION_LINE - 1))
else
  # Section runs to end of file
  END_LINE=$TOTAL_LINES
fi

# Extract content between header and next section (exclusive of both boundaries)
CONTENT_START=$((HEADER_LINE + 1))
CONTENT_END=$END_LINE

# Extract the section body (between header line and the next boundary);
# the second sed collapses runs of consecutive blank lines.
SECTION_BODY=$(sed -n "${CONTENT_START},${CONTENT_END}p" "$FILE" | sed '/^$/N;/^\n$/d')

# Check for actual content: strip blank lines and lines that are only markdown headers (###...)
TRIMMED=$(echo "$SECTION_BODY" | sed '/^[[:space:]]*$/d')
HAS_CONTENT=$(echo "$SECTION_BODY" | sed '/^[[:space:]]*$/d' | grep -v '^###' || true)

if [[ -z "$TRIMMED" || -z "$HAS_CONTENT" ]]; then
  echo "Unreleased section is empty. Skipping."
  exit 2
fi

# Format the date as "Month Day, Year" (%-d is a GNU date extension: no zero padding)
DATE_STAMP=$(date +'%B %-d, %Y')
NEW_HEADER="## Version ${VERSION} - ${DATE_STAMP}"

# Build the replacement: swap the header line, keep everything else intact
{
  # Lines before the Unreleased header
  if [[ $HEADER_LINE -gt 1 ]]; then
    head -n "$((HEADER_LINE - 1))" "$FILE"
  fi
  # New versioned header
  echo "$NEW_HEADER"
  # Content between header and next section
  sed -n "${CONTENT_START},${CONTENT_END}p" "$FILE"
  # Rest of the file after the section
  if [[ $END_LINE -lt $TOTAL_LINES ]]; then
    tail -n +"$((END_LINE + 1))" "$FILE"
  fi
} > "${FILE}.tmp"

mv "${FILE}.tmp" "$FILE"

# Get commits since the last release (empty string if this is the first tag)
LAST_TAG=$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || echo "")
COMMIT_LIST=""

if [[ -n "$LAST_TAG" ]]; then
  echo "Fetching commits since ${LAST_TAG}..."
  # Get commits between last tag and HEAD, excluding merge commits and skip ci commits
  COMMIT_LIST=$(git log "${LAST_TAG}..HEAD" \
    --no-merges \
    --pretty=format:"- %s ([%h](https://github.com/${GITHUB_REPOSITORY}/commit/%H))" \
    --grep="\[skip ci\]" --invert-grep \
    || echo "")
else
  echo "No previous tag found, fetching all commits..."
  COMMIT_LIST=$(git log \
    --no-merges \
    --pretty=format:"- %s ([%h](https://github.com/${GITHUB_REPOSITORY}/commit/%H))" \
    --grep="\[skip ci\]" --invert-grep \
    || echo "")
fi

# Write the extracted section content (for GitHub release body / future email)
{
  echo "$NEW_HEADER"
  echo ""
  if [[ -n "$TRIMMED" ]]; then
    echo "$TRIMMED"
    echo ""
  fi

  # Add commit history if available
  if [[ -n "$COMMIT_LIST" ]]; then
    echo "---"
    echo ""
    echo "### 📝 All Changes"
    echo ""
    echo "$COMMIT_LIST"
  fi
} > "${FILE}.section"

echo "Finalized release notes for v${VERSION}"
echo "  Updated: ${FILE}"
echo "  Extracted: ${FILE}.section"
exit 0


================================================
FILE: .github/workflows/build-disk-collector.yml
================================================
name: Build Disk Collector Image

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Semantic version to label the Docker image under (no "v" prefix, e.g. "1.2.3")'
        required: true
        type: string
      tag_latest:
        description: 'Also tag this image as :latest?'
        required: false
        type: boolean
        default: false

jobs:
  check_authorization:
    name: Check authorization to publish new Docker image
    runs-on: ubuntu-latest
    outputs:
      isAuthorized: ${{ steps.check-auth.outputs.is_authorized }}
    steps:
      - name: check-auth
        id: check-auth
        run: echo "is_authorized=${{ contains(secrets.DEPLOYMENT_AUTHORIZED_USERS, github.triggering_actor) }}" >> $GITHUB_OUTPUT
  build:
    name: Build disk-collector image
    needs: check_authorization
    if: needs.check_authorization.outputs.isAuthorized == 'true'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: install/sidecar-disk-collector
          push: true
          tags: |
            ghcr.io/crosstalk-solutions/project-nomad-disk-collector:${{ inputs.version }}
            ghcr.io/crosstalk-solutions/project-nomad-disk-collector:v${{ inputs.version }}
            ${{ inputs.tag_latest && 'ghcr.io/crosstalk-solutions/project-nomad-disk-collector:latest' || '' }}


================================================
FILE: .github/workflows/build-primary-image.yml
================================================
name: Build Primary Docker Image

# Manually-triggered build that publishes the primary project-nomad image to
# GHCR, tagged with the given version both with and without a "v" prefix.
on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Semantic version to label the Docker image under (no "v" prefix, e.g. "1.2.3")'
        required: true
        type: string
      tag_latest:
        description: 'Also tag this image as :latest? (Keep false for RC and beta releases)'
        required: false
        type: boolean
        default: false

jobs:
  # Gate publishing on the triggering user appearing in the
  # DEPLOYMENT_AUTHORIZED_USERS secret.
  check_authorization:
    name: Check authorization to publish new Docker image
    runs-on: ubuntu-latest
    outputs:
      isAuthorized: ${{ steps.check-auth.outputs.is_authorized }}
    steps:
      - name: check-auth
        id: check-auth
        # NOTE(review): contains() is a substring match, so an actor whose name
        # is a substring of an authorized entry would also pass — confirm the
        # secret's format guards against this (e.g. delimited usernames).
        run: echo "is_authorized=${{ contains(secrets.DEPLOYMENT_AUTHORIZED_USERS, github.triggering_actor) }}" >> $GITHUB_OUTPUT
  build:
    name: Build Docker image
    needs: check_authorization
    if: needs.check_authorization.outputs.isAuthorized == 'true'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # github.event.workflow_run is only populated for workflow_run-triggered
      # workflows; this workflow is workflow_dispatch, so that context was
      # always empty and BUILD_DATE was blank. Compute the timestamp explicitly
      # (RFC 3339 / ISO 8601 UTC, as expected by org.opencontainers.image.created).
      - name: Set build date
        id: build-date
        run: echo "date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          push: true
          # When tag_latest is false the third entry renders as an empty line.
          tags: |
            ghcr.io/crosstalk-solutions/project-nomad:${{ inputs.version }}
            ghcr.io/crosstalk-solutions/project-nomad:v${{ inputs.version }}
            ${{ inputs.tag_latest && 'ghcr.io/crosstalk-solutions/project-nomad:latest' || '' }}
          build-args: |
            VERSION=${{ inputs.version }}
            BUILD_DATE=${{ steps.build-date.outputs.date }}
            VCS_REF=${{ github.sha }}


================================================
FILE: .github/workflows/build-sidecar-updater.yml
================================================
name: Build Sidecar Updater Image

# Manually-triggered build that publishes the sidecar-updater image to GHCR,
# tagged with the given version both with and without a "v" prefix.
on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Semantic version to label the Docker image under (no "v" prefix, e.g. "1.2.3")'
        required: true
        type: string
      tag_latest:
        description: 'Also tag this image as :latest?'
        required: false
        type: boolean
        default: false

jobs:
  # Gate publishing on the triggering user appearing in the
  # DEPLOYMENT_AUTHORIZED_USERS secret.
  check_authorization:
    name: Check authorization to publish new Docker image
    runs-on: ubuntu-latest
    outputs:
      isAuthorized: ${{ steps.check-auth.outputs.is_authorized }}
    steps:
      - name: check-auth
        id: check-auth
        # NOTE(review): contains() is a substring match, so an actor whose name
        # is a substring of an authorized entry would also pass — confirm the
        # secret's format guards against this (e.g. delimited usernames).
        run: echo "is_authorized=${{ contains(secrets.DEPLOYMENT_AUTHORIZED_USERS, github.triggering_actor) }}" >> $GITHUB_OUTPUT
  build:
    name: Build sidecar-updater image
    needs: check_authorization
    if: needs.check_authorization.outputs.isAuthorized == 'true'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          # Dockerfile lives alongside the sidecar's sources.
          context: install/sidecar-updater
          push: true
          # When tag_latest is false the third entry renders as an empty line
          # (presumably skipped when the action parses the tag list — confirm).
          tags: |
            ghcr.io/crosstalk-solutions/project-nomad-sidecar-updater:${{ inputs.version }}
            ghcr.io/crosstalk-solutions/project-nomad-sidecar-updater:v${{ inputs.version }}
            ${{ inputs.tag_latest && 'ghcr.io/crosstalk-solutions/project-nomad-sidecar-updater:latest' || '' }}


================================================
FILE: .github/workflows/release.yml
================================================
name: Release SemVer

# Cuts a new semantic-release version, then (for stable releases only)
# finalizes the human-readable release notes, commits them back to main, and
# copies them into the GitHub release body. Step order matters: semantic-release
# pushes a version-bump commit first, so the finalize step pulls before editing.
on: workflow_dispatch

jobs:
  # Only users listed in the DEPLOYMENT_AUTHORIZED_USERS secret may release.
  check_authorization:
    name: Check authorization to release new version
    runs-on: ubuntu-latest
    outputs:
      isAuthorized: ${{ steps.check-auth.outputs.is_authorized }}
    steps:
      - name: check-auth
        id: check-auth
        # NOTE(review): contains() is a substring match, so an actor whose name
        # is a substring of an authorized entry would also pass — confirm the
        # secret's format guards against this (e.g. delimited usernames).
        run: echo "is_authorized=${{ contains(secrets.DEPLOYMENT_AUTHORIZED_USERS, github.triggering_actor) }}" >> $GITHUB_OUTPUT
  release:
    name: Release
    needs: check_authorization
    if: needs.check_authorization.outputs.isAuthorized == 'true'
    runs-on: ubuntu-latest
    outputs:
      didRelease: ${{ steps.semver.outputs.new_release_published }}
      newVersion: ${{ steps.semver.outputs.new_release_version }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history so semantic-release can analyze commits since the last tag.
          fetch-depth: 0
          # Later steps authenticate with the bot token instead of the default
          # credentials written by checkout.
          persist-credentials: false
      # Runs commit analysis and, when warranted, tags/publishes the release
      # as the cosmistack-bot identity.
      - name: semantic-release
        uses: cycjimmy/semantic-release-action@v3
        id: semver
        env:
          GITHUB_TOKEN: ${{ secrets.COSMISTACKBOT_ACCESS_TOKEN }}
          GIT_AUTHOR_NAME: cosmistack-bot
          GIT_AUTHOR_EMAIL: dev@cosmistack.com
          GIT_COMMITTER_NAME: cosmistack-bot
          GIT_COMMITTER_EMAIL: dev@cosmistack.com

      # Stamps the "Unreleased" section with the new version. A non-zero exit
      # from the script presumably means there were no notes to finalize — the
      # step records has_notes=false instead of failing (verify against
      # .github/scripts/finalize-release-notes.sh).
      - name: Finalize release notes
        # Skip for pre-releases (versions containing a hyphen, e.g. 1.27.0-rc.1)
        if: |
          steps.semver.outputs.new_release_published == 'true' &&
          !contains(steps.semver.outputs.new_release_version, '-')
        id: finalize-notes
        env:
          GITHUB_REPOSITORY: ${{ github.repository }}
        run: |
          git pull origin main
          chmod +x .github/scripts/finalize-release-notes.sh
          EXIT_CODE=0
          .github/scripts/finalize-release-notes.sh \
            "${{ steps.semver.outputs.new_release_version }}" \
            admin/docs/release-notes.md || EXIT_CODE=$?
          if [[ "$EXIT_CODE" -eq 0 ]]; then
            echo "has_notes=true" >> $GITHUB_OUTPUT
          else
            echo "has_notes=false" >> $GITHUB_OUTPUT
          fi

      # Pushes the stamped release-notes file back to main as the bot.
      # "[skip ci]" prevents this commit from re-triggering CI.
      - name: Commit finalized release notes
        if: |
          steps.semver.outputs.new_release_published == 'true' &&
          steps.finalize-notes.outputs.has_notes == 'true' &&
          !contains(steps.semver.outputs.new_release_version, '-')
        run: |
          git config user.name "cosmistack-bot"
          git config user.email "dev@cosmistack.com"
          git remote set-url origin https://x-access-token:${{ secrets.COSMISTACKBOT_ACCESS_TOKEN }}@github.com/${{ github.repository }}.git
          git add admin/docs/release-notes.md
          git commit -m "docs(release): finalize v${{ steps.semver.outputs.new_release_version }} release notes [skip ci]"
          git push origin main

      # Replaces the auto-generated GitHub release body with the curated notes
      # section (the ".section" file is presumably produced by the finalize
      # script above — verify).
      - name: Update GitHub release body
        if: |
          steps.semver.outputs.new_release_published == 'true' &&
          steps.finalize-notes.outputs.has_notes == 'true' &&
          !contains(steps.semver.outputs.new_release_version, '-')
        env:
          GH_TOKEN: ${{ secrets.COSMISTACKBOT_ACCESS_TOKEN }}
        run: |
          gh release edit "v${{ steps.semver.outputs.new_release_version }}" \
            --notes-file admin/docs/release-notes.md.section

      # Future: Send release notes email
      # - name: Send release notes email
      #   if: steps.semver.outputs.new_release_published == 'true' && steps.finalize-notes.outputs.has_notes == 'true'
      #   run: |
      #     curl -X POST "https://api.projectnomad.us/api/v1/newsletter/release" \
      #       -H "Authorization: Bearer ${{ secrets.NOMAD_API_KEY }}" \
      #       -H "Content-Type: application/json" \
      #       -d "{\"version\": \"${{ steps.semver.outputs.new_release_version }}\", \"body\": $(cat admin/docs/release-notes.md.section | jq -Rs .)}"

================================================
FILE: .github/workflows/validate-collection-urls.yml
================================================
name: Validate Collection URLs

# Checks every download URL referenced by the collection manifests whenever
# they change, so broken links are caught in CI before reaching users.
on:
  push:
    paths:
      - 'collections/**.json'
  pull_request:
    paths:
      - 'collections/**.json'

jobs:
  validate-urls:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Extract and validate URLs
        run: |
          FAILED=0
          CHECKED=0
          FAILED_URLS=""

          # Recursively extract all non-null string URLs from every JSON file in collections/
          URLS=$(jq -r '.. | .url? | select(type == "string")' collections/*.json | sort -u)

          while IFS= read -r url; do
            [ -z "$url" ] && continue
            CHECKED=$((CHECKED + 1))
            printf "Checking: %s ... " "$url"

            # Use Range: bytes=0-0 to avoid downloading the full file.
            # --max-filesize 1 aborts early if the server ignores the Range header
            # and returns 200 with the full body. The HTTP status is still captured.
            #
            # '|| true' is required: GitHub runs this script with bash -e, and
            # curl exits non-zero on timeouts, DNS failures, and the deliberate
            # --max-filesize abort (exit 63). Without it a single bad URL would
            # abort the whole job instead of being reported below.
            HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" \
              --range 0-0 \
              --max-filesize 1 \
              --max-time 30 \
              --location \
              "$url" || true)
            # If curl produced no status at all (e.g. DNS failure), treat it as 000.
            HTTP_CODE=${HTTP_CODE:-000}

            if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "206" ]; then
              echo "OK ($HTTP_CODE)"
            else
              echo "FAILED ($HTTP_CODE)"
              FAILED=$((FAILED + 1))
              FAILED_URLS="$FAILED_URLS\n  - $url (HTTP $HTTP_CODE)"
            fi
          done <<< "$URLS"

          echo ""
          echo "Checked $CHECKED URLs, $FAILED failed."

          if [ "$FAILED" -gt 0 ]; then
            echo ""
            echo "Broken URLs:"
            printf "%b\n" "$FAILED_URLS"
            exit 1
          fi


================================================
FILE: .gitignore
================================================
# Logs
logs
*.log

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/

# Optional npm cache directory
.npm

# dotenv environment variables file
.env

# Build / Dist
dist
build
tmp

# macOS Metafiles
.DS_Store

# Fonts
# ".ttf" alone only matched a file literally named ".ttf"; use a glob so all
# TrueType font files are ignored.
*.ttf

# Runtime-generated Files
server/public
server/temp

# IDE Files
.vscode
.idea

# Frontend assets compiled code
admin/public/assets

# Admin specific development files
admin/storage


================================================
FILE: .releaserc.json
================================================
{
  "branches": [
    "main",
    { "name": "rc", "prerelease": "rc" }
  ],
  "plugins": [
    "@semantic-release/commit-analyzer",
    "@semantic-release/release-notes-generator",
    ["@semantic-release/npm", {
      "npmPublish": false
    }],
    ["@semantic-release/git", {
      "assets": ["package.json"],
      "message": "chore(release): ${nextRelease.version} [skip ci]"
    }],
    "@semantic-release/github"
  ]
}

================================================
FILE: CODE_OF_CONDUCT.md
================================================
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.


================================================
FILE: CONTRIBUTING.md
================================================
# Contributing to Project N.O.M.A.D.

Thank you for your interest in contributing to Project N.O.M.A.D.! Community contributions are what keep this project growing and improving. Please read this guide fully before getting started — it will save you (and the maintainers) a lot of time.

> **Note:** Acceptance of contributions is not guaranteed. All pull requests are evaluated based on quality, relevance, and alignment with the project's goals. The maintainers of Project N.O.M.A.D. ("Nomad") reserve the right to accept, deny, or modify any pull request at their sole discretion.

---

## Table of Contents

- [Code of Conduct](#code-of-conduct)
- [Before You Start](#before-you-start)
- [Getting Started with Contributing](#getting-started-with-contributing)
- [Development Workflow](#development-workflow)
- [Commit Messages](#commit-messages)
- [Release Notes](#release-notes)
- [Versioning](#versioning)
- [Submitting a Pull Request](#submitting-a-pull-request)
- [Feedback & Community](#feedback--community)

---

## Code of Conduct

Please read and review our full [Code of Conduct](https://github.com/Crosstalk-Solutions/project-nomad/blob/main/CODE_OF_CONDUCT.md) before contributing. In short: please be respectful and considerate in all interactions with maintainers and other contributors.

We are committed to providing a welcoming environment for everyone. Disrespectful or abusive behavior will not be tolerated. 

---

## Before You Start

**Open an issue first.** Before writing any code, please [open an issue](../../issues/new) to discuss your proposed change. This helps avoid duplicate work and ensures your contribution aligns with the project's direction.

When opening an issue:
- Use a clear, descriptive title
- Describe the problem you're solving or the feature you want to add
- If it's a bug, include steps to reproduce it and as much detail about your environment as possible
- Ensure you redact any personal or sensitive information in any logs, configs, etc.

---

## Getting Started with Contributing
**Please note**: this is the Getting Started guide for developing and contributing to Nomad, NOT [installing Nomad](https://github.com/Crosstalk-Solutions/project-nomad/blob/main/README.md) for regular use! 

### Prerequisites

- A Debian-based OS (Ubuntu recommended)
- `sudo`/root privileges
- Docker installed and running
- A stable internet connection (required for dependency downloads)
- Node.js (for frontend/admin work)

### Fork & Clone

1. Click **Fork** at the top right of this repository
2. Clone your fork locally:
   ```bash
   git clone https://github.com/YOUR_USERNAME/project-nomad.git
   cd project-nomad
   ```
3. Add the upstream remote so you can stay in sync:
   ```bash
   git remote add upstream https://github.com/Crosstalk-Solutions/project-nomad.git
   ```

### Avoid Installing a Release Version Locally
Because Nomad relies heavily on Docker, we actually recommend against installing a release version of the project on the same local machine where you are developing. This can lead to conflicts with ports, volumes, and other resources. Instead, you can run your development version in a separate Docker environment while keeping your local machine clean. It certainly __can__ be done, but it adds complexity to your setup and workflow. If you choose to install a release version locally, please ensure you have a clear strategy for managing potential conflicts and resource usage.

---

## Development Workflow

1. **Sync with upstream** before starting any new work. We prefer rebasing over merge commits to keep a clean, linear git history as much as possible (this also makes it easier for maintainers to review and merge your changes). To sync with upstream:
   ```bash
   git fetch upstream
   git checkout main
   git rebase upstream/main
   ```

2. **Create a feature branch** off `main` with a descriptive name:
   ```bash
   git checkout -b fix/issue-123
   # or
   git checkout -b feature/add-new-tool
   ```

3. **Make your changes.** Follow existing code style and conventions. Test your changes locally against a running N.O.M.A.D. instance before submitting.

4. **Add release notes** (see [Release Notes](#release-notes) below).

5. **Commit your changes** using [Conventional Commits](#commit-messages).

6. **Push your branch** and open a pull request.

---

## Commit Messages

This project uses [Conventional Commits](https://www.conventionalcommits.org/). All commit messages must follow this format:

```
<type>(<scope>): <description>
```

**Common types:**

| Type | When to use |
|------|-------------|
| `feat` | A new user-facing feature |
| `fix` | A bug fix |
| `docs` | Documentation changes only |
| `refactor` | Code change that isn't a fix or feature and does not affect functionality |
| `chore` | Build process, dependency updates, tooling |
| `test` | Adding or updating tests |

**Scope** is optional but encouraged — use it to indicate the area of the codebase affected (e.g., `api`, `ui`, `maps`).

**Examples:**
```
feat(ui): add dark mode toggle to Command Center
fix(api): resolve container status not updating after restart
docs: update hardware requirements in README
chore(deps): bump docker-compose to v2.24
```

---

## Release Notes

Human-readable release notes live in [`admin/docs/release-notes.md`](admin/docs/release-notes.md) and are displayed directly in the Command Center UI.

When your changes include anything user-facing, **add a summary to the `## Unreleased` section** at the top of that file under the appropriate heading:

- **Features** — new user-facing capabilities
- **Bug Fixes** — corrections to existing behavior
- **Improvements** — enhancements, refactors, docs, or dependency updates

Use the format `- **Area**: Description` to stay consistent with existing entries.

**Example:**
```markdown
## Unreleased

### Features
- **Maps**: Added support for downloading South America regional maps

### Bug Fixes
- **AI Chat**: Fixed document upload failing on filenames with special characters
```

> When a release is triggered, CI automatically stamps the version and date, commits the update, and publishes the content to the GitHub release. You do not need to do this manually.

---

## Versioning

This project uses [Semantic Versioning](https://semver.org/). Versions are managed in the root `package.json` and updated automatically by `semantic-release`. The `project-nomad` Docker image uses this version. The `admin/package.json` version stays at `0.0.0` and should not be changed manually.

---

## Submitting a Pull Request

1. Push your branch to your fork:
   ```bash
   git push origin your-branch-name
   ```
2. Open a pull request against the `main` branch of this repository
3. In the PR description:
   - Summarize what your changes do and why
   - Reference the related issue (e.g., `Closes #123`)
   - Note any relevant testing steps or environment details
4. Be responsive to feedback — maintainers may request changes. Pull requests with no activity for an extended period may be closed.

---

## Feedback & Community

Have questions or want to discuss ideas before opening an issue? Join the community:

- **Discord:** [Join the Crosstalk Solutions server](https://discord.com/invite/crosstalksolutions) — the best place to get help, share your builds, and talk with other N.O.M.A.D. users
- **Website:** [www.projectnomad.us](https://www.projectnomad.us)
- **Benchmark Leaderboard:** [benchmark.projectnomad.us](https://benchmark.projectnomad.us)

---

*Project N.O.M.A.D. is licensed under the [Apache License 2.0](LICENSE).*

================================================
FILE: Dockerfile
================================================
FROM node:22-slim AS base

# Install bash & curl for entrypoint script compatibility, graphicsmagick for
# pdf2pic, and libvips-dev & build-essential for sharp. Remove the apt lists in
# the same layer so they don't bloat every downstream stage.
RUN apt-get update \
    && apt-get install -y bash curl graphicsmagick libvips-dev build-essential \
    && rm -rf /var/lib/apt/lists/*

# All deps stage — full dependency tree (incl. dev deps) for building.
# COPY is preferred over ADD for plain local files (no auto-extract/URL magic).
FROM base AS deps
WORKDIR /app
COPY admin/package.json admin/package-lock.json ./
RUN npm ci

# Production only deps stage
FROM base AS production-deps
WORKDIR /app
COPY admin/package.json admin/package-lock.json ./
RUN npm ci --omit=dev

# Build stage
FROM base AS build
WORKDIR /app
COPY --from=deps /app/node_modules /app/node_modules
COPY admin/ ./
RUN node ace build

# Production stage
FROM base
ARG VERSION=dev
ARG BUILD_DATE
ARG VCS_REF

# OCI image labels. The GitHub org is "Crosstalk-Solutions" (hyphenated) —
# this matches the ghcr.io namespace used by the build workflows; the previous
# unhyphenated URLs pointed at a nonexistent repository.
LABEL org.opencontainers.image.title="Project N.O.M.A.D" \
      org.opencontainers.image.description="The Project N.O.M.A.D Official Docker image" \
      org.opencontainers.image.version="${VERSION}" \
      org.opencontainers.image.created="${BUILD_DATE}" \
      org.opencontainers.image.revision="${VCS_REF}" \
      org.opencontainers.image.vendor="Crosstalk Solutions, LLC" \
      org.opencontainers.image.documentation="https://github.com/Crosstalk-Solutions/project-nomad/blob/main/README.md" \
      org.opencontainers.image.source="https://github.com/Crosstalk-Solutions/project-nomad" \
      org.opencontainers.image.licenses="Apache-2.0"

ENV NODE_ENV=production
WORKDIR /app
COPY --from=production-deps /app/node_modules /app/node_modules
COPY --from=build /app/build /app
# Copy root package.json for version info
COPY package.json /app/version.json

# Copy docs and README for access within the container
COPY admin/docs /app/docs
COPY README.md /app/README.md

# Copy entrypoint script and ensure it's executable
COPY install/entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

EXPOSE 8080
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]

================================================
FILE: LICENSE
================================================
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to the Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by the Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding any notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   Copyright 2024-2026 Crosstalk Solutions LLC

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.


================================================
FILE: README.md
================================================
<div align="center">
<img src="https://raw.githubusercontent.com/Crosstalk-Solutions/project-nomad/refs/heads/main/admin/public/project_nomad_logo.png" width="200" height="200"/>

# Project N.O.M.A.D.
### Node for Offline Media, Archives, and Data

**Knowledge That Never Goes Offline**

[![Website](https://img.shields.io/badge/Website-projectnomad.us-blue)](https://www.projectnomad.us)
[![Discord](https://img.shields.io/badge/Discord-Join%20Community-5865F2)](https://discord.com/invite/crosstalksolutions)
[![Benchmark](https://img.shields.io/badge/Benchmark-Leaderboard-green)](https://benchmark.projectnomad.us)

</div>

---

Project N.O.M.A.D. is a self-contained, offline-first knowledge and education server packed with critical tools, knowledge, and AI to keep you informed and empowered—anytime, anywhere.

## Installation & Quickstart
Project N.O.M.A.D. can be installed on any Debian-based operating system (we recommend Ubuntu). Installation is completely terminal-based, and all tools and resources are designed to be accessed through the browser, so there's no need for a desktop environment if you'd rather set up N.O.M.A.D. as a "server" and access it through other clients.

*Note: sudo/root privileges are required to run the install script*

#### Quick Install (Debian-based OS Only)
```bash
sudo apt-get update && sudo apt-get install -y curl && curl -fsSL https://raw.githubusercontent.com/Crosstalk-Solutions/project-nomad/refs/heads/main/install/install_nomad.sh -o install_nomad.sh && sudo bash install_nomad.sh
```

Project N.O.M.A.D. is now installed on your device! Open a browser and navigate to `http://localhost:8080` (or `http://DEVICE_IP:8080`) to start exploring!

### Advanced Installation
For more control over the installation process, copy and paste the [Docker Compose template](https://raw.githubusercontent.com/Crosstalk-Solutions/project-nomad/refs/heads/main/install/management_compose.yaml) into a `docker-compose.yml` file and customize it to your liking (be sure to replace any placeholders with your actual values). Then, run `docker compose up -d` to start the Command Center and its dependencies. Note: this method is recommended for advanced users only, as it requires familiarity with Docker and manual configuration before starting.

## How It Works
N.O.M.A.D. is a management UI ("Command Center") and API that orchestrates a collection of containerized tools and resources via [Docker](https://www.docker.com/). It handles installation, configuration, and updates for everything — so you don't have to.

**Built-in capabilities include:**
- **AI Chat with Knowledge Base** — local AI chat powered by [Ollama](https://ollama.com/), with document upload and semantic search (RAG via [Qdrant](https://qdrant.tech/))
- **Information Library** — offline Wikipedia, medical references, ebooks, and more via [Kiwix](https://kiwix.org/)
- **Education Platform** — Khan Academy courses with progress tracking via [Kolibri](https://learningequality.org/kolibri/)
- **Offline Maps** — downloadable regional maps via [ProtoMaps](https://protomaps.com)
- **Data Tools** — encryption, encoding, and analysis via [CyberChef](https://gchq.github.io/CyberChef/)
- **Notes** — local note-taking via [FlatNotes](https://github.com/dullage/flatnotes)
- **System Benchmark** — hardware scoring with a [community leaderboard](https://benchmark.projectnomad.us)
- **Easy Setup Wizard** — guided first-time configuration with curated content collections

N.O.M.A.D. also includes built-in tools like a Wikipedia content selector, ZIM library manager, and content explorer.

## What's Included

| Capability | Powered By | What You Get |
|-----------|-----------|-------------|
| Information Library | Kiwix | Offline Wikipedia, medical references, survival guides, ebooks |
| AI Assistant | Ollama + Qdrant | Built-in chat with document upload and semantic search |
| Education Platform | Kolibri | Khan Academy courses, progress tracking, multi-user support |
| Offline Maps | ProtoMaps | Downloadable regional maps with search and navigation |
| Data Tools | CyberChef | Encryption, encoding, hashing, and data analysis |
| Notes | FlatNotes | Local note-taking with markdown support |
| System Benchmark | Built-in | Hardware scoring, Builder Tags, and community leaderboard |

## Device Requirements
While many similar offline survival computers are designed to be run on bare-minimum, lightweight hardware, Project N.O.M.A.D. is quite the opposite. To install and run the
available AI tools, we highly encourage the use of a beefy, GPU-backed device to make the most of your install.

At its core, however, N.O.M.A.D. is still very lightweight. For a barebones installation of the management application itself, the following minimal specs are required:

*Note: Project N.O.M.A.D. is not sponsored by any hardware manufacturer and is designed to be as hardware-agnostic as possible. The hardware listed below is for example/comparison use only*

#### Minimum Specs
- Processor: 2 GHz dual-core processor or better
- RAM: 4GB system memory
- Storage: At least 5 GB free disk space
- OS: Debian-based (Ubuntu recommended)
- Stable internet connection (required during install only)

To run LLM's and other included AI tools:

#### Optimal Specs
- Processor: AMD Ryzen 7 or Intel Core i7 or better
- RAM: 32 GB system memory
- Graphics: NVIDIA RTX 3060 or AMD equivalent or better (more VRAM = run larger models)
- Storage: At least 250 GB free disk space (preferably on SSD)
- OS: Debian-based (Ubuntu recommended)
- Stable internet connection (required during install only)

**For detailed build recommendations at three price points ($150–$1,000+), see the [Hardware Guide](https://www.projectnomad.us/hardware).**

Again, Project N.O.M.A.D. itself is quite lightweight - it's the tools and resources you choose to install with N.O.M.A.D. that will determine the specs required for your unique deployment.

## About Internet Usage & Privacy
Project N.O.M.A.D. is designed for offline usage. An internet connection is only required during the initial installation (to download dependencies) and if you (the user) decide to download additional tools and resources at a later time. Otherwise, N.O.M.A.D. does not require an internet connection and has ZERO built-in telemetry.

To test internet connectivity, N.O.M.A.D. attempts to make a request to Cloudflare's utility endpoint, `https://1.1.1.1/cdn-cgi/trace` and checks for a successful response.

## About Security
By design, Project N.O.M.A.D. is intended to be open and available without hurdles - it includes no authentication. If you decide to connect your device to a local network after install (e.g. for allowing other devices to access its resources), you can block/open ports to control which services are exposed.

**Will authentication be added in the future?** Maybe. It's not currently a priority, but if there's enough demand for it, we may consider building in an optional authentication layer in a future release to support use cases where multiple users need access to the same instance but with different permission levels (e.g. family use with parental controls, classroom use with teacher/admin accounts, etc.). For now, we recommend using network-level controls to manage access if you're planning to expose your N.O.M.A.D. instance to other devices on a local network. N.O.M.A.D. is not designed to be exposed directly to the internet, and we strongly advise against doing so unless you really know what you're doing, have taken appropriate security measures, and understand the risks involved.

## Contributing
Contributions are welcome and appreciated! Please read this section fully to understand how to contribute to the project.

### General Guidelines

- **Open an issue first**: Before starting work on a new feature or bug fix, please open an issue to discuss your proposed changes. This helps ensure that your contribution aligns with the project's goals and avoids duplicate work. Title the issue clearly and provide a detailed description of the problem or feature you want to work on.
- **Fork the repository**: Click the "Fork" button at the top right of the repository page to create a copy of the project under your GitHub account.
- **Create a new branch**: In your forked repository, create a new branch for your work. Use a descriptive name for the branch that reflects the purpose of your changes (e.g., `fix/issue-123` or `feature/add-new-tool`).
- **Make your changes**: Implement your changes in the new branch. Follow the existing code style and conventions used in the project. Be sure to test your changes locally to ensure they work as expected.
- **Add Release Notes**: If your changes include new features, bug fixes, or improvements, please see the "Release Notes" section below to properly document your contribution for the next release.
- **Conventional Commits**: When committing your changes, please use conventional commit messages to provide clear and consistent commit history. The format is `<type>(<scope>): <description>`, where:
  - `type` is the type of change (e.g., `feat` for new features, `fix` for bug fixes, `docs` for documentation changes, etc.)
  - `scope` is an optional area of the codebase that your change affects (e.g., `api`, `ui`, `docs`, etc.)
  - `description` is a brief summary of the change
- **Submit a pull request**: Once your changes are ready, submit a pull request to the main repository. Provide a clear description of your changes and reference any related issues. The project maintainers will review your pull request and may provide feedback or request changes before it can be merged.
- **Be responsive to feedback**: If the maintainers request changes or provide feedback on your pull request, please respond in a timely manner. Stale pull requests may be closed if there is no activity for an extended period.
- **Follow the project's code of conduct**: Please adhere to the project's code of conduct when interacting with maintainers and other contributors. Be respectful and considerate in your communications.
- **No guarantee of acceptance**: The project is community-driven, and all contributions are appreciated, but acceptance is not guaranteed. The maintainers will evaluate each contribution based on its quality, relevance, and alignment with the project's goals.
- **Thank you for contributing to Project N.O.M.A.D.!** Your efforts help make this project better for everyone.

### Versioning
This project uses semantic versioning. The version is managed in the root `package.json` 
and automatically updated by semantic-release. For simplicity's sake, the "project-nomad" image
uses the same version defined there instead of the version in `admin/package.json` (stays at 0.0.0), as it's the only published image derived from the code.

### Release Notes
Human-readable release notes live in [`admin/docs/release-notes.md`](admin/docs/release-notes.md) and are displayed in the Command Center's built-in documentation.

When working on changes, add a summary to the `## Unreleased` section at the top of that file under the appropriate heading:

- **Features** — new user-facing capabilities
- **Bug Fixes** — corrections to existing behavior
- **Improvements** — enhancements, refactors, docs, or dependency updates

Use the format `- **Area**: Description` to stay consistent with existing entries. When a release is triggered, CI automatically stamps the version and date, commits the update, and pushes the content to the GitHub release.

## Community & Resources

- **Website:** [www.projectnomad.us](https://www.projectnomad.us) - Learn more about the project
- **Discord:** [Join the Community](https://discord.com/invite/crosstalksolutions) - Get help, share your builds, and connect with other NOMAD users
- **Benchmark Leaderboard:** [benchmark.projectnomad.us](https://benchmark.projectnomad.us) - See how your hardware stacks up against other NOMAD builds

## License

Project N.O.M.A.D. is licensed under the [Apache License 2.0](LICENSE).

## Helper Scripts
Once installed, Project N.O.M.A.D. has a few helper scripts should you ever need to troubleshoot issues or perform maintenance that can't be done through the Command Center. All of these scripts are found in Project N.O.M.A.D.'s install directory, `/opt/project-nomad`

###

###### Start Script - Starts all installed project containers
```bash
sudo bash /opt/project-nomad/start_nomad.sh
```
###

###### Stop Script - Stops all installed project containers
```bash
sudo bash /opt/project-nomad/stop_nomad.sh
```
###

###### Update Script - Attempts to pull the latest images for the Command Center and its dependencies (i.e. mysql) and recreate the containers. Note: this *only* updates the Command Center containers. It does not update the installable application containers - that should be done through the Command Center UI
```bash
sudo bash /opt/project-nomad/update_nomad.sh
```

###### Uninstall Script - Need to start fresh? Use the uninstall script to make your life easy. Note: this cannot be undone!
```bash
curl -fsSL https://raw.githubusercontent.com/Crosstalk-Solutions/project-nomad/refs/heads/main/install/uninstall_nomad.sh -o uninstall_nomad.sh && sudo bash uninstall_nomad.sh
```

================================================
FILE: admin/.editorconfig
================================================
# http://editorconfig.org

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.json]
insert_final_newline = unset

[**.min.js]
indent_style = unset
insert_final_newline = unset

[MakeFile]
indent_style = space

[*.md]
trim_trailing_whitespace = false


================================================
FILE: admin/ace.js
================================================
/*
|--------------------------------------------------------------------------
| JavaScript entrypoint for running ace commands
|--------------------------------------------------------------------------
|
| DO NOT MODIFY THIS FILE AS IT WILL BE OVERRIDDEN DURING THE BUILD
| PROCESS.
|
| See docs.adonisjs.com/guides/typescript-build-process#creating-production-build
|
| Since we cannot run TypeScript source code using the "node" binary
| directly, we need a JavaScript entrypoint to run ace commands.
|
| This file registers the ts-node ESM hook (via ts-node-maintained) with
| the Node.js module system and then imports "bin/console.js", which the
| hook resolves from the TypeScript source "bin/console.ts".
|
*/

/**
 * Register hook to process TypeScript files using ts-node-maintained
 */
import 'ts-node-maintained/register/esm'

/**
 * Import ace console entrypoint (top-level await; this file is an ES module)
 */
await import('./bin/console.js')


================================================
FILE: admin/adonisrc.ts
================================================
import { defineConfig } from '@adonisjs/core/app'

// AdonisJS application configuration (the "adonisrc" file). Note: provider and
// preload order is meaningful — entries are booted in the order listed here.
export default defineConfig({
  /*
  |--------------------------------------------------------------------------
  | Experimental flags
  |--------------------------------------------------------------------------
  |
  | The following features will be enabled by default in the next major release
  | of AdonisJS. You can opt into them today to avoid any breaking changes
  | during upgrade.
  |
  */
  experimental: {
    mergeMultipartFieldsAndFiles: true,
    shutdownInReverseOrder: true,
  },

  /*
  |--------------------------------------------------------------------------
  | Commands
  |--------------------------------------------------------------------------
  |
  | List of ace commands to register from packages. The application commands
  | will be scanned automatically from the "./commands" directory.
  |
  */
  commands: [() => import('@adonisjs/core/commands'), () => import('@adonisjs/lucid/commands')],

  /*
  |--------------------------------------------------------------------------
  | Service providers
  |--------------------------------------------------------------------------
  |
  | List of service providers to import and register when booting the
  | application
  |
  */
  providers: [
    () => import('@adonisjs/core/providers/app_provider'),
    () => import('@adonisjs/core/providers/hash_provider'),
    {
      // REPL provider is only loaded for the repl and test environments.
      file: () => import('@adonisjs/core/providers/repl_provider'),
      environment: ['repl', 'test'],
    },
    () => import('@adonisjs/core/providers/vinejs_provider'),
    () => import('@adonisjs/core/providers/edge_provider'),
    () => import('@adonisjs/session/session_provider'),
    () => import('@adonisjs/vite/vite_provider'),
    () => import('@adonisjs/shield/shield_provider'),
    () => import('@adonisjs/static/static_provider'),
    () => import('@adonisjs/cors/cors_provider'),
    () => import('@adonisjs/lucid/database_provider'),
    () => import('@adonisjs/inertia/inertia_provider'),
    () => import('@adonisjs/transmit/transmit_provider'),
    // Project-local provider (resolved via the package "imports" alias);
    // presumably serves static offline map assets — see providers/map_static_provider.
    () => import('#providers/map_static_provider')
  ],

  /*
  |--------------------------------------------------------------------------
  | Preloads
  |--------------------------------------------------------------------------
  |
  | List of modules to import before starting the application.
  |
  */
  preloads: [() => import('#start/routes'), () => import('#start/kernel')],

  /*
  |--------------------------------------------------------------------------
  | Tests
  |--------------------------------------------------------------------------
  |
  | List of test suites to organize tests by their type. Feel free to remove
  | and add additional suites.
  |
  */
  tests: {
    suites: [
      {
        files: ['tests/unit/**/*.spec(.ts|.js)'],
        name: 'unit',
        timeout: 2000,
      },
      {
        // Functional tests get a much longer timeout than unit tests.
        files: ['tests/functional/**/*.spec(.ts|.js)'],
        name: 'functional',
        timeout: 30000,
      },
    ],
    forceExit: false,
  },

  /*
  |--------------------------------------------------------------------------
  | Metafiles
  |--------------------------------------------------------------------------
  |
  | A collection of files you want to copy to the build folder when creating
  | the production build.
  |
  */
  metaFiles: [
    {
      pattern: 'resources/views/**/*.edge',
      reloadServer: false,
    },
    {
      pattern: 'public/**',
      reloadServer: false,
    },
  ],

  // The built-in assets bundler is disabled; asset builds appear to be handled
  // by Vite through the vite_provider above and the build hook below.
  assetsBundler: false,
  hooks: {
    onBuildStarting: [() => import('@adonisjs/vite/build_hook')],
  },
})


================================================
FILE: admin/app/controllers/benchmark_controller.ts
================================================
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
import { BenchmarkService } from '#services/benchmark_service'
import { runBenchmarkValidator, submitBenchmarkValidator } from '#validators/benchmark'
import { RunBenchmarkJob } from '#jobs/run_benchmark_job'
import type { BenchmarkType } from '../../types/benchmark.js'
import { randomUUID } from 'node:crypto'

@inject()
export default class BenchmarkController {
  constructor(private benchmarkService: BenchmarkService) {}

  /**
   * Narrow an unknown thrown value to a human-readable message.
   * Safe under `useUnknownInCatchVariables` and for non-Error throws.
   */
  private static errorMessage(error: unknown): string {
    return error instanceof Error ? error.message : String(error)
  }

  /**
   * Shared guard for all "start a benchmark" endpoints: when a benchmark is
   * already in progress, send a 409 and return true so the caller can bail.
   *
   * @param includeCurrentId - when true, the 409 payload also carries the
   *   id of the benchmark currently running (matches the /run endpoint).
   */
  private rejectIfRunning(
    response: HttpContext['response'],
    includeCurrentId: boolean
  ): boolean {
    const status = this.benchmarkService.getStatus()
    if (status.status === 'idle') {
      return false
    }
    const body: Record<string, unknown> = {
      success: false,
      error: 'A benchmark is already running',
    }
    if (includeCurrentId) {
      body.current_benchmark_id = status.benchmarkId
    }
    response.status(409).send(body)
    return true
  }

  /**
   * Dispatch an async benchmark job with a fresh id and send the
   * 201 "started" payload. Shared by runSystem/runAI.
   */
  private async startJob(
    response: HttpContext['response'],
    benchmarkType: BenchmarkType,
    includeAi: boolean,
    message: string
  ) {
    const benchmarkId = randomUUID()
    await RunBenchmarkJob.dispatch({
      benchmark_id: benchmarkId,
      benchmark_type: benchmarkType,
      include_ai: includeAi,
    })
    return response.status(201).send({
      success: true,
      benchmark_id: benchmarkId,
      message,
    })
  }

  /**
   * Map a benchmark type to the matching synchronous service call.
   * Unknown types fall back to the full benchmark (preserves prior behavior).
   */
  private runSyncBenchmark(benchmarkType: BenchmarkType) {
    switch (benchmarkType) {
      case 'system':
        return this.benchmarkService.runSystemBenchmarks()
      case 'ai':
        return this.benchmarkService.runAIBenchmark()
      case 'full':
      default:
        return this.benchmarkService.runFullBenchmark()
    }
  }

  /**
   * Start a benchmark run (async via job queue, or sync if specified).
   * Pass `sync=true` to run in-process — useful for local dev without Redis.
   */
  async run({ request, response }: HttpContext) {
    const payload = await request.validateUsing(runBenchmarkValidator)
    const benchmarkType: BenchmarkType = payload.benchmark_type || 'full'
    // Query/body input may arrive as the string 'true' or a real boolean.
    const runSync = request.input('sync') === 'true' || request.input('sync') === true

    // Only one benchmark may run at a time.
    if (this.rejectIfRunning(response, true)) {
      return
    }

    // Run synchronously if requested (useful for local dev without Redis)
    if (runSync) {
      try {
        const result = await this.runSyncBenchmark(benchmarkType)
        return response.send({
          success: true,
          benchmark_id: result.benchmark_id,
          nomad_score: result.nomad_score,
          result,
        })
      } catch (error) {
        return response.status(500).send({
          success: false,
          error: BenchmarkController.errorMessage(error),
        })
      }
    }

    // Generate benchmark ID and dispatch job (async). The 'full' and 'ai'
    // types both include the AI benchmark stage.
    const benchmarkId = randomUUID()
    const { job, created } = await RunBenchmarkJob.dispatch({
      benchmark_id: benchmarkId,
      benchmark_type: benchmarkType,
      include_ai: benchmarkType === 'full' || benchmarkType === 'ai',
    })

    return response.status(201).send({
      success: true,
      job_id: job?.id || benchmarkId,
      benchmark_id: benchmarkId,
      message: created
        ? `${benchmarkType} benchmark started`
        : 'Benchmark job already exists',
    })
  }

  /**
   * Run a system-only benchmark (CPU, memory, disk)
   */
  async runSystem({ response }: HttpContext) {
    if (this.rejectIfRunning(response, false)) {
      return
    }
    return this.startJob(response, 'system', false, 'System benchmark started')
  }

  /**
   * Run an AI-only benchmark
   */
  async runAI({ response }: HttpContext) {
    if (this.rejectIfRunning(response, false)) {
      return
    }
    return this.startJob(response, 'ai', true, 'AI benchmark started')
  }

  /**
   * Get all benchmark results
   */
  async results({}: HttpContext) {
    const results = await this.benchmarkService.getAllResults()
    return {
      results,
      total: results.length,
    }
  }

  /**
   * Get the latest benchmark result (null when none exist)
   */
  async latest({}: HttpContext) {
    const result = await this.benchmarkService.getLatestResult()
    return { result: result ?? null }
  }

  /**
   * Get a specific benchmark result by ID
   */
  async show({ params, response }: HttpContext) {
    const result = await this.benchmarkService.getResultById(params.id)
    if (!result) {
      return response.status(404).send({
        error: 'Benchmark result not found',
      })
    }
    return { result }
  }

  /**
   * Submit benchmark results to central repository
   */
  async submit({ request, response }: HttpContext) {
    const payload = await request.validateUsing(submitBenchmarkValidator)
    // Anonymous flag may arrive as a boolean or the string 'true'.
    const anonymous = request.input('anonymous') === true || request.input('anonymous') === 'true'

    try {
      const submitResult = await this.benchmarkService.submitToRepository(payload.benchmark_id, anonymous)
      return response.send({
        success: true,
        repository_id: submitResult.repository_id,
        percentile: submitResult.percentile,
      })
    } catch (error) {
      // Pass through the status code from the service if available, otherwise default to 400
      const statusCode = (error as any).statusCode || 400
      return response.status(statusCode).send({
        success: false,
        error: BenchmarkController.errorMessage(error),
      })
    }
  }

  /**
   * Update builder tag for a benchmark result.
   * Passing an empty/absent builder_tag clears the tag.
   */
  async updateBuilderTag({ request, response }: HttpContext) {
    const benchmarkId = request.input('benchmark_id')
    const builderTag = request.input('builder_tag')

    if (!benchmarkId) {
      return response.status(400).send({
        success: false,
        error: 'benchmark_id is required',
      })
    }

    const result = await this.benchmarkService.getResultById(benchmarkId)
    if (!result) {
      return response.status(404).send({
        success: false,
        error: 'Benchmark result not found',
      })
    }

    // Validate builder tag format if provided: Word-Word-4digits (e.g. Desert-Fox-1234).
    if (builderTag) {
      const tagPattern = /^[A-Za-z]+-[A-Za-z]+-\d{4}$/
      if (!tagPattern.test(builderTag)) {
        return response.status(400).send({
          success: false,
          error: 'Invalid builder tag format. Expected: Word-Word-0000',
        })
      }
    }

    result.builder_tag = builderTag || null
    await result.save()

    return response.send({
      success: true,
      builder_tag: result.builder_tag,
    })
  }

  /**
   * Get comparison stats from central repository
   */
  async comparison({}: HttpContext) {
    const stats = await this.benchmarkService.getComparisonStats()
    return { stats }
  }

  /**
   * Get current benchmark status (idle/running plus the active benchmark id)
   */
  async status({}: HttpContext) {
    return this.benchmarkService.getStatus()
  }

  /**
   * Get benchmark settings. The model is imported lazily so it is only
   * loaded when these endpoints are hit.
   */
  async settings({}: HttpContext) {
    const { default: BenchmarkSetting } = await import('#models/benchmark_setting')
    return await BenchmarkSetting.getAllSettings()
  }

  /**
   * Update benchmark settings. Currently only `allow_anonymous_submission`
   * is recognized; it is persisted as the string 'true'/'false'.
   */
  async updateSettings({ request, response }: HttpContext) {
    const { default: BenchmarkSetting } = await import('#models/benchmark_setting')
    const body = request.body()

    if (body.allow_anonymous_submission !== undefined) {
      await BenchmarkSetting.setValue(
        'allow_anonymous_submission',
        body.allow_anonymous_submission ? 'true' : 'false'
      )
    }

    return response.send({
      success: true,
      settings: await BenchmarkSetting.getAllSettings(),
    })
  }
}


================================================
FILE: admin/app/controllers/chats_controller.ts
================================================
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
import { ChatService } from '#services/chat_service'
import { createSessionSchema, updateSessionSchema, addMessageSchema } from '#validators/chat'
import KVStore from '#models/kv_store'
import { SystemService } from '#services/system_service'
import { SERVICE_NAMES } from '../../constants/service_names.js'

@inject()
export default class ChatsController {
  constructor(private chatService: ChatService, private systemService: SystemService) {}

  /**
   * Parse a route `:id` param into a numeric chat-session id.
   * Uses an explicit base-10 radix so strings like "0x10" cannot be
   * accidentally interpreted as hexadecimal by `parseInt`.
   */
  private parseSessionId(id: string): number {
    return Number.parseInt(id, 10)
  }

  /**
   * Render the chat page. Responds 404 when the AI Assistant (Ollama)
   * service is not installed on this system.
   */
  async inertia({ inertia, response }: HttpContext) {
    const aiAssistantInstalled = await this.systemService.checkServiceInstalled(SERVICE_NAMES.OLLAMA)
    if (!aiAssistantInstalled) {
      return response.status(404).json({ error: 'AI Assistant service not installed' })
    }

    const chatSuggestionsEnabled = await KVStore.getValue('chat.suggestionsEnabled')
    return inertia.render('chat', {
      settings: {
        chatSuggestionsEnabled: chatSuggestionsEnabled ?? false,
      },
    })
  }

  /** List every chat session. */
  async index({}: HttpContext) {
    return await this.chatService.getAllSessions()
  }

  /** Fetch a single session by id; 404 when it does not exist. */
  async show({ params, response }: HttpContext) {
    const sessionId = this.parseSessionId(params.id)
    const session = await this.chatService.getSession(sessionId)

    if (!session) {
      return response.status(404).json({ error: 'Session not found' })
    }

    return session
  }

  /** Create a new chat session from a validated title/model payload. 201 on success. */
  async store({ request, response }: HttpContext) {
    try {
      const data = await request.validateUsing(createSessionSchema)
      const session = await this.chatService.createSession(data.title, data.model)
      return response.status(201).json(session)
    } catch (error) {
      return response.status(500).json({
        error: error instanceof Error ? error.message : 'Failed to create session',
      })
    }
  }

  /** Return suggested chat prompts for the UI. */
  async suggestions({ response }: HttpContext) {
    try {
      const suggestions = await this.chatService.getChatSuggestions()
      return response.status(200).json({ suggestions })
    } catch (error) {
      return response.status(500).json({
        error: error instanceof Error ? error.message : 'Failed to get suggestions',
      })
    }
  }

  /** Update a session's validated fields and return the updated session. */
  async update({ params, request, response }: HttpContext) {
    try {
      const sessionId = this.parseSessionId(params.id)
      const data = await request.validateUsing(updateSessionSchema)
      const session = await this.chatService.updateSession(sessionId, data)
      return session
    } catch (error) {
      return response.status(500).json({
        error: error instanceof Error ? error.message : 'Failed to update session',
      })
    }
  }

  /** Delete one session; responds 204 (no content) on success. */
  async destroy({ params, response }: HttpContext) {
    try {
      const sessionId = this.parseSessionId(params.id)
      await this.chatService.deleteSession(sessionId)
      return response.status(204)
    } catch (error) {
      return response.status(500).json({
        error: error instanceof Error ? error.message : 'Failed to delete session',
      })
    }
  }

  /** Append a message (role + content) to a session. 201 on success. */
  async addMessage({ params, request, response }: HttpContext) {
    try {
      const sessionId = this.parseSessionId(params.id)
      const data = await request.validateUsing(addMessageSchema)
      const message = await this.chatService.addMessage(sessionId, data.role, data.content)
      return response.status(201).json(message)
    } catch (error) {
      return response.status(500).json({
        error: error instanceof Error ? error.message : 'Failed to add message',
      })
    }
  }

  /** Delete every chat session and return the service's summary result. */
  async destroyAll({ response }: HttpContext) {
    try {
      const result = await this.chatService.deleteAllSessions()
      return response.status(200).json(result)
    } catch (error) {
      return response.status(500).json({
        error: error instanceof Error ? error.message : 'Failed to delete all sessions',
      })
    }
  }
}


================================================
FILE: admin/app/controllers/collection_updates_controller.ts
================================================
import { CollectionUpdateService } from '#services/collection_update_service'
import {
  assertNotPrivateUrl,
  applyContentUpdateValidator,
  applyAllContentUpdatesValidator,
} from '#validators/common'
import type { HttpContext } from '@adonisjs/core/http'

export default class CollectionUpdatesController {
  /** Query the update service for any pending collection updates. */
  async checkForUpdates({}: HttpContext) {
    const updateService = new CollectionUpdateService()
    return await updateService.checkForUpdates()
  }

  /** Validate and apply a single content update, rejecting private download URLs. */
  async applyUpdate({ request }: HttpContext) {
    const payload = await request.validateUsing(applyContentUpdateValidator)
    assertNotPrivateUrl(payload.download_url)
    return await new CollectionUpdateService().applyUpdate(payload)
  }

  /** Validate and apply a batch of content updates after guarding each URL. */
  async applyAllUpdates({ request }: HttpContext) {
    const { updates } = await request.validateUsing(applyAllContentUpdatesValidator)
    updates.forEach((item) => assertNotPrivateUrl(item.download_url))
    return await new CollectionUpdateService().applyAllUpdates(updates)
  }
}


================================================
FILE: admin/app/controllers/docs_controller.ts
================================================
import { DocsService } from '#services/docs_service'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'

@inject()
export default class DocsController {
    constructor(
        private docsService: DocsService
    ) { }

    /** Return the list of available documentation entries. */
    async list({ }: HttpContext) {
        const docs = await this.docsService.getDocs();
        return docs;
    }

    /** Render a single documentation page identified by its slug. */
    async show({ params, inertia }: HttpContext) {
        const parsed = await this.docsService.parseFile(params.slug);
        return inertia.render('docs/show', { content: parsed });
    }
}

================================================
FILE: admin/app/controllers/downloads_controller.ts
================================================
import type { HttpContext } from '@adonisjs/core/http'
import { DownloadService } from '#services/download_service'
import { downloadJobsByFiletypeSchema } from '#validators/download'
import { inject } from '@adonisjs/core'

@inject()
export default class DownloadsController {
  constructor(private downloadService: DownloadService) {}

  /** List every download job regardless of file type. */
  async index() {
    return this.downloadService.listDownloadJobs()
  }

  /** List download jobs filtered to a single, validated file type. */
  async filetype({ request }: HttpContext) {
    const { params } = await request.validateUsing(downloadJobsByFiletypeSchema)
    return this.downloadService.listDownloadJobs(params.filetype)
  }

  /** Remove a failed job from the queue by id. */
  async removeJob({ params }: HttpContext) {
    await this.downloadService.removeFailedJob(params.jobId)
    return { success: true }
  }
}


================================================
FILE: admin/app/controllers/easy_setup_controller.ts
================================================
import { SystemService } from '#services/system_service'
import { ZimService } from '#services/zim_service'
import { CollectionManifestService } from '#services/collection_manifest_service'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'

@inject()
export default class EasySetupController {
  constructor(
    private systemService: SystemService,
    private zimService: ZimService
  ) {}

  /** Render the easy-setup landing page with every service, installed or not. */
  async index({ inertia }: HttpContext) {
    const allServices = await this.systemService.getServices({ installedOnly: false })
    return inertia.render('easy-setup/index', {
      system: { services: allServices },
    })
  }

  /** Render the setup-complete page. */
  async complete({ inertia }: HttpContext) {
    return inertia.render('easy-setup/complete')
  }

  /** List the curated ZIM categories offered during setup. */
  async listCuratedCategories({}: HttpContext) {
    return await this.zimService.listCuratedCategories()
  }

  /**
   * Re-fetch and cache the collection manifests in parallel, reporting
   * which of them changed since the last fetch.
   */
  async refreshManifests({}: HttpContext) {
    const manifests = new CollectionManifestService()
    const [zim, maps, wiki] = await Promise.all([
      manifests.fetchAndCacheSpec('zim_categories'),
      manifests.fetchAndCacheSpec('maps'),
      manifests.fetchAndCacheSpec('wikipedia'),
    ])

    return {
      success: true,
      changed: {
        zim_categories: zim,
        maps,
        wikipedia: wiki,
      },
    }
  }
}


================================================
FILE: admin/app/controllers/home_controller.ts
================================================
import { SystemService } from '#services/system_service'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'

@inject()
export default class HomeController {
    constructor(
        private systemService: SystemService,
    ) { }

    /** Root path: forward visitors to the dashboard at /home. */
    async index({ response }: HttpContext) {
        return response.redirect().toPath('/home');
    }

    /** Dashboard page listing only the services that are installed. */
    async home({ inertia }: HttpContext) {
        const installed = await this.systemService.getServices({ installedOnly: true });
        return inertia.render('home', {
            system: { services: installed }
        })
    }
}

================================================
FILE: admin/app/controllers/maps_controller.ts
================================================
import { MapService } from '#services/map_service'
import {
  assertNotPrivateUrl,
  downloadCollectionValidator,
  filenameParamValidator,
  remoteDownloadValidator,
  remoteDownloadValidatorOptional,
} from '#validators/common'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'

@inject()
export default class MapsController {
  constructor(private mapService: MapService) {}

  /** Render the maps page with base-asset status and downloaded region files. */
  async index({ inertia }: HttpContext) {
    const hasBaseAssets = await this.mapService.ensureBaseAssets()
    const regions = await this.mapService.listRegions()
    return inertia.render('maps', {
      maps: {
        baseAssetsExist: hasBaseAssets,
        regionFiles: regions.files,
      },
    })
  }

  /** Download base map assets, optionally from a caller-supplied (guarded) URL. */
  async downloadBaseAssets({ request }: HttpContext) {
    const payload = await request.validateUsing(remoteDownloadValidatorOptional)
    if (payload.url) {
      assertNotPrivateUrl(payload.url)
    }
    await this.mapService.downloadBaseAssets(payload.url)
    return { success: true }
  }

  /** Start a background download of a remote map file from a guarded URL. */
  async downloadRemote({ request }: HttpContext) {
    const { url } = await request.validateUsing(remoteDownloadValidator)
    assertNotPrivateUrl(url)
    const filename = await this.mapService.downloadRemote(url)
    return {
      message: 'Download started successfully',
      filename,
      url,
    }
  }

  /** Kick off downloads for every resource in a curated collection. */
  async downloadCollection({ request }: HttpContext) {
    const { slug } = await request.validateUsing(downloadCollectionValidator)
    const resources = await this.mapService.downloadCollection(slug)
    return {
      message: 'Collection download started successfully',
      slug,
      resources,
    }
  }

  /**
   * "Preflight" a remote download so the UI can inspect it before actually
   * starting a background download.
   */
  async downloadRemotePreflight({ request }: HttpContext) {
    const { url } = await request.validateUsing(remoteDownloadValidator)
    assertNotPrivateUrl(url)
    return await this.mapService.downloadRemotePreflight(url)
  }

  /** Refresh the curated collection list from upstream. */
  async fetchLatestCollections({}: HttpContext) {
    return { success: await this.mapService.fetchLatestCollections() }
  }

  /** List the curated map collections known to this install. */
  async listCuratedCollections({}: HttpContext) {
    return await this.mapService.listCuratedCollections()
  }

  /** List the map region files currently on disk. */
  async listRegions({}: HttpContext) {
    return await this.mapService.listRegions()
  }

  /** Generate the map styles JSON, ensuring base assets exist first. */
  async styles({ request, response }: HttpContext) {
    const hasBaseAssets = await this.mapService.ensureBaseAssets()
    if (!hasBaseAssets) {
      return response.status(500).send({
        message:
          'Base map assets are missing and could not be downloaded. Please check your connection and try again.',
      })
    }

    const styles = await this.mapService.generateStylesJSON(request.host(), request.protocol())
    return response.json(styles)
  }

  /** Delete a map file by name; 404 when the file does not exist. */
  async delete({ request, response }: HttpContext) {
    const { params } = await request.validateUsing(filenameParamValidator)

    try {
      await this.mapService.delete(params.filename)
    } catch (error) {
      if (error.message === 'not_found') {
        return response.status(404).send({
          message: `Map file with key ${params.filename} not found`,
        })
      }
      // Anything else is unexpected — let the global error handler deal with it.
      throw error
    }

    return {
      message: 'Map file deleted successfully',
    }
  }
}


================================================
FILE: admin/app/controllers/ollama_controller.ts
================================================
import { ChatService } from '#services/chat_service'
import { OllamaService } from '#services/ollama_service'
import { RagService } from '#services/rag_service'
import { modelNameSchema } from '#validators/download'
import { chatSchema, getAvailableModelsSchema } from '#validators/ollama'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
import { DEFAULT_QUERY_REWRITE_MODEL, RAG_CONTEXT_LIMITS, SYSTEM_PROMPTS } from '../../constants/ollama.js'
import logger from '@adonisjs/core/services/logger'
import type { Message } from 'ollama'

@inject()
export default class OllamaController {
  constructor(
    private chatService: ChatService,
    private ollamaService: OllamaService,
    private ragService: RagService
  ) { }

  /**
   * List models available for download, per the validated query options
   * (sort key, recommended-only flag, search text, result limit, force refresh).
   */
  async availableModels({ request }: HttpContext) {
    const reqData = await request.validateUsing(getAvailableModelsSchema)
    return await this.ollamaService.getAvailableModels({
      sort: reqData.sort,
      recommendedOnly: reqData.recommendedOnly,
      query: reqData.query || null,
      limit: reqData.limit || 15,
      force: reqData.force,
    })
  }

  /**
   * Chat completion endpoint (SSE streaming or single response).
   *
   * Pipeline: ensure a system prompt exists, rewrite the latest query from
   * conversation context, retrieve relevant RAG documents and inject them
   * as a system message (budgeted by model size), then call Ollama. When a
   * `sessionId` is supplied, user/assistant messages are persisted and a
   * title is generated for new sessions (first exchange).
   */
  async chat({ request, response }: HttpContext) {
    const reqData = await request.validateUsing(chatSchema)

    // Flush SSE headers immediately so the client connection is open while
    // pre-processing (query rewriting, RAG lookup) runs in the background.
    if (reqData.stream) {
      response.response.setHeader('Content-Type', 'text/event-stream')
      response.response.setHeader('Cache-Control', 'no-cache')
      response.response.setHeader('Connection', 'keep-alive')
      response.response.flushHeaders()
    }

    try {
      // If there are no system messages in the chat inject system prompts
      const hasSystemMessage = reqData.messages.some((msg) => msg.role === 'system')
      if (!hasSystemMessage) {
        const systemPrompt = {
          role: 'system' as const,
          content: SYSTEM_PROMPTS.default,
        }
        logger.debug('[OllamaController] Injecting system prompt')
        reqData.messages.unshift(systemPrompt)
      }

      // Query rewriting for better RAG retrieval with manageable context
      // Will return user's latest message if no rewriting is needed
      const rewrittenQuery = await this.rewriteQueryWithContext(reqData.messages)

      logger.debug(`[OllamaController] Rewritten query for RAG: "${rewrittenQuery}"`)
      if (rewrittenQuery) {
        const relevantDocs = await this.ragService.searchSimilarDocuments(
          rewrittenQuery,
          5, // Top 5 most relevant chunks
          0.3 // Minimum similarity score of 0.3
        )

        logger.debug(`[RAG] Retrieved ${relevantDocs.length} relevant documents for query: "${rewrittenQuery}"`)

        // If relevant context is found, inject as a system message with adaptive limits
        if (relevantDocs.length > 0) {
          // Determine context budget based on model size
          const { maxResults, maxTokens } = this.getContextLimitsForModel(reqData.model)
          let trimmedDocs = relevantDocs.slice(0, maxResults)

          // Apply token cap if set (estimate ~4 chars per token)
          // Always include the first (most relevant) result — the cap only gates subsequent results
          if (maxTokens > 0) {
            const charCap = maxTokens * 4
            let totalChars = 0
            trimmedDocs = trimmedDocs.filter((doc, idx) => {
              totalChars += doc.text.length
              return idx === 0 || totalChars <= charCap
            })
          }

          logger.debug(
            `[RAG] Injecting ${trimmedDocs.length}/${relevantDocs.length} results (model: ${reqData.model}, maxResults: ${maxResults}, maxTokens: ${maxTokens || 'unlimited'})`
          )

          // Format each doc with an index and relevance percentage for the prompt
          const contextText = trimmedDocs
            .map((doc, idx) => `[Context ${idx + 1}] (Relevance: ${(doc.score * 100).toFixed(1)}%)\n${doc.text}`)
            .join('\n\n')

          const systemMessage = {
            role: 'system' as const,
            content: SYSTEM_PROMPTS.rag_context(contextText),
          }

          // Insert system message at the beginning (after any existing system messages)
          const firstNonSystemIndex = reqData.messages.findIndex((msg) => msg.role !== 'system')
          const insertIndex = firstNonSystemIndex === -1 ? 0 : firstNonSystemIndex
          reqData.messages.splice(insertIndex, 0, systemMessage)
        }
      }

      // Check if the model supports "thinking" capability for enhanced response generation
      // If gpt-oss model, it requires a text param for "think" https://docs.ollama.com/api/chat
      const thinkingCapability = await this.ollamaService.checkModelHasThinking(reqData.model)
      const think: boolean | 'medium' = thinkingCapability ? (reqData.model.startsWith('gpt-oss') ? 'medium' : true) : false

      // Separate sessionId from the Ollama request payload — Ollama rejects unknown fields
      const { sessionId, ...ollamaRequest } = reqData

      // Save user message to DB before streaming if sessionId provided
      let userContent: string | null = null
      if (sessionId) {
        const lastUserMsg = [...reqData.messages].reverse().find((m) => m.role === 'user')
        if (lastUserMsg) {
          userContent = lastUserMsg.content
          await this.chatService.addMessage(sessionId, 'user', userContent)
        }
      }

      if (reqData.stream) {
        logger.debug(`[OllamaController] Initiating streaming response for model: "${reqData.model}" with think: ${think}`)
        // Headers already flushed above
        const stream = await this.ollamaService.chatStream({ ...ollamaRequest, think })
        // Accumulate the full assistant reply while forwarding each chunk as an SSE event
        let fullContent = ''
        for await (const chunk of stream) {
          if (chunk.message?.content) {
            fullContent += chunk.message.content
          }
          response.response.write(`data: ${JSON.stringify(chunk)}\n\n`)
        }
        response.response.end()

        // Save assistant message and optionally generate title
        if (sessionId && fullContent) {
          await this.chatService.addMessage(sessionId, 'assistant', fullContent)
          const messageCount = await this.chatService.getMessageCount(sessionId)
          // Only the first exchange (<= 2 messages) triggers fire-and-forget title generation
          if (messageCount <= 2 && userContent) {
            this.chatService.generateTitle(sessionId, userContent, fullContent).catch((err) => {
              logger.error(`[OllamaController] Title generation failed: ${err instanceof Error ? err.message : err}`)
            })
          }
        }
        return
      }

      // Non-streaming (legacy) path
      const result = await this.ollamaService.chat({ ...ollamaRequest, think })

      if (sessionId && result?.message?.content) {
        await this.chatService.addMessage(sessionId, 'assistant', result.message.content)
        const messageCount = await this.chatService.getMessageCount(sessionId)
        if (messageCount <= 2 && userContent) {
          this.chatService.generateTitle(sessionId, userContent, result.message.content).catch((err) => {
            logger.error(`[OllamaController] Title generation failed: ${err instanceof Error ? err.message : err}`)
          })
        }
      }

      return result
    } catch (error) {
      // Once SSE headers are flushed the status code can no longer change,
      // so stream errors are emitted as a final error event instead of thrown.
      if (reqData.stream) {
        response.response.write(`data: ${JSON.stringify({ error: true })}\n\n`)
        response.response.end()
        return
      }
      throw error
    }
  }

  /** Delete an installed model by its validated name. */
  async deleteModel({ request }: HttpContext) {
    const reqData = await request.validateUsing(modelNameSchema)
    await this.ollamaService.deleteModel(reqData.model)
    return {
      success: true,
      message: `Model deleted: ${reqData.model}`,
    }
  }

  /** Queue a background download job for the named model. */
  async dispatchModelDownload({ request }: HttpContext) {
    const reqData = await request.validateUsing(modelNameSchema)
    await this.ollamaService.dispatchModelDownload(reqData.model)
    return {
      success: true,
      message: `Download job dispatched for model: ${reqData.model}`,
    }
  }

  /** List models currently installed in Ollama. */
  async installedModels({ }: HttpContext) {
    return await this.ollamaService.getModels()
  }

  /**
   * Determines RAG context limits based on model size extracted from the model name.
   * Parses size indicators like "1b", "3b", "8b", "70b" from model names/tags.
   */
  private getContextLimitsForModel(modelName: string): { maxResults: number; maxTokens: number } {
    // Extract parameter count from model name (e.g., "llama3.2:3b", "qwen2.5:1.5b", "gemma:7b")
    const sizeMatch = modelName.match(/(\d+\.?\d*)[bB]/)
    const paramBillions = sizeMatch ? parseFloat(sizeMatch[1]) : 8 // default to 8B if unknown

    // RAG_CONTEXT_LIMITS tiers are checked in order; first tier that fits wins
    for (const tier of RAG_CONTEXT_LIMITS) {
      if (paramBillions <= tier.maxParams) {
        return { maxResults: tier.maxResults, maxTokens: tier.maxTokens }
      }
    }

    // Fallback: no limits
    return { maxResults: 5, maxTokens: 0 }
  }

  /**
   * Rewrite the user's latest query into a retrieval-friendly query using
   * recent conversation context via DEFAULT_QUERY_REWRITE_MODEL. Falls back
   * to the last user message (or null) when the conversation is short, the
   * rewrite model is not installed, or rewriting fails.
   */
  private async rewriteQueryWithContext(
    messages: Message[]
  ): Promise<string | null> {
    try {
      // Get recent conversation history (last 6 messages for 3 turns)
      const recentMessages = messages.slice(-6)

      // Skip rewriting for short conversations. Rewriting adds latency with
      // little RAG benefit until there is enough context to matter.
      const userMessages = recentMessages.filter(msg => msg.role === 'user')
      if (userMessages.length <= 2) {
        return userMessages[userMessages.length - 1]?.content || null
      }

      const conversationContext = recentMessages
        .map(msg => {
          const role = msg.role === 'user' ? 'User' : 'Assistant'
          // Truncate assistant messages to first 200 chars to keep context manageable
          const content = msg.role === 'assistant'
            ? msg.content.slice(0, 200) + (msg.content.length > 200 ? '...' : '')
            : msg.content
          return `${role}: "${content}"`
        })
        .join('\n')

      // The rewrite model must be installed locally or we skip rewriting entirely
      const installedModels = await this.ollamaService.getModels(true)
      const rewriteModelAvailable = installedModels?.some(model => model.name === DEFAULT_QUERY_REWRITE_MODEL)
      if (!rewriteModelAvailable) {
        logger.warn(`[RAG] Query rewrite model "${DEFAULT_QUERY_REWRITE_MODEL}" not available. Skipping query rewriting.`)
        const lastUserMessage = [...messages].reverse().find(msg => msg.role === 'user')
        return lastUserMessage?.content || null
      }

      // FUTURE ENHANCEMENT: allow the user to specify which model to use for rewriting
      const response = await this.ollamaService.chat({
        model: DEFAULT_QUERY_REWRITE_MODEL,
        messages: [
          {
            role: 'system',
            content: SYSTEM_PROMPTS.query_rewrite,
          },
          {
            role: 'user',
            content: `Conversation:\n${conversationContext}\n\nRewritten Query:`,
          },
        ],
      })

      const rewrittenQuery = response.message.content.trim()
      logger.info(`[RAG] Query rewritten: "${rewrittenQuery}"`)
      return rewrittenQuery
    } catch (error) {
      logger.error(
        `[RAG] Query rewriting failed: ${error instanceof Error ? error.message : error}`
      )
      // Fallback to last user message if rewriting fails
      const lastUserMessage = [...messages].reverse().find(msg => msg.role === 'user')
      return lastUserMessage?.content || null
    }
  }
}


================================================
FILE: admin/app/controllers/rag_controller.ts
================================================
import { RagService } from '#services/rag_service'
import { EmbedFileJob } from '#jobs/embed_file_job'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
import app from '@adonisjs/core/services/app'
import { randomBytes } from 'node:crypto'
import { sanitizeFilename } from '../utils/fs.js'
import { deleteFileSchema, getJobStatusSchema } from '#validators/rag'

@inject()
export default class RagController {
  constructor(private ragService: RagService) { }

  public async upload({ request, response }: HttpContext) {
    const uploadedFile = request.file('file')
    if (!uploadedFile) {
      return response.status(400).json({ error: 'No file uploaded' })
    }

    const randomSuffix = randomBytes(6).toString('hex')
    const sanitizedName = sanitizeFilename(uploadedFile.clientName)

    const fileName = `${sanitizedName}-${randomSuffix}.${uploadedFile.extname || 'txt'}`
    const fullPath = app.makePath(RagService.UPLOADS_STORAGE_PATH, fileName)

    await uploadedFile.move(app.makePath(RagService.UPLOADS_STORAGE_PATH), {
      name: fileName,
    })

    // Dispatch background job for embedding
    const result = await EmbedFileJob.dispatch({
      filePath: fullPath,
      fileName,
    })

    return response.status(202).json({
      message: result.message,
      jobId: result.jobId,
      fileName,
      filePath: `/${RagService.UPLOADS_STORAGE_PATH}/${fileName}`,
      alreadyProcessing: !result.created,
    })
  }

  public async getActiveJobs({ response }: HttpContext) {
    const jobs = await EmbedFileJob.listActiveJobs()
    return response.status(200).json(jobs)
  }

  public async getJobStatus({ request, response }: HttpContext) {
    const reqData = await request.validateUsing(getJobStatusSchema)

    const fullPath = app.makePath(RagService.UPLOADS_STORAGE_PATH, reqData.filePath)
    const status = await EmbedFileJob.getStatus(fullPath)

    if (!status.exists) {
      return response.status(404).json({ error: 'Job not found for this file' })
    }

    return response.status(200).json(status)
  }

  public async getStoredFiles({ response }: HttpContext) {
    const files = await this.ragService.getStoredFiles()
    return response.status(200).json({ files })
  }

  public async deleteFile({ request, response }: HttpContext) {
    const { source } = await request.validateUsing(deleteFileSchema)
    const result = await this.ragService.deleteFileBySource(source)
    if (!result.success) {
      return response.status(500).json({ error: result.message })
    }
    return response.status(200).json({ message: result.message })
  }

  public async scanAndSync({ response }: HttpContext) {
    try {
      const syncResult = await this.ragService.scanAndSyncStorage()
      return response.status(200).json(syncResult)
    } catch (error) {
      return response.status(500).json({ error: 'Error scanning and syncing storage', details: error.message })
    }
  }
}


================================================
FILE: admin/app/controllers/settings_controller.ts
================================================
import KVStore from '#models/kv_store';
import { BenchmarkService } from '#services/benchmark_service';
import { MapService } from '#services/map_service';
import { OllamaService } from '#services/ollama_service';
import { SystemService } from '#services/system_service';
import { updateSettingSchema } from '#validators/settings';
import { inject } from '@adonisjs/core';
import type { HttpContext } from '@adonisjs/core/http'
import type { KVStoreKey } from '../../types/kv_store.js';

@inject()
export default class SettingsController {
    constructor(
        private systemService: SystemService,
        private mapService: MapService,
        private benchmarkService: BenchmarkService,
        private ollamaService: OllamaService
    ) { }

    /** System-information settings page. */
    async system({ inertia }: HttpContext) {
        const info = await this.systemService.getSystemInfo();
        return inertia.render('settings/system', {
            system: { info }
        });
    }

    /** Apps page listing every service, installed or not. */
    async apps({ inertia }: HttpContext) {
        const services = await this.systemService.getServices({ installedOnly: false });
        return inertia.render('settings/apps', {
            system: { services }
        });
    }

    /** Static legal page. */
    async legal({ inertia }: HttpContext) {
        return inertia.render('settings/legal');
    }

    /** Static support page. */
    async support({ inertia }: HttpContext) {
        return inertia.render('settings/support');
    }

    /** Maps settings page with base-asset status and region files. */
    async maps({ inertia }: HttpContext) {
        const baseAssetsExist = await this.mapService.ensureBaseAssets();
        const regions = await this.mapService.listRegions();
        return inertia.render('settings/maps', {
            maps: {
                baseAssetsExist,
                regionFiles: regions.files
            }
        });
    }

    /** AI-model settings page: available and installed models plus chat settings. */
    async models({ inertia }: HttpContext) {
        const available = await this.ollamaService.getAvailableModels({ sort: 'pulls', recommendedOnly: false, query: null, limit: 15 });
        const installed = await this.ollamaService.getModels();
        const chatSuggestionsEnabled = await KVStore.getValue('chat.suggestionsEnabled');
        const aiAssistantCustomName = await KVStore.getValue('ai.assistantCustomName');
        return inertia.render('settings/models', {
            models: {
                availableModels: available?.models || [],
                installedModels: installed || [],
                settings: {
                    chatSuggestionsEnabled: chatSuggestionsEnabled ?? false,
                    aiAssistantCustomName: aiAssistantCustomName ?? '',
                }
            }
        });
    }

    /** Update page showing whether a newer version is available. */
    async update({ inertia }: HttpContext) {
        const versionInfo = await this.systemService.checkLatestVersion();
        return inertia.render('settings/update', {
            system: {
                updateAvailable: versionInfo.updateAvailable,
                latestVersion: versionInfo.latestVersion,
                currentVersion: versionInfo.currentVersion
            }
        });
    }

    /** ZIM library settings page. */
    async zim({ inertia }: HttpContext) {
        return inertia.render('settings/zim/index')
    }

    /** Remote ZIM explorer page. */
    async zimRemote({ inertia }: HttpContext) {
        return inertia.render('settings/zim/remote-explorer');
    }

    /** Benchmark page with the latest result and current run status. */
    async benchmark({ inertia }: HttpContext) {
        const latestResult = await this.benchmarkService.getLatestResult();
        const current = this.benchmarkService.getStatus();
        return inertia.render('settings/benchmark', {
            benchmark: {
                latestResult,
                status: current.status,
                currentBenchmarkId: current.benchmarkId
            }
        });
    }

    /** Read a single KV-store setting named by the `key` query param. */
    async getSetting({ request, response }: HttpContext) {
        // NOTE(review): key comes straight from the query string and is cast
        // to KVStoreKey without validation — confirm upstream guarding.
        const key = request.qs().key;
        const value = await KVStore.getValue(key as KVStoreKey);
        return response.status(200).send({ key, value });
    }

    /** Validate and persist a single setting. */
    async updateSetting({ request, response }: HttpContext) {
        const reqData = await request.validateUsing(updateSettingSchema);
        await this.systemService.updateSetting(reqData.key, reqData.value);
        return response.status(200).send({ success: true, message: 'Setting updated successfully' });
    }
}

================================================
FILE: admin/app/controllers/system_controller.ts
================================================
import { DockerService } from '#services/docker_service';
import { SystemService } from '#services/system_service'
import { SystemUpdateService } from '#services/system_update_service'
import { ContainerRegistryService } from '#services/container_registry_service'
import { CheckServiceUpdatesJob } from '#jobs/check_service_updates_job'
import { affectServiceValidator, checkLatestVersionValidator, installServiceValidator, subscribeToReleaseNotesValidator, updateServiceValidator } from '#validators/system';
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'

@inject()
export default class SystemController {
    /**
     * HTTP endpoints for system-level operations: service lifecycle
     * (install, start/stop, reinstall, update), host information, and
     * primary-image updates delegated to the sidecar updater.
     */
    constructor(
        private systemService: SystemService,
        private dockerService: DockerService,
        private systemUpdateService: SystemUpdateService,
        private containerRegistryService: ContainerRegistryService
    ) { }

    /** Reports internet connectivity as determined by the system service. */
    async getInternetStatus({ }: HttpContext) {
        return this.systemService.getInternetStatus();
    }

    /** Returns host system information collected by the system service. */
    async getSystemInfo({ }: HttpContext) {
        return this.systemService.getSystemInfo();
    }

    /** Lists only the services that are currently installed. */
    async getServices({ }: HttpContext) {
        return this.systemService.getServices({ installedOnly: true });
    }

    /** Runs the Docker preflight for installing a named service. */
    async installService({ request, response }: HttpContext) {
        const { service_name } = await request.validateUsing(installServiceValidator);

        const preflight = await this.dockerService.createContainerPreflight(service_name);
        if (!preflight.success) {
            response.status(400).send({ error: preflight.message });
            return;
        }
        response.send({ success: true, message: preflight.message });
    }

    /** Applies a lifecycle action (start/stop/etc.) to a service container. */
    async affectService({ request, response }: HttpContext) {
        const { service_name, action } = await request.validateUsing(affectServiceValidator);
        const outcome = await this.dockerService.affectContainer(service_name, action);
        if (!outcome) {
            response.internalServerError({ error: 'Failed to affect service' });
            return;
        }
        response.send({ success: outcome.success, message: outcome.message });
    }

    /** Checks for a newer primary-image version; `force` bypasses any cache. */
    async checkLatestVersion({ request }: HttpContext) {
        const { force } = await request.validateUsing(checkLatestVersionValidator);
        return this.systemService.checkLatestVersion(force);
    }

    /** Tears down and reinstalls a service container from scratch. */
    async forceReinstallService({ request, response }: HttpContext) {
        const { service_name } = await request.validateUsing(installServiceValidator);
        const outcome = await this.dockerService.forceReinstall(service_name);
        if (!outcome) {
            response.internalServerError({ error: 'Failed to force reinstall service' });
            return;
        }
        response.send({ success: outcome.success, message: outcome.message });
    }

    /**
     * Asks the sidecar updater to perform a system update.
     * 503 when the sidecar is unreachable, 409 when the updater refuses.
     */
    async requestSystemUpdate({ response }: HttpContext) {
        // The update itself is performed by the updater sidecar container;
        // bail out early when it is not available.
        if (!this.systemUpdateService.isSidecarAvailable()) {
            response.status(503).send({
                success: false,
                error: 'Update sidecar is not available. Ensure the updater container is running.',
            });
            return;
        }

        const result = await this.systemUpdateService.requestUpdate();
        if (!result.success) {
            response.status(409).send({
                success: false,
                error: result.message,
            });
            return;
        }

        response.send({
            success: true,
            message: result.message,
            note: 'Monitor update progress via GET /api/system/update/status. The connection may drop during container restart.',
        });
    }

    /** Returns the current update status as reported by the sidecar service. */
    async getSystemUpdateStatus({ response }: HttpContext) {
        const status = this.systemUpdateService.getUpdateStatus();
        if (!status) {
            response.status(500).send({
                error: 'Failed to retrieve update status',
            });
            return;
        }
        response.send(status);
    }

    /** Returns accumulated update log lines. */
    async getSystemUpdateLogs({ response }: HttpContext) {
        response.send({ logs: this.systemUpdateService.getUpdateLogs() });
    }

    /** Subscribes an email address to release-note notifications. */
    async subscribeToReleaseNotes({ request }: HttpContext) {
        const { email } = await request.validateUsing(subscribeToReleaseNotesValidator);
        return this.systemService.subscribeToReleaseNotes(email);
    }

    /** Returns diagnostic information for support/debugging. */
    async getDebugInfo({ }: HttpContext) {
        return { debugInfo: await this.systemService.getDebugInfo() };
    }

    /** Queues an ad-hoc background sweep for per-service image updates. */
    async checkServiceUpdates({ response }: HttpContext) {
        await CheckServiceUpdatesJob.dispatch();
        response.send({ success: true, message: 'Service update check dispatched' });
    }

    /**
     * Lists newer image tags available for one installed service.
     * 404 when the service is unknown or not installed.
     */
    async getAvailableVersions({ params, response }: HttpContext) {
        const serviceName = params.name;
        // Service model is loaded lazily, matching the codebase's pattern
        // for model access inside job/controller bodies.
        const { default: Service } = await import('#models/service');
        const service = await Service.query()
            .where('service_name', serviceName)
            .where('installed', true)
            .first();

        if (!service) {
            return response.status(404).send({ error: `Service ${serviceName} not found or not installed` });
        }

        try {
            const hostArch = await this.getHostArch();
            const versions = await this.containerRegistryService.getAvailableUpdates(
                service.container_image,
                hostArch,
                service.source_repo
            );
            response.send({ versions });
        } catch (error) {
            response.status(500).send({ error: `Failed to fetch versions: ${error.message}` });
        }
    }

    /** Updates a service container to the requested target version. */
    async updateService({ request, response }: HttpContext) {
        const { service_name, target_version } = await request.validateUsing(updateServiceValidator);
        const result = await this.dockerService.updateContainer(service_name, target_version);

        if (!result.success) {
            response.status(400).send({ error: result.message });
            return;
        }
        response.send({ success: true, message: result.message });
    }

    /**
     * Best-effort detection of the host CPU architecture, normalized to
     * OCI naming (amd64/arm64/arm). Falls back to amd64 when Docker info
     * is unavailable.
     */
    private async getHostArch(): Promise<string> {
        // Docker reports kernel-style names; map them to OCI platform names.
        const dockerToOci: Record<string, string> = {
            x86_64: 'amd64',
            aarch64: 'arm64',
            armv7l: 'arm',
            amd64: 'amd64',
            arm64: 'arm64',
        };
        try {
            const info = await this.dockerService.docker.info();
            const arch = info.Architecture || '';
            return dockerToOci[arch] || arch.toLowerCase();
        } catch {
            return 'amd64';
        }
    }
}

================================================
FILE: admin/app/controllers/zim_controller.ts
================================================
import { ZimService } from '#services/zim_service'
import {
  assertNotPrivateUrl,
  downloadCategoryTierValidator,
  filenameParamValidator,
  remoteDownloadWithMetadataValidator,
  selectWikipediaValidator,
} from '#validators/common'
import { listRemoteZimValidator } from '#validators/zim'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'

@inject()
export default class ZimController {
  /** HTTP endpoints for local/remote ZIM library management. */
  constructor(private zimService: ZimService) {}

  /** Lists locally installed ZIM files. */
  async list({}: HttpContext) {
    return this.zimService.list()
  }

  /** Pages through the remote ZIM catalogue, optionally filtered by query. */
  async listRemote({ request }: HttpContext) {
    const { start = 0, count = 12, query } = await request.validateUsing(listRemoteZimValidator)
    return this.zimService.listRemote({ start, count, query })
  }

  /** Starts a background download of a remote ZIM file. */
  async downloadRemote({ request }: HttpContext) {
    const payload = await request.validateUsing(remoteDownloadWithMetadataValidator)
    // Disallow private-address URLs before any download work starts.
    assertNotPrivateUrl(payload.url)

    const { filename, jobId } = await this.zimService.downloadRemote(payload.url)
    return {
      message: 'Download started successfully',
      filename,
      jobId,
      url: payload.url,
    }
  }

  /** Returns the curated category catalogue. */
  async listCuratedCategories({}: HttpContext) {
    return this.zimService.listCuratedCategories()
  }

  /** Kicks off downloads for every resource in one category tier. */
  async downloadCategoryTier({ request }: HttpContext) {
    const { categorySlug, tierSlug } = await request.validateUsing(downloadCategoryTierValidator)
    const resources = await this.zimService.downloadCategoryTier(categorySlug, tierSlug)

    return {
      message: 'Download started successfully',
      categorySlug,
      tierSlug,
      resources,
    }
  }

  /** Deletes a ZIM file by filename; 404 when the file is unknown. */
  async delete({ request, response }: HttpContext) {
    const { params } = await request.validateUsing(filenameParamValidator)

    try {
      await this.zimService.delete(params.filename)
    } catch (error) {
      // Only the service's 'not_found' sentinel becomes a 404; anything
      // else bubbles up to the global exception handler.
      if (error.message !== 'not_found') {
        throw error
      }
      return response.status(404).send({
        message: `ZIM file with key ${params.filename} not found`,
      })
    }

    return {
      message: 'ZIM file deleted successfully',
    }
  }

  // Wikipedia selector endpoints

  /** Returns the current Wikipedia selection state. */
  async getWikipediaState({}: HttpContext) {
    return this.zimService.getWikipediaState()
  }

  /** Applies a Wikipedia selection option. */
  async selectWikipedia({ request }: HttpContext) {
    const { optionId } = await request.validateUsing(selectWikipediaValidator)
    return this.zimService.selectWikipedia(optionId)
  }
}


================================================
FILE: admin/app/exceptions/handler.ts
================================================
import app from '@adonisjs/core/services/app'
import { HttpContext, ExceptionHandler } from '@adonisjs/core/http'
import type { StatusPageRange, StatusPageRenderer } from '@adonisjs/core/types/http'

export default class HttpExceptionHandler extends ExceptionHandler {
  /**
   * In debug mode (any non-production environment), the exception handler
   * displays verbose errors with pretty-printed stack traces.
   */
  protected debug = !app.inProduction

  /**
   * Render friendly HTML status pages only in production; during
   * development the verbose debug output above is shown instead.
   */
  protected renderStatusPages = app.inProduction

  /**
   * Maps status-code ranges to the Inertia page used to render them:
   * 404 gets a dedicated not-found page, all 5xx share a server-error page.
   */
  protected statusPages: Record<StatusPageRange, StatusPageRenderer> = {
    '404': (error, { inertia }) => inertia.render('errors/not_found', { error }),
    '500..599': (error, { inertia }) => inertia.render('errors/server_error', { error }),
  }

  /**
   * Converts an error into an HTTP response. Delegates entirely to the
   * framework default (kept as an explicit override for future customization).
   */
  async handle(error: unknown, ctx: HttpContext) {
    return super.handle(error, ctx)
  }

  /**
   * Reports an error to the logging service or a third-party error
   * monitoring service. Delegates to the framework default.
   *
   * @note You should not attempt to send a response from this method.
   */
  async report(error: unknown, ctx: HttpContext) {
    return super.report(error, ctx)
  }
}


================================================
FILE: admin/app/exceptions/internal_server_error_exception.ts
================================================
import { Exception } from '@adonisjs/core/exceptions'

/**
 * Generic 500 exception for unexpected server-side failures; picked up by
 * the global HTTP exception handler like any other Adonis exception.
 */
export default class InternalServerErrorException extends Exception {
  static status = 500
  static code = 'E_INTERNAL_SERVER_ERROR'
}

================================================
FILE: admin/app/jobs/check_service_updates_job.ts
================================================
import { Job } from 'bullmq'
import { QueueService } from '#services/queue_service'
import { DockerService } from '#services/docker_service'
import { ContainerRegistryService } from '#services/container_registry_service'
import Service from '#models/service'
import logger from '@adonisjs/core/services/logger'
import transmit from '@adonisjs/transmit/services/main'
import { BROADCAST_CHANNELS } from '../../constants/broadcast.js'
import { DateTime } from 'luxon'

/**
 * Background job that sweeps every installed service, asks the container
 * registry for newer image tags, and records the first returned candidate
 * (ordering is defined by ContainerRegistryService — presumably newest
 * first) on the Service row. Broadcasts a completion event so the UI can
 * refresh.
 */
export class CheckServiceUpdatesJob {
  static get queue() {
    return 'service-updates'
  }

  static get key() {
    return 'check-service-updates'
  }

  /** Checks all installed services; a failure on one never aborts the sweep. */
  async handle(_job: Job) {
    logger.info('[CheckServiceUpdatesJob] Checking for service updates...')

    const dockerService = new DockerService()
    const registryService = new ContainerRegistryService()
    const hostArch = await this.getHostArch(dockerService)

    const installedServices = await Service.query().where('installed', true)
    let updatesFound = 0

    for (const service of installedServices) {
      try {
        const candidates = await registryService.getAvailableUpdates(
          service.container_image,
          hostArch,
          service.source_repo
        )
        const latestUpdate = candidates.length ? candidates[0].tag : null

        // Persist the candidate (or clear it) along with the check timestamp.
        service.available_update_version = latestUpdate
        service.update_checked_at = DateTime.now()
        await service.save()

        if (latestUpdate) {
          updatesFound += 1
          logger.info(
            `[CheckServiceUpdatesJob] Update available for ${service.service_name}: ${service.container_image} → ${latestUpdate}`
          )
        }
      } catch (error) {
        // Log and keep going — remaining services must still be checked.
        logger.error(
          `[CheckServiceUpdatesJob] Failed to check updates for ${service.service_name}: ${error.message}`
        )
      }
    }

    logger.info(
      `[CheckServiceUpdatesJob] Completed. ${updatesFound} update(s) found for ${installedServices.length} service(s).`
    )

    // Broadcast completion so the frontend can refresh
    transmit.broadcast(BROADCAST_CHANNELS.SERVICE_UPDATES, {
      status: 'completed',
      updatesFound,
      timestamp: new Date().toISOString(),
    })

    return { updatesFound }
  }

  /** Resolves the host architecture in OCI naming, defaulting to amd64. */
  private async getHostArch(dockerService: DockerService): Promise<string> {
    // Map Docker/kernel architecture names to OCI platform names.
    const dockerToOci: Record<string, string> = {
      x86_64: 'amd64',
      aarch64: 'arm64',
      armv7l: 'arm',
      amd64: 'amd64',
      arm64: 'arm64',
    }

    try {
      const info = await dockerService.docker.info()
      const arch = info.Architecture || ''
      return dockerToOci[arch] || arch.toLowerCase()
    } catch (error) {
      logger.warn(
        `[CheckServiceUpdatesJob] Could not detect host architecture: ${error.message}. Defaulting to amd64.`
      )
      return 'amd64'
    }
  }

  /** Registers (or refreshes) the nightly 03:00 repeatable scheduler. */
  static async scheduleNightly() {
    const queue = new QueueService().getQueue(this.queue)

    await queue.upsertJobScheduler(
      'nightly-service-update-check',
      { pattern: '0 3 * * *' },
      {
        name: this.key,
        opts: {
          removeOnComplete: { count: 7 },
          removeOnFail: { count: 5 },
        },
      }
    )

    logger.info('[CheckServiceUpdatesJob] Service update check scheduled with cron: 0 3 * * *')
  }

  /** Enqueues an ad-hoc run with retry/backoff and bounded history. */
  static async dispatch() {
    const queue = new QueueService().getQueue(this.queue)

    const job = await queue.add(
      this.key,
      {},
      {
        attempts: 3,
        backoff: { type: 'exponential', delay: 60000 },
        removeOnComplete: { count: 7 },
        removeOnFail: { count: 5 },
      }
    )

    logger.info(`[CheckServiceUpdatesJob] Dispatched ad-hoc service update check job ${job.id}`)
    return job
  }
}


================================================
FILE: admin/app/jobs/check_update_job.ts
================================================
import { Job } from 'bullmq'
import { QueueService } from '#services/queue_service'
import { DockerService } from '#services/docker_service'
import { SystemService } from '#services/system_service'
import logger from '@adonisjs/core/services/logger'
import KVStore from '#models/kv_store'

export class CheckUpdateJob {
  static get queue() {
    return 'system'
  }

  static get key() {
    return 'check-update'
  }

  async handle(_job: Job) {
    logger.info('[CheckUpdateJob] Running update check...')

    const dockerService = new DockerService()
    const systemService = new SystemService(dockerService)

    try {
      const result = await systemService.checkLatestVersion()

      if (result.updateAvailable) {
        logger.info(
          `[CheckUpdateJob] Update available: ${result.currentVersion} → ${result.latestVersion}`
        )
      } else {
        await KVStore.setValue('system.updateAvailable', false)
        logger.info(
          `[CheckUpdateJob] System is up to date (${result.currentVersion})`
        )
      }

      return result
    } catch (error) {
      logger.error(`[CheckUpdateJob] Update check failed: ${error.message}`)
      throw error
    }
  }

  static async scheduleNightly() {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)

    await queue.upsertJobScheduler(
      'nightly-update-check',
      { pattern: '0 2,14 * * *' }, // Every 12 hours at 2am and 2pm
      {
        name: this.key,
        opts: {
          removeOnComplete: { count: 7 },
          removeOnFail: { count: 5 },
        },
      }
    )

    logger.info('[CheckUpdateJob] Update check scheduled with cron: 0 2,14 * * *')
  }

  static async dispatch() {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)

    const job = await queue.add(this.key, {}, {
      attempts: 3,
      backoff: { type: 'exponential', delay: 60000 },
      removeOnComplete: { count: 7 },
      removeOnFail: { count: 5 },
    })

    logger.info(`[CheckUpdateJob] Dispatched ad-hoc update check job ${job.id}`)
    return job
  }
}


================================================
FILE: admin/app/jobs/download_model_job.ts
================================================
import { Job } from 'bullmq'
import { QueueService } from '#services/queue_service'
import { createHash } from 'crypto'
import logger from '@adonisjs/core/services/logger'
import { OllamaService } from '#services/ollama_service'

/** Payload for a single Ollama model download job. */
export interface DownloadModelJobParams {
  modelName: string
}

/**
 * BullMQ job that waits for Ollama to become reachable and then pulls a
 * model, mirroring pull progress into the job's progress and data fields.
 */
export class DownloadModelJob {
  static get queue() {
    return 'model-downloads'
  }

  static get key() {
    return 'download-model'
  }

  /** Deterministic job id derived from the model name (dedupes dispatch). */
  static getJobId(modelName: string): string {
    return createHash('sha256').update(modelName).digest('hex').slice(0, 16)
  }

  /**
   * Downloads the model named in job data. Throws (so BullMQ retries)
   * while Ollama is not yet ready, and on a failed download.
   */
  async handle(job: Job) {
    const { modelName } = job.data as DownloadModelJobParams

    logger.info(`[DownloadModelJob] Attempting to download model: ${modelName}`)

    const ollamaService = new OllamaService()

    // Even if no models are installed, this should return an empty array if ready
    const existingModels = await ollamaService.getModels()
    if (!existingModels) {
      logger.warn(
        `[DownloadModelJob] Ollama service not ready yet for model ${modelName}. Will retry...`
      )
      throw new Error('Ollama service not ready yet')
    }

    logger.info(
      `[DownloadModelJob] Ollama service is ready. Initiating download for ${modelName}`
    )

    // Services are ready, initiate the download with progress tracking.
    // FIX: the callback previously used `if (progressPercent)` (dropping
    // legitimate 0% updates) and left the updateProgress/updateData
    // promises floating — unhandled rejections and racy write ordering.
    // It is now async and awaits both, matching EmbedFileJob's pattern.
    const result = await ollamaService.downloadModel(modelName, async (progressPercent) => {
      if (progressPercent != null) {
        await job.updateProgress(Math.floor(progressPercent))
        logger.info(
          `[DownloadModelJob] Model ${modelName}: ${progressPercent}%`
        )
      }

      // Store detailed progress in job data for clients to query
      await job.updateData({
        ...job.data,
        status: 'downloading',
        progress: progressPercent,
        progress_timestamp: new Date().toISOString(),
      })
    })

    if (!result.success) {
      logger.error(
        `[DownloadModelJob] Failed to initiate download for model ${modelName}: ${result.message}`
      )
      throw new Error(`Failed to initiate download for model: ${result.message}`)
    }

    logger.info(`[DownloadModelJob] Successfully completed download for model ${modelName}`)
    return {
      modelName,
      message: result.message,
    }
  }

  /** Looks up an existing download job for a model, if any. */
  static async getByModelName(modelName: string): Promise<Job | undefined> {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(modelName)
    return await queue.getJob(jobId)
  }

  /**
   * Enqueues a download job for the model, deduplicated by job id.
   * Returns the existing job (created: false) when one is already queued.
   */
  static async dispatch(params: DownloadModelJobParams) {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(params.modelName)

    try {
      const job = await queue.add(this.key, params, {
        jobId,
        attempts: 40, // Many attempts since services may take considerable time to install
        backoff: {
          type: 'fixed',
          delay: 60000, // Check every 60 seconds
        },
        removeOnComplete: false, // Keep for status checking
        removeOnFail: false, // Keep failed jobs for debugging
      })

      return {
        job,
        created: true,
        message: `Dispatched model download job for ${params.modelName}`,
      }
    } catch (error) {
      if (error.message.includes('job already exists')) {
        const existing = await queue.getJob(jobId)
        return {
          job: existing,
          created: false,
          message: `Job already exists for model ${params.modelName}`,
        }
      }
      throw error
    }
  }
}


================================================
FILE: admin/app/jobs/embed_file_job.ts
================================================
import { Job, UnrecoverableError } from 'bullmq'
import { QueueService } from '#services/queue_service'
import { EmbedJobWithProgress } from '../../types/rag.js'
import { RagService } from '#services/rag_service'
import { DockerService } from '#services/docker_service'
import { OllamaService } from '#services/ollama_service'
import { createHash } from 'crypto'
import logger from '@adonisjs/core/services/logger'

/** Payload for a file-embedding job; batch fields apply to large ZIM files. */
export interface EmbedFileJobParams {
  filePath: string
  fileName: string
  fileSize?: number
  // Batch processing for large ZIM files
  batchOffset?: number  // Current batch offset (for ZIM files)
  totalArticles?: number // Total articles in ZIM (for progress tracking)
  isFinalBatch?: boolean // Whether this is the last batch (prevents premature deletion)
}

/**
 * BullMQ job that chunks and embeds a file into the vector store via
 * RagService. Large ZIM archives are processed in batches: each run embeds
 * one batch and re-dispatches itself at the next offset, only allowing
 * source-file deletion on the batch explicitly marked final.
 */
export class EmbedFileJob {
  static get queue() {
    return 'file-embeddings'
  }

  static get key() {
    return 'embed-file'
  }

  /** Deterministic job id derived from the file path (dedupes dispatch). */
  static getJobId(filePath: string): string {
    return createHash('sha256').update(filePath).digest('hex').slice(0, 16)
  }

  /**
   * Embeds one file (or one ZIM batch). Throws UnrecoverableError when a
   * required service is not installed (no retries), a plain Error when a
   * service is merely not ready yet (BullMQ retries with backoff).
   */
  async handle(job: Job) {
    const { filePath, fileName, batchOffset, totalArticles } = job.data as EmbedFileJobParams

    // A defined batchOffset marks this run as one batch of a larger ZIM.
    const isZimBatch = batchOffset !== undefined
    const batchInfo = isZimBatch ? ` (batch offset: ${batchOffset})` : ''
    logger.info(`[EmbedFileJob] Starting embedding process for: ${fileName}${batchInfo}`)

    const dockerService = new DockerService()
    const ollamaService = new OllamaService()
    const ragService = new RagService(dockerService, ollamaService)

    try {
      // Check if Ollama and Qdrant services are installed and ready
      // Use UnrecoverableError for "not installed" so BullMQ won't retry —
      // retrying 30x when the service doesn't exist just wastes Redis connections
      const ollamaUrl = await dockerService.getServiceURL('nomad_ollama')
      if (!ollamaUrl) {
        logger.warn('[EmbedFileJob] Ollama is not installed. Skipping embedding for: %s', fileName)
        throw new UnrecoverableError('Ollama service is not installed. Install AI Assistant to enable file embeddings.')
      }

      // Installed but not yet responding → plain Error so BullMQ retries.
      const existingModels = await ollamaService.getModels()
      if (!existingModels) {
        logger.warn('[EmbedFileJob] Ollama service not ready yet. Will retry...')
        throw new Error('Ollama service not ready yet')
      }

      const qdrantUrl = await dockerService.getServiceURL('nomad_qdrant')
      if (!qdrantUrl) {
        logger.warn('[EmbedFileJob] Qdrant is not installed. Skipping embedding for: %s', fileName)
        throw new UnrecoverableError('Qdrant service is not installed. Install AI Assistant to enable file embeddings.')
      }

      logger.info(`[EmbedFileJob] Services ready. Processing file: ${fileName}`)

      // Update progress starting
      await job.updateProgress(5)
      await job.updateData({
        ...job.data,
        status: 'processing',
        // Preserve the original start time across batch re-runs.
        startedAt: job.data.startedAt || Date.now(),
      })

      logger.info(`[EmbedFileJob] Processing file: ${filePath}`)

      // Progress callback: maps service-reported 0-100% into the 5-95% job range
      const onProgress = async (percent: number) => {
        await job.updateProgress(Math.min(95, Math.round(5 + percent * 0.9)))
      }

      // Process and embed the file
      // Only allow deletion if explicitly marked as final batch
      const allowDeletion = job.data.isFinalBatch === true
      const result = await ragService.processAndEmbedFile(
        filePath,
        allowDeletion,
        batchOffset,
        onProgress
      )

      if (!result.success) {
        logger.error(`[EmbedFileJob] Failed to process file ${fileName}: ${result.message}`)
        throw new Error(result.message)
      }

      // For ZIM files with batching, check if more batches are needed
      if (result.hasMoreBatches) {
        const nextOffset = (batchOffset || 0) + (result.articlesProcessed || 0)
        logger.info(
          `[EmbedFileJob] Batch complete. Dispatching next batch at offset ${nextOffset}`
        )

        // Dispatch next batch (not final yet)
        await EmbedFileJob.dispatch({
          filePath,
          fileName,
          batchOffset: nextOffset,
          totalArticles: totalArticles || result.totalArticles,
          isFinalBatch: false, // Explicitly not final
        })

        // Calculate progress based on articles processed
        const progress = totalArticles
          ? Math.round((nextOffset / totalArticles) * 100)
          : 50

        await job.updateProgress(progress)
        await job.updateData({
          ...job.data,
          status: 'batch_completed',
          lastBatchAt: Date.now(),
          // Accumulate chunk counts across batches.
          chunks: (job.data.chunks || 0) + (result.chunks || 0),
        })

        return {
          success: true,
          fileName,
          filePath,
          chunks: result.chunks,
          hasMoreBatches: true,
          nextOffset,
          message: `Batch embedded ${result.chunks} chunks, next batch queued`,
        }
      }

      // Final batch or non-batched file - mark as complete
      const totalChunks = (job.data.chunks || 0) + (result.chunks || 0)
      await job.updateProgress(100)
      await job.updateData({
        ...job.data,
        status: 'completed',
        completedAt: Date.now(),
        chunks: totalChunks,
      })

      const batchMsg = isZimBatch ? ` (final batch, total chunks: ${totalChunks})` : ''
      logger.info(
        `[EmbedFileJob] Successfully embedded ${result.chunks} chunks from file: ${fileName}${batchMsg}`
      )

      return {
        success: true,
        fileName,
        filePath,
        chunks: result.chunks,
        message: `Successfully embedded ${result.chunks} chunks`,
      }
    } catch (error) {
      logger.error(`[EmbedFileJob] Error embedding file ${fileName}:`, error)

      // Record failure details in job data, then rethrow so BullMQ
      // applies its retry policy (or stops on UnrecoverableError).
      await job.updateData({
        ...job.data,
        status: 'failed',
        failedAt: Date.now(),
        error: error instanceof Error ? error.message : 'Unknown error',
      })

      throw error
    }
  }

  /** Lists queued/active/delayed embedding jobs with their progress. */
  static async listActiveJobs(): Promise<EmbedJobWithProgress[]> {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobs = await queue.getJobs(['waiting', 'active', 'delayed'])

    return jobs.map((job) => ({
      jobId: job.id!.toString(),
      fileName: (job.data as EmbedFileJobParams).fileName,
      filePath: (job.data as EmbedFileJobParams).filePath,
      progress: typeof job.progress === 'number' ? job.progress : 0,
      status: ((job.data as any).status as string) ?? 'waiting',
    }))
  }

  /** Looks up an existing embedding job for a file path, if any. */
  static async getByFilePath(filePath: string): Promise<Job | undefined> {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(filePath)
    return await queue.getJob(jobId)
  }

  /**
   * Enqueues an embedding job, deduplicated by file-path-derived job id.
   * Returns the existing job (created: false) when one is already queued.
   */
  static async dispatch(params: EmbedFileJobParams) {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(params.filePath)

    try {
      const job = await queue.add(this.key, params, {
        jobId,
        attempts: 30,
        backoff: {
          type: 'fixed',
          delay: 60000, // Check every 60 seconds for service readiness
        },
        removeOnComplete: { count: 50 }, // Keep last 50 completed jobs for history
        removeOnFail: { count: 20 } // Keep last 20 failed jobs for debugging
      })

      logger.info(`[EmbedFileJob] Dispatched embedding job for file: ${params.fileName}`)

      return {
        job,
        created: true,
        jobId,
        message: `File queued for embedding: ${params.fileName}`,
      }
    } catch (error) {
      if (error.message && error.message.includes('job already exists')) {
        const existing = await queue.getJob(jobId)
        logger.info(`[EmbedFileJob] Job already exists for file: ${params.fileName}`)
        return {
          job: existing,
          created: false,
          jobId,
          message: `Embedding job already exists for: ${params.fileName}`,
        }
      }
      throw error
    }
  }

  /** Summarizes the embedding status for one file path. */
  static async getStatus(filePath: string): Promise<{
    exists: boolean
    status?: string
    progress?: number
    chunks?: number
    error?: string
  }> {
    const job = await this.getByFilePath(filePath)

    if (!job) {
      return { exists: false }
    }

    const state = await job.getState()
    const data = job.data

    // Prefer the app-maintained status in job data over BullMQ's state.
    return {
      exists: true,
      status: data.status || state,
      progress: typeof job.progress === 'number' ? job.progress : undefined,
      chunks: data.chunks,
      error: data.error,
    }
  }
}


================================================
FILE: admin/app/jobs/run_benchmark_job.ts
================================================
import { Job } from 'bullmq'
import { QueueService } from '#services/queue_service'
import { BenchmarkService } from '#services/benchmark_service'
import type { RunBenchmarkJobParams } from '../../types/benchmark.js'
import logger from '@adonisjs/core/services/logger'
import { DockerService } from '#services/docker_service'

export class RunBenchmarkJob {
  static get queue() {
    return 'benchmarks'
  }

  static get key() {
    return 'run-benchmark'
  }

  async handle(job: Job) {
    const { benchmark_id, benchmark_type } = job.data as RunBenchmarkJobParams

    logger.info(`[RunBenchmarkJob] Starting benchmark ${benchmark_id} of type ${benchmark_type}`)

    const dockerService = new DockerService()
    const benchmarkService = new BenchmarkService(dockerService)

    try {
      let result

      switch (benchmark_type) {
        case 'full':
          result = await benchmarkService.runFullBenchmark()
          break
        case 'system':
          result = await benchmarkService.runSystemBenchmarks()
          break
        case 'ai':
          result = await benchmarkService.runAIBenchmark()
          break
        default:
          throw new Error(`Unknown benchmark type: ${benchmark_type}`)
      }

      logger.info(`[RunBenchmarkJob] Benchmark ${benchmark_id} completed with NOMAD score: ${result.nomad_score}`)

      return {
        success: true,
        benchmark_id: result.benchmark_id,
        nomad_score: result.nomad_score,
      }
    } catch (error) {
      logger.error(`[RunBenchmarkJob] Benchmark ${benchmark_id} failed: ${error.message}`)
      throw error
    }
  }

  static async dispatch(params: RunBenchmarkJobParams) {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)

    try {
      const job = await queue.add(this.key, params, {
        jobId: params.benchmark_id,
        attempts: 1, // Benchmarks shouldn't be retried automatically
        removeOnComplete: {
          count: 10, // Keep last 10 completed jobs
        },
        removeOnFail: {
          count: 5, // Keep last 5 failed jobs
        },
      })

      logger.info(`[RunBenchmarkJob] Dispatched benchmark job ${params.benchmark_id}`)

      return {
        job,
        created: true,
        message: `Benchmark job ${params.benchmark_id} dispatched successfully`,
      }
    } catch (error) {
      if (error.message.includes('job already exists')) {
        const existing = await queue.getJob(params.benchmark_id)
        return {
          job: existing,
          created: false,
          message: `Benchmark job ${params.benchmark_id} already exists`,
        }
      }
      throw error
    }
  }

  static async getJob(benchmarkId: string): Promise<Job | undefined> {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    return await queue.getJob(benchmarkId)
  }

  static async getJobState(benchmarkId: string): Promise<string | undefined> {
    const job = await this.getJob(benchmarkId)
    return job ? await job.getState() : undefined
  }
}


================================================
FILE: admin/app/jobs/run_download_job.ts
================================================
import { Job } from 'bullmq'
import { RunDownloadJobParams } from '../../types/downloads.js'
import { QueueService } from '#services/queue_service'
import { doResumableDownload } from '../utils/downloads.js'
import { createHash } from 'crypto'
import { DockerService } from '#services/docker_service'
import { ZimService } from '#services/zim_service'
import { MapService } from '#services/map_service'
import { EmbedFileJob } from './embed_file_job.js'

/**
 * BullMQ job that performs a resumable download of a remote file (ZIM
 * archives and map files), records the result as an InstalledResource, and
 * runs per-filetype post-processing once the download completes.
 */
export class RunDownloadJob {
  // Name of the BullMQ queue this job runs on.
  static get queue() {
    return 'downloads'
  }

  // Job name used when adding entries to the queue.
  static get key() {
    return 'run-download'
  }

  /**
   * Derive a stable job id from the download URL (first 16 hex characters of
   * its SHA-256) so repeated dispatches for the same URL deduplicate.
   */
  static getJobId(url: string): string {
    return createHash('sha256').update(url).digest('hex').slice(0, 16)
  }

  /**
   * Worker entry point: download `url` to `filepath` with resume support,
   * reporting integer percentage progress to BullMQ, then run the completion
   * callback before resolving with { url, filepath }.
   */
  async handle(job: Job) {
    const { url, filepath, timeout, allowedMimeTypes, forceNew, filetype, resourceMetadata } =
      job.data as RunDownloadJobParams

    await doResumableDownload({
      url,
      filepath,
      timeout,
      allowedMimeTypes,
      forceNew,
      onProgress(progress) {
        // totalBytes may be 0/unknown; the `|| 1` avoids division by zero.
        const progressPercent = (progress.downloadedBytes / (progress.totalBytes || 1)) * 100
        job.updateProgress(Math.floor(progressPercent))
      },
      // NOTE: this `url` parameter shadows the outer `url` destructured above.
      async onComplete(url) {
        try {
          // Create InstalledResource entry if metadata was provided
          if (resourceMetadata) {
            // Imports are deferred to completion time — presumably to keep the
            // hot download path light; confirm before hoisting them.
            const { default: InstalledResource } = await import('#models/installed_resource')
            const { DateTime } = await import('luxon')
            const { getFileStatsIfExists, deleteFileIfExists } = await import('../utils/fs.js')
            const stats = await getFileStatsIfExists(filepath)

            // Look up the old entry so we can clean up the previous file after updating
            const oldEntry = await InstalledResource.query()
              .where('resource_id', resourceMetadata.resource_id)
              .where('resource_type', filetype as 'zim' | 'map')
              .first()
            const oldFilePath = oldEntry?.file_path ?? null

            // Upsert keyed on (resource_id, resource_type): a re-download of
            // the same resource updates the existing row in place.
            await InstalledResource.updateOrCreate(
              { resource_id: resourceMetadata.resource_id, resource_type: filetype as 'zim' | 'map' },
              {
                version: resourceMetadata.version,
                collection_ref: resourceMetadata.collection_ref,
                url: url,
                file_path: filepath,
                file_size_bytes: stats ? Number(stats.size) : null,
                installed_at: DateTime.now(),
              }
            )

            // Delete the old file if it differs from the new one
            if (oldFilePath && oldFilePath !== filepath) {
              try {
                await deleteFileIfExists(oldFilePath)
                console.log(`[RunDownloadJob] Deleted old file: ${oldFilePath}`)
              } catch (deleteError) {
                // Best-effort cleanup: a stale file is not worth failing the job.
                console.warn(
                  `[RunDownloadJob] Failed to delete old file ${oldFilePath}:`,
                  deleteError
                )
              }
            }
          }

          if (filetype === 'zim') {
            // Notify the ZIM service so the new archive is picked up.
            const dockerService = new DockerService()
            const zimService = new ZimService(dockerService)
            await zimService.downloadRemoteSuccessCallback([url], true)

            // Only dispatch embedding job if AI Assistant (Ollama) is installed
            const ollamaUrl = await dockerService.getServiceURL('nomad_ollama')
            if (ollamaUrl) {
              try {
                await EmbedFileJob.dispatch({
                  fileName: url.split('/').pop() || '',
                  filePath: filepath,
                })
              } catch (error) {
                // Embedding is optional enrichment; failure must not fail the download.
                console.error(`[RunDownloadJob] Error dispatching EmbedFileJob for URL ${url}:`, error)
              }
            }
          } else if (filetype === 'map') {
            const mapsService = new MapService()
            await mapsService.downloadRemoteSuccessCallback([url], false)
          }
        } catch (error) {
          // Post-processing errors are logged but swallowed: the file itself
          // downloaded successfully, so the job still completes.
          console.error(
            `[RunDownloadJob] Error in download success callback for URL ${url}:`,
            error
          )
        }
        job.updateProgress(100)
      },
    })

    return {
      url,
      filepath,
    }
  }

  /**
   * Find the queued job for a given download URL, if one exists.
   */
  static async getByUrl(url: string): Promise<Job | undefined> {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(url)
    return await queue.getJob(jobId)
  }

  /**
   * Enqueue a download. The job id is derived from the URL, so dispatching
   * the same URL twice returns the existing job with `created: false`.
   * Retries up to 3 times with exponential backoff.
   */
  static async dispatch(params: RunDownloadJobParams) {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(params.url)

    try {
      const job = await queue.add(this.key, params, {
        jobId,
        attempts: 3,
        backoff: { type: 'exponential', delay: 2000 },
        removeOnComplete: true,
      })

      return {
        job,
        created: true,
        message: `Dispatched download job for URL ${params.url}`,
      }
    } catch (error) {
      if (error.message.includes('job already exists')) {
        const existing = await queue.getJob(jobId)
        return {
          job: existing,
          created: false,
          message: `Job already exists for URL ${params.url}`,
        }
      }
      throw error
    }
  }
}


================================================
FILE: admin/app/middleware/container_bindings_middleware.ts
================================================
import { Logger } from '@adonisjs/core/logger'
import { HttpContext } from '@adonisjs/core/http'
import { NextFn } from '@adonisjs/core/types/http'

/**
 * The container bindings middleware binds classes to their request
 * specific value using the container resolver.
 *
 * - We bind "HttpContext" class to the "ctx" object
 * - And bind "Logger" class to the "ctx.logger" object
 */
export default class ContainerBindingsMiddleware {
  handle(ctx: HttpContext, next: NextFn) {
    ctx.containerResolver.bindValue(HttpContext, ctx)
    ctx.containerResolver.bindValue(Logger, ctx.logger)

    return next()
  }
}


================================================
FILE: admin/app/middleware/force_json_response_middleware.ts
================================================
import type { HttpContext } from '@adonisjs/core/http'
import type { NextFn } from '@adonisjs/core/types/http'

/**
 * Updating the "Accept" header to always accept "application/json" response
 * from the server. This will force the internals of the framework like
 * validator errors or auth errors to return a JSON response.
 */
export default class ForceJsonResponseMiddleware {
  async handle({ request }: HttpContext, next: NextFn) {
    // Overwrite the incoming Accept header in place so all downstream
    // content negotiation resolves to JSON.
    request.headers().accept = 'application/json'

    return next()
  }
}


================================================
FILE: admin/app/middleware/maps_static_middleware.ts
================================================
import type { HttpContext } from '@adonisjs/core/http'
import type { NextFn } from '@adonisjs/core/types/http'
import StaticMiddleware from '@adonisjs/static/static_middleware'
import { AssetsConfig } from '@adonisjs/static/types'

/**
 * See #providers/map_static_provider.ts for explanation
 * of why this middleware exists.
 */
export default class MapsStaticMiddleware {
  constructor(
    private path: string,
    private config: AssetsConfig
  ) {}

  async handle(ctx: HttpContext, next: NextFn) {
    const staticMiddleware = new StaticMiddleware(this.path, this.config)
    return staticMiddleware.handle(ctx, next)
  }
}


================================================
FILE: admin/app/models/benchmark_result.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import type { BenchmarkType, DiskType } from '../../types/benchmark.js'

/**
 * One persisted benchmark run: the detected hardware, per-subsystem scores,
 * optional AI metrics, the composite NOMAD score, and whether/when the run
 * was submitted to the central repository.
 */
export default class BenchmarkResult extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  // UUID identifying the run (assigned by BenchmarkService), distinct from
  // the auto-increment row id.
  @column()
  declare benchmark_id: string

  @column()
  declare benchmark_type: BenchmarkType

  // Hardware information
  @column()
  declare cpu_model: string

  @column()
  declare cpu_cores: number

  @column()
  declare cpu_threads: number

  @column()
  declare ram_bytes: number

  @column()
  declare disk_type: DiskType

  @column()
  declare gpu_model: string | null

  // System benchmark scores
  @column()
  declare cpu_score: number

  @column()
  declare memory_score: number

  @column()
  declare disk_read_score: number

  @column()
  declare disk_write_score: number

  // AI benchmark scores (nullable for system-only benchmarks)
  @column()
  declare ai_tokens_per_second: number | null

  @column()
  declare ai_model_used: string | null

  @column()
  declare ai_time_to_first_token: number | null

  // Composite NOMAD score (0-100)
  @column()
  declare nomad_score: number

  // Repository submission tracking
  // The driver may return 0/1 for booleans; coerce when serializing.
  @column({
    serialize(value) {
      return Boolean(value)
    },
  })
  declare submitted_to_repository: boolean

  @column.dateTime()
  declare submitted_at: DateTime | null

  // Id returned by the repository on a successful submission.
  @column()
  declare repository_id: string | null

  @column()
  declare builder_tag: string | null

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime
}


================================================
FILE: admin/app/models/benchmark_setting.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import type { BenchmarkSettingKey } from '../../types/benchmark.js'

/**
 * Benchmark-specific key/value settings. Values are persisted as strings
 * (or null); booleans are represented as the literal string 'true'.
 */
export default class BenchmarkSetting extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  @column()
  declare key: BenchmarkSettingKey

  @column()
  declare value: string | null

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime

  /**
   * Get a setting value by key
   */
  static async getValue(key: BenchmarkSettingKey): Promise<string | null> {
    const setting = await this.findBy('key', key)
    return setting?.value ?? null
  }

  /**
   * Set a setting value by key (creates if not exists)
   */
  static async setValue(key: BenchmarkSettingKey, value: string | null): Promise<BenchmarkSetting> {
    // firstOrCreate stores the value for new rows; the save below only fires
    // when an existing row needs updating.
    const setting = await this.firstOrCreate({ key }, { key, value })
    if (setting.value !== value) {
      setting.value = value
      await setting.save()
    }
    return setting
  }

  /**
   * Get all benchmark settings as a typed object
   */
  static async getAllSettings(): Promise<{
    allow_anonymous_submission: boolean
    installation_id: string | null
    last_benchmark_run: string | null
  }> {
    const settings = await this.all()
    const map = new Map(settings.map((s) => [s.key, s.value]))

    // Map.get returns undefined for absent keys; coalesce to null so the
    // returned shape is stable.
    return {
      allow_anonymous_submission: map.get('allow_anonymous_submission') === 'true',
      installation_id: map.get('installation_id') ?? null,
      last_benchmark_run: map.get('last_benchmark_run') ?? null,
    }
  }
}


================================================
FILE: admin/app/models/chat_message.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, belongsTo, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import type { BelongsTo } from '@adonisjs/lucid/types/relations'
import ChatSession from './chat_session.js'

/**
 * A single message belonging to a ChatSession.
 */
export default class ChatMessage extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  // FK to the owning chat_sessions row.
  @column()
  declare session_id: number

  @column()
  declare role: 'system' | 'user' | 'assistant'

  @column()
  declare content: string

  // For belongsTo, `foreignKey` is the column on THIS model and `localKey`
  // is the related model's primary key. The previous options were swapped
  // (foreignKey: 'id', localKey: 'session_id'), inverting the join; this
  // now mirrors the hasMany declared on ChatSession.
  @belongsTo(() => ChatSession, { foreignKey: 'session_id', localKey: 'id' })
  declare session: BelongsTo<typeof ChatSession>

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime
}


================================================
FILE: admin/app/models/chat_session.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, hasMany, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import type { HasMany } from '@adonisjs/lucid/types/relations'
import ChatMessage from './chat_message.js'

/**
 * A chat conversation. Owns its ChatMessage rows via the `messages`
 * relation (chat_messages.session_id -> chat_sessions.id).
 */
export default class ChatSession extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  @column()
  declare title: string

  // Model name used for this conversation; null when not chosen.
  @column()
  declare model: string | null

  @hasMany(() => ChatMessage, {
    foreignKey: 'session_id',
    localKey: 'id',
  })
  declare messages: HasMany<typeof ChatMessage>

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime
}

================================================
FILE: admin/app/models/collection_manifest.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import type { ManifestType } from '../../types/collections.js'

/**
 * Cached copy of a collection manifest, keyed by manifest type.
 */
export default class CollectionManifest extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare type: ManifestType

  @column()
  declare spec_version: string

  // Stored as a JSON string: parsed on read, stringified on write. The
  // consume hook's runtime typeof guard implies it may receive an
  // already-parsed value, so the parameter is typed `unknown` (the previous
  // `string` annotation contradicted that guard).
  @column({
    consume: (value: unknown) => (typeof value === 'string' ? JSON.parse(value) : value),
    prepare: (value: any) => JSON.stringify(value),
  })
  declare spec_data: any

  // When the manifest was last fetched from its remote source.
  @column.dateTime()
  declare fetched_at: DateTime
}


================================================
FILE: admin/app/models/installed_resource.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'

/**
 * Tracks a downloaded resource (ZIM archive or map file) present on disk,
 * including where it came from and where it was installed.
 * Rows are upserted on (resource_id, resource_type) — see RunDownloadJob.
 */
export default class InstalledResource extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  // Identifier of the resource within its collection manifest.
  @column()
  declare resource_id: string

  @column()
  declare resource_type: 'zim' | 'map'

  // Reference to the collection this resource belongs to, if any.
  @column()
  declare collection_ref: string | null

  @column()
  declare version: string

  // Remote URL the file was downloaded from.
  @column()
  declare url: string

  // Absolute path of the installed file on disk.
  @column()
  declare file_path: string

  // Size at install time; null when the file could not be stat'd.
  @column()
  declare file_size_bytes: number | null

  @column.dateTime()
  declare installed_at: DateTime
}


================================================
FILE: admin/app/models/kv_store.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import { KV_STORE_SCHEMA, type KVStoreKey, type KVStoreValue } from '../../types/kv_store.js'
import { parseBoolean } from '../utils/misc.js'

/**
 * Generic key-value store model for storing various settings
 * that don't necessitate their own dedicated models.
 */
export default class KVStore extends BaseModel {
  static table = 'kv_store'
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  @column()
  declare key: KVStoreKey

  // Serialized value; always stored as a string (or null when cleared).
  @column()
  declare value: string | null

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime

  /**
   * Get a setting value by key, automatically deserializing to the correct type.
   */
  static async getValue<K extends KVStoreKey>(key: K): Promise<KVStoreValue<K> | null> {
    const setting = await this.findBy('key', key)
    if (!setting || setting.value === undefined || setting.value === null) {
      return null
    }
    const raw = String(setting.value)
    // KV_STORE_SCHEMA drives deserialization: booleans are parsed, everything
    // else is returned as the raw string.
    return (KV_STORE_SCHEMA[key] === 'boolean' ? parseBoolean(raw) : raw) as KVStoreValue<K>
  }

  /**
   * Set a setting value by key (creates if not exists), automatically serializing to string.
   */
  static async setValue<K extends KVStoreKey>(key: K, value: KVStoreValue<K>): Promise<KVStore> {
    const serialized = String(value)
    // firstOrCreate covers the insert path; the save below only fires when an
    // existing row holds a different value.
    const setting = await this.firstOrCreate({ key }, { key, value: serialized })
    if (setting.value !== serialized) {
      setting.value = serialized
      await setting.save()
    }
    return setting
  }

  /**
   * Clear a setting value by key, storing null so getValue returns null.
   */
  static async clearValue<K extends KVStoreKey>(key: K): Promise<void> {
    const setting = await this.findBy('key', key)
    if (setting && setting.value !== null) {
      setting.value = null
      await setting.save()
    }
  }
}


================================================
FILE: admin/app/models/service.ts
================================================
import { BaseModel, belongsTo, column, hasMany, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'
import type { BelongsTo, HasMany } from '@adonisjs/lucid/types/relations'
import { DateTime } from 'luxon'

export default class Service extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  @column()
  declare service_name: string

  @column()
  declare container_image: string

  @column()
  declare container_command: string | null

  @column()
  declare container_config: string | null

  @column()
  declare friendly_name: string | null

  @column()
  declare description: string | null

  @column()
  declare powered_by: string | null

  @column()
  declare display_order: number | null

  @column()
  declare icon: string | null // must be a TablerIcons name to be properly rendered in the UI (e.g. "IconBrandDocker")

  @column({
    serialize(value) {
      return Boolean(value)
    },
  })
  declare installed: boolean

  @column()
  declare installation_status: 'idle' | 'installing' | 'error'

  @column()
  declare depends_on: string | null

  // For services that are dependencies for other services - not intended to be installed directly by users
  @column({
    serialize(value) {
      return Boolean(value)
    },
  })
  declare is_dependency_service: boolean

  @column()
  declare ui_location: string | null

  @column()
  declare metadata: string | null

  @column()
  declare source_repo: string | null

  @column()
  declare available_update_version: string | null

  @column.dateTime()
  declare update_checked_at: DateTime | null

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime | null

  // Define a self-referential relationship for dependencies
  @belongsTo(() => Service, {
    foreignKey: 'depends_on',
  })
  declare dependency: BelongsTo<typeof Service>

  @hasMany(() => Service, {
    foreignKey: 'depends_on',
  })
  declare dependencies: HasMany<typeof Service>
}


================================================
FILE: admin/app/models/wikipedia_selection.ts
================================================
import { DateTime } from 'luxon'
import { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'

/**
 * Tracks the lifecycle of a user's Wikipedia download choice, from selection
 * through download to installed (or failed).
 */
export default class WikipediaSelection extends BaseModel {
  static namingStrategy = new SnakeCaseNamingStrategy()

  @column({ isPrimary: true })
  declare id: number

  // Identifier of the Wikipedia option the user selected.
  @column()
  declare option_id: string

  @column()
  declare url: string | null

  @column()
  declare filename: string | null

  @column()
  declare status: 'none' | 'downloading' | 'installed' | 'failed'

  @column.dateTime({ autoCreate: true })
  declare created_at: DateTime

  @column.dateTime({ autoCreate: true, autoUpdate: true })
  declare updated_at: DateTime
}


================================================
FILE: admin/app/services/benchmark_service.ts
================================================
import { inject } from '@adonisjs/core'
import logger from '@adonisjs/core/services/logger'
import transmit from '@adonisjs/transmit/services/main'
import si from 'systeminformation'
import axios from 'axios'
import { DateTime } from 'luxon'
import BenchmarkResult from '#models/benchmark_result'
import BenchmarkSetting from '#models/benchmark_setting'
import { SystemService } from '#services/system_service'
import type {
  BenchmarkType,
  BenchmarkStatus,
  BenchmarkProgress,
  HardwareInfo,
  DiskType,
  SystemScores,
  AIScores,
  SysbenchCpuResult,
  SysbenchMemoryResult,
  SysbenchDiskResult,
  RepositorySubmission,
  RepositorySubmitResponse,
  RepositoryStats,
} from '../../types/benchmark.js'
import { randomUUID, createHmac } from 'node:crypto'
import { DockerService } from './docker_service.js'
import { SERVICE_NAMES } from '../../constants/service_names.js'
import { BROADCAST_CHANNELS } from '../../constants/broadcast.js'
import Dockerode from 'dockerode'

// HMAC secret for signing submissions to the benchmark repository
// This provides basic protection against casual API abuse.
// Note: Since NOMAD is open source, a determined attacker could extract this.
// For stronger protection, see challenge-response authentication.
const BENCHMARK_HMAC_SECRET = '778ba65d0bc0e23119e5ffce4b3716648a7d071f0a47ec3f'

// Re-export default weights for use in service
const SCORE_WEIGHTS = {
  ai_tokens_per_second: 0.30,
  cpu: 0.25,
  memory: 0.15,
  ai_ttft: 0.10,
  disk_read: 0.10,
  disk_write: 0.10,
}

// Benchmark configuration constants
const SYSBENCH_IMAGE = 'severalnines/sysbench:latest'
const SYSBENCH_CONTAINER_NAME = 'nomad_benchmark_sysbench'

// Reference model for AI benchmark - small but meaningful
const AI_BENCHMARK_MODEL = 'llama3.2:1b'
const AI_BENCHMARK_PROMPT = 'Explain recursion in programming in exactly 100 words.'

// Reference scores for normalization (calibrated to 0-100 scale)
// These represent "expected" scores for a mid-range system (score ~50)
const REFERENCE_SCORES = {
  cpu_events_per_second: 5000, // sysbench cpu events/sec for ~50 score
  memory_ops_per_second: 5000000, // sysbench memory ops/sec for ~50 score
  disk_read_mb_per_sec: 500, // 500 MB/s read for ~50 score
  disk_write_mb_per_sec: 400, // 400 MB/s write for ~50 score
  ai_tokens_per_second: 30, // 30 tok/s for ~50 score
  ai_ttft_ms: 500, // 500ms time to first token for ~50 score (lower is better)
}

@inject()
export class BenchmarkService {
  // In-memory state of the currently executing benchmark. Single-flight:
  // _runBenchmark refuses to start while currentStatus is not 'idle'.
  private currentBenchmarkId: string | null = null
  private currentStatus: BenchmarkStatus = 'idle'

  constructor(private dockerService: DockerService) {}

  /**
   * Run the complete suite — hardware detection, system benchmarks
   * (CPU/memory/disk), and the AI benchmark when available — and persist
   * the combined result.
   */
  async runFullBenchmark(): Promise<BenchmarkResult> {
    return this._runBenchmark('full', true)
  }

  /**
   * Run system benchmarks only (CPU, memory, disk); the AI benchmark is
   * skipped entirely (includeAI = false).
   */
  async runSystemBenchmarks(): Promise<BenchmarkResult> {
    return this._runBenchmark('system', false)
  }

  /**
   * Run the AI benchmark only. If the AI benchmark fails, the whole run
   * fails rather than persisting an all-zero result (see _runBenchmark).
   */
  async runAIBenchmark(): Promise<BenchmarkResult> {
    return this._runBenchmark('ai', true)
  }

  /**
   * Get the latest benchmark result
   */
  async getLatestResult(): Promise<BenchmarkResult | null> {
    return await BenchmarkResult.query().orderBy('created_at', 'desc').first()
  }

  /**
   * Get all benchmark results
   */
  async getAllResults(): Promise<BenchmarkResult[]> {
    return await BenchmarkResult.query().orderBy('created_at', 'desc')
  }

  /**
   * Benchmark result matching the given benchmark_id, or null if unknown.
   */
  async getResultById(benchmarkId: string): Promise<BenchmarkResult | null> {
    const match = await BenchmarkResult.findBy('benchmark_id', benchmarkId)
    return match
  }

  /**
   * Submit a benchmark result to the central repository
   * (benchmark.projectnomad.us).
   *
   * @param benchmarkId specific result to submit; defaults to the latest
   * @param anonymous when true, the builder_tag is omitted from the payload
   * @returns the repository's response body
   * @throws when no result exists, the result is not a 'full' benchmark with
   *   AI data, it was already submitted, or the HTTP request fails (the
   *   thrown error carries `statusCode` from the response when available)
   */
  async submitToRepository(benchmarkId?: string, anonymous?: boolean): Promise<RepositorySubmitResponse> {
    const result = benchmarkId
      ? await this.getResultById(benchmarkId)
      : await this.getLatestResult()

    if (!result) {
      throw new Error('No benchmark result found to submit')
    }

    // Only allow full benchmarks with AI data to be submitted to repository
    if (result.benchmark_type !== 'full') {
      throw new Error('Only full benchmarks can be shared with the community. Run a Full Benchmark to share your results.')
    }

    if (!result.ai_tokens_per_second || result.ai_tokens_per_second <= 0) {
      throw new Error('Benchmark must include AI performance data. Ensure AI Assistant is installed and run a Full Benchmark.')
    }

    if (result.submitted_to_repository) {
      throw new Error('Benchmark result has already been submitted')
    }

    const submission: RepositorySubmission = {
      cpu_model: result.cpu_model,
      cpu_cores: result.cpu_cores,
      cpu_threads: result.cpu_threads,
      ram_gb: Math.round(result.ram_bytes / (1024 * 1024 * 1024)),
      disk_type: result.disk_type,
      gpu_model: result.gpu_model,
      cpu_score: result.cpu_score,
      memory_score: result.memory_score,
      disk_read_score: result.disk_read_score,
      disk_write_score: result.disk_write_score,
      ai_tokens_per_second: result.ai_tokens_per_second,
      ai_time_to_first_token: result.ai_time_to_first_token,
      nomad_score: result.nomad_score,
      nomad_version: SystemService.getAppVersion(),
      benchmark_version: '1.0.0',
      builder_tag: anonymous ? null : result.builder_tag,
    }

    try {
      // Generate HMAC signature for submission verification
      // (signed over timestamp + JSON body; the server must reproduce this
      // exact concatenation to verify).
      const timestamp = Date.now().toString()
      const payload = timestamp + JSON.stringify(submission)
      const signature = createHmac('sha256', BENCHMARK_HMAC_SECRET)
        .update(payload)
        .digest('hex')

      const response = await axios.post(
        'https://benchmark.projectnomad.us/api/v1/submit',
        submission,
        {
          timeout: 30000,
          headers: {
            'X-NOMAD-Timestamp': timestamp,
            'X-NOMAD-Signature': signature,
          },
        }
      )

      // Only mark the local row as submitted when the server acknowledges.
      if (response.data.success) {
        result.submitted_to_repository = true
        result.submitted_at = DateTime.now()
        result.repository_id = response.data.repository_id
        await result.save()

        await BenchmarkSetting.setValue('last_benchmark_run', new Date().toISOString())
      }

      return response.data as RepositorySubmitResponse
    } catch (error) {
      const detail = error.response?.data?.error || error.message || 'Unknown error'
      const statusCode = error.response?.status
      logger.error(`Failed to submit benchmark to repository: ${detail} (Status: ${statusCode})`)
      
      // Create an error with the status code attached for proper handling upstream
      const err: any = new Error(`Failed to submit benchmark: ${detail}`)
      err.statusCode = statusCode
      throw err
    }
  }

  /**
   * Fetch aggregate comparison statistics from the central repository.
   * Returns null (after logging a warning) on any failure.
   */
  async getComparisonStats(): Promise<RepositoryStats | null> {
    try {
      const res = await axios.get('https://benchmark.projectnomad.us/api/v1/stats', { timeout: 10000 })
      return res.data as RepositoryStats
    } catch (error) {
      logger.warn(`Failed to fetch comparison stats: ${error.message}`)
      return null
    }
  }

  /**
   * Snapshot of the in-memory benchmark state: the current status and the
   * id of the run in progress (null when idle).
   */
  getStatus(): { status: BenchmarkStatus; benchmarkId: string | null } {
    const { currentStatus, currentBenchmarkId } = this
    return { status: currentStatus, benchmarkId: currentBenchmarkId }
  }

  /**
   * Detect system hardware (CPU, RAM, disk type, GPU) via systeminformation,
   * with Docker/nvidia-smi and CPU-name heuristics as GPU fallbacks. Also
   * transitions the benchmark status to 'detecting_hardware'.
   *
   * @returns the detected HardwareInfo
   * @throws when hardware detection fails entirely
   */
  async getHardwareInfo(): Promise<HardwareInfo> {
    this._updateStatus('detecting_hardware', 'Detecting system hardware...')

    try {
      const [cpu, mem, diskLayout, graphics] = await Promise.all([
        si.cpu(),
        si.mem(),
        si.diskLayout(),
        si.graphics(),
      ])

      // Determine disk type from primary disk
      let diskType: DiskType = 'unknown'
      if (diskLayout.length > 0) {
        const primaryDisk = diskLayout[0]
        if (primaryDisk.type?.toLowerCase().includes('nvme')) {
          diskType = 'nvme'
        } else if (primaryDisk.type?.toLowerCase().includes('ssd')) {
          diskType = 'ssd'
        } else if (primaryDisk.type?.toLowerCase().includes('hdd') || primaryDisk.interfaceType === 'SATA') {
          // SATA could be SSD or HDD, check if it's rotational
          // NOTE(review): no rotational check is actually performed — any
          // SATA disk not matching 'ssd' above is classified 'hdd'; confirm
          // whether that is intended.
          diskType = 'hdd'
        }
      }

      // Get GPU model (prefer discrete GPU with dedicated VRAM)
      let gpuModel: string | null = null
      if (graphics.controllers && graphics.controllers.length > 0) {
        // First, look for discrete GPUs (NVIDIA, AMD discrete, or any with significant VRAM)
        const discreteGpu = graphics.controllers.find((g) => {
          const vendor = g.vendor?.toLowerCase() || ''
          const model = g.model?.toLowerCase() || ''
          // NVIDIA GPUs are always discrete
          if (vendor.includes('nvidia') || model.includes('geforce') || model.includes('rtx') || model.includes('quadro')) {
            return true
          }
          // AMD discrete GPUs (Radeon, not integrated APU graphics)
          if ((vendor.includes('amd') || vendor.includes('ati')) &&
              (model.includes('radeon') || model.includes('rx ') || model.includes('vega')) &&
              !model.includes('graphics')) {
            return true
          }
          // Any GPU with dedicated VRAM > 512MB is likely discrete
          if (g.vram && g.vram > 512) {
            return true
          }
          return false
        })
        // Fall back to the first reported controller when no discrete match.
        gpuModel = discreteGpu?.model || graphics.controllers[0]?.model || null
      }

      // Fallback: Check Docker for nvidia runtime and query GPU model via nvidia-smi
      if (!gpuModel) {
        try {
          const dockerInfo = await this.dockerService.docker.info()
          const runtimes = dockerInfo.Runtimes || {}
          if ('nvidia' in runtimes) {
            logger.info('[BenchmarkService] NVIDIA container runtime detected, querying GPU model via nvidia-smi')

            // Dynamic import — presumably to avoid a circular dependency with
            // system_service; confirm before converting to a static import.
            const systemService = new (await import('./system_service.js')).SystemService(this.dockerService)
            const nvidiaInfo = await systemService.getNvidiaSmiInfo()
            if (Array.isArray(nvidiaInfo) && nvidiaInfo.length > 0) {
              gpuModel = nvidiaInfo[0].model
            } else {
              logger.warn(`[BenchmarkService] NVIDIA runtime detected but failed to get GPU info: ${typeof nvidiaInfo === 'string' ? nvidiaInfo : JSON.stringify(nvidiaInfo)}`)
            }
          }
        } catch (dockerError) {
          // GPU detection is best-effort; a Docker failure must not abort it.
          logger.warn(`[BenchmarkService] Could not query Docker info for GPU detection: ${dockerError.message}`)
        }
      }

      // Fallback: Extract integrated GPU from CPU model name
      if (!gpuModel) {
        const cpuFullName = `${cpu.manufacturer} ${cpu.brand}`

        // AMD APUs: e.g., "AMD Ryzen AI 9 HX 370 w/ Radeon 890M" -> "Radeon 890M"
        const radeonMatch = cpuFullName.match(/w\/\s*(Radeon\s+\d+\w*)/i)
        if (radeonMatch) {
          gpuModel = radeonMatch[1]
        }

        // Intel Core Ultra: These have Intel Arc Graphics integrated
        // e.g., "Intel Core Ultra 9 285HX" -> "Intel Arc Graphics (Integrated)"
        if (!gpuModel && cpu.manufacturer?.toLowerCase().includes('intel')) {
          if (cpu.brand?.toLowerCase().includes('core ultra')) {
            gpuModel = 'Intel Arc Graphics (Integrated)'
          }
        }
      }

      return {
        cpu_model: `${cpu.manufacturer} ${cpu.brand}`,
        cpu_cores: cpu.physicalCores,
        cpu_threads: cpu.cores,
        ram_bytes: mem.total,
        disk_type: diskType,
        gpu_model: gpuModel,
      }
    } catch (error) {
      logger.error(`Error detecting hardware: ${error.message}`)
      throw new Error(`Failed to detect hardware: ${error.message}`)
    }
  }

  /**
   * Main benchmark execution method.
   *
   * Orchestrates the whole flow: hardware detection, sysbench system
   * benchmarks, the optional AI inference benchmark, NOMAD score
   * calculation, and persistence of the result. Only one benchmark may run
   * at a time; every stage transition is broadcast via `_updateStatus`.
   *
   * @param type      'full' | 'system' | 'ai' — which suites to run
   * @param includeAI whether the AI benchmark should be attempted at all
   * @returns the persisted BenchmarkResult row
   * @throws Error if a benchmark is already running, or if an 'ai'-type run
   *         fails its AI benchmark (for 'full' runs an AI failure is logged
   *         and skipped instead)
   */
  private async _runBenchmark(type: BenchmarkType, includeAI: boolean): Promise<BenchmarkResult> {
    if (this.currentStatus !== 'idle') {
      throw new Error('A benchmark is already running')
    }

    this.currentBenchmarkId = randomUUID()
    this._updateStatus('starting', 'Starting benchmark...')

    try {
      // Detect hardware — broadcast the dedicated status so the UI progress
      // bar advances (previously this stage ran silently under 'starting'
      // even though 'detecting_hardware' exists in the progress map)
      this._updateStatus('detecting_hardware', 'Detecting hardware...')
      const hardware = await this.getHardwareInfo()

      // System scores default to all zeros for AI-only runs
      let systemScores: SystemScores = {
        cpu_score: 0,
        memory_score: 0,
        disk_read_score: 0,
        disk_write_score: 0,
      }

      if (type === 'full' || type === 'system') {
        systemScores = await this._runSystemBenchmarks()
      }

      // Run AI benchmark if requested and Ollama is available
      let aiScores: Partial<AIScores> = {}
      if (includeAI && (type === 'full' || type === 'ai')) {
        try {
          aiScores = await this._runAIBenchmark()
        } catch (error) {
          // For AI-only benchmarks, failing is fatal - don't save useless results with all zeros
          if (type === 'ai') {
            throw new Error(`AI benchmark failed: ${error.message}. Make sure AI Assistant is installed and running.`)
          }
          // For full benchmarks, AI is optional - continue without it
          logger.warn(`AI benchmark skipped: ${error.message}`)
        }
      }

      // Calculate NOMAD score
      this._updateStatus('calculating_score', 'Calculating NOMAD score...')
      const nomadScore = this._calculateNomadScore(systemScores, aiScores)

      // Persist the combined result
      const result = await BenchmarkResult.create({
        benchmark_id: this.currentBenchmarkId,
        benchmark_type: type,
        cpu_model: hardware.cpu_model,
        cpu_cores: hardware.cpu_cores,
        cpu_threads: hardware.cpu_threads,
        ram_bytes: hardware.ram_bytes,
        disk_type: hardware.disk_type,
        gpu_model: hardware.gpu_model,
        cpu_score: systemScores.cpu_score,
        memory_score: systemScores.memory_score,
        disk_read_score: systemScores.disk_read_score,
        disk_write_score: systemScores.disk_write_score,
        ai_tokens_per_second: aiScores.ai_tokens_per_second || null,
        ai_model_used: aiScores.ai_model_used || null,
        ai_time_to_first_token: aiScores.ai_time_to_first_token || null,
        nomad_score: nomadScore,
        submitted_to_repository: false,
      })

      this._updateStatus('completed', 'Benchmark completed successfully')
      this.currentStatus = 'idle'
      this.currentBenchmarkId = null

      return result
    } catch (error) {
      this._updateStatus('error', `Benchmark failed: ${error.message}`)
      // Reset state so a subsequent benchmark can start
      this.currentStatus = 'idle'
      this.currentBenchmarkId = null
      throw error
    }
  }

  /**
   * Execute the four sysbench suites (CPU, memory, disk read, disk write)
   * sequentially inside Docker containers and normalize the raw results.
   * Suites run one at a time so they do not skew each other's measurements.
   */
  private async _runSystemBenchmarks(): Promise<SystemScores> {
    // The sysbench image must exist locally before any container is created
    await this._ensureSysbenchImage()

    this._updateStatus('running_cpu', 'Running CPU benchmark...')
    const cpu = await this._runSysbenchCpu()

    this._updateStatus('running_memory', 'Running memory benchmark...')
    const memory = await this._runSysbenchMemory()

    this._updateStatus('running_disk_read', 'Running disk read benchmark...')
    const diskRead = await this._runSysbenchDiskRead()

    this._updateStatus('running_disk_write', 'Running disk write benchmark...')
    const diskWrite = await this._runSysbenchDiskWrite()

    // Convert raw measurements into scores relative to the reference machine
    return {
      cpu_score: this._normalizeScore(cpu.events_per_second, REFERENCE_SCORES.cpu_events_per_second),
      memory_score: this._normalizeScore(memory.operations_per_second, REFERENCE_SCORES.memory_ops_per_second),
      disk_read_score: this._normalizeScore(diskRead.read_mb_per_sec, REFERENCE_SCORES.disk_read_mb_per_sec),
      disk_write_score: this._normalizeScore(diskWrite.write_mb_per_sec, REFERENCE_SCORES.disk_write_mb_per_sec),
    }
  }

  /**
   * Run the AI inference benchmark against the local Ollama instance.
   *
   * Verifies Ollama is reachable, ensures the benchmark model is present
   * (pulling it if necessary), then performs a single non-streamed
   * generation and derives tokens/sec and time-to-first-token from Ollama's
   * eval statistics, with a rough word-count estimate as fallback.
   *
   * @returns AI benchmark scores
   * @throws Error (wrapped with an "AI benchmark failed" prefix) if Ollama
   *         is unreachable, the model cannot be obtained, or inference fails
   */
  private async _runAIBenchmark(): Promise<AIScores> {
    try {
      const ollamaAPIURL = await this.dockerService.getServiceURL(SERVICE_NAMES.OLLAMA)
      if (!ollamaAPIURL) {
        throw new Error('AI Assistant service location could not be determined. Ensure AI Assistant is installed and running.')
      }

      // Check if Ollama is available
      try {
        await axios.get(`${ollamaAPIURL}/api/tags`, { timeout: 5000 })
      } catch (error) {
        const errorCode = error.code || error.response?.status || 'unknown'
        throw new Error(`Ollama is not running or not accessible (${errorCode}). Ensure AI Assistant is installed and running.`)
      }

      // Ensure the benchmark model is available, pulling it if missing.
      // Use the dedicated 'downloading_ai_model' status — previously this
      // potentially long phase was reported as 'running_ai'
      this._updateStatus('downloading_ai_model', 'Downloading AI model...')
      const ollamaService = new (await import('./ollama_service.js')).OllamaService()
      const modelResponse = await ollamaService.downloadModel(AI_BENCHMARK_MODEL)
      if (!modelResponse.success) {
        throw new Error(`Model does not exist and failed to download: ${modelResponse.message}`)
      }

      // Run inference benchmark
      this._updateStatus('running_ai', 'Running AI benchmark...')
      const startTime = Date.now()

      const response = await axios.post(
        `${ollamaAPIURL}/api/generate`,
        {
          model: AI_BENCHMARK_MODEL,
          prompt: AI_BENCHMARK_PROMPT,
          stream: false,
        },
        { timeout: 120000 }
      )

      const endTime = Date.now()
      const totalTime = (endTime - startTime) / 1000 // seconds

      // Ollama returns eval_count (tokens generated) and eval_duration (nanoseconds)
      if (response.data.eval_count && response.data.eval_duration) {
        const tokenCount = response.data.eval_count
        const evalDurationSeconds = response.data.eval_duration / 1e9
        const tokensPerSecond = tokenCount / evalDurationSeconds

        // Time to first token approximated by prompt evaluation time
        const ttft = response.data.prompt_eval_duration
          ? response.data.prompt_eval_duration / 1e6 // Convert ns to ms
          : (totalTime * 1000) / 2 // Estimate if not available

        return {
          ai_tokens_per_second: Math.round(tokensPerSecond * 100) / 100,
          ai_model_used: AI_BENCHMARK_MODEL,
          ai_time_to_first_token: Math.round(ttft * 100) / 100,
        }
      }

      // Fallback: estimate token count from whitespace-separated words
      // (NaN from a missing response falls through to 100 via the ||)
      const estimatedTokens = response.data.response?.split(' ').length * 1.3 || 100
      const tokensPerSecond = estimatedTokens / totalTime

      return {
        ai_tokens_per_second: Math.round(tokensPerSecond * 100) / 100,
        ai_model_used: AI_BENCHMARK_MODEL,
        ai_time_to_first_token: Math.round((totalTime * 1000) / 2),
      }
    } catch (error) {
      throw new Error(`AI benchmark failed: ${error.message}`)
    }
  }

  /**
   * Calculate the weighted NOMAD score from the individual benchmark scores.
   *
   * Each component contributes `score * weight`; the sum is divided by the
   * total weight actually applied, so a run without AI results is still
   * scored fairly (the AI weights simply drop out). The result is scaled to
   * a 0-100 range, clamped, and rounded to two decimal places.
   */
  private _calculateNomadScore(systemScores: SystemScores, aiScores: Partial<AIScores>): number {
    let weightedSum = 0
    let totalWeight = 0

    // Fold one component into the running weighted average
    const accumulate = (score: number, weight: number) => {
      weightedSum += score * weight
      totalWeight += weight
    }

    accumulate(systemScores.cpu_score, SCORE_WEIGHTS.cpu)
    accumulate(systemScores.memory_score, SCORE_WEIGHTS.memory)
    accumulate(systemScores.disk_read_score, SCORE_WEIGHTS.disk_read)
    accumulate(systemScores.disk_write_score, SCORE_WEIGHTS.disk_write)

    // AI throughput contributes only when the AI benchmark produced a value
    if (aiScores.ai_tokens_per_second != null) {
      accumulate(
        this._normalizeScore(aiScores.ai_tokens_per_second, REFERENCE_SCORES.ai_tokens_per_second),
        SCORE_WEIGHTS.ai_tokens_per_second
      )
    }

    // Time-to-first-token: lower is better, so the inverse normalization applies
    if (aiScores.ai_time_to_first_token != null) {
      accumulate(
        this._normalizeScoreInverse(aiScores.ai_time_to_first_token, REFERENCE_SCORES.ai_ttft_ms),
        SCORE_WEIGHTS.ai_ttft
      )
    }

    // Component scores are 0-1 fractions, hence the final *100 scaling
    const raw = totalWeight > 0 ? (weightedSum / totalWeight) * 100 : 0
    return Math.round(Math.min(100, Math.max(0, raw)) * 100) / 100
  }

  /**
   * Normalize a raw measurement against a reference value using log2
   * scaling, giving diminishing returns for very high raw values.
   *
   * A value equal to the reference maps to 0.5. Note the trailing `/ 100`:
   * the result is a clamped 0-1 fraction, not 0-100 — the final *100
   * scaling happens in `_calculateNomadScore`.
   */
  private _normalizeScore(value: number, reference: number): number {
    if (value <= 0) return 0
    // Floor the ratio at 0.01 so log2 stays finite for tiny values
    const ratio = Math.max(0.01, value / reference)
    const scaled = 50 * (1 + Math.log2(ratio))
    return Math.max(0, Math.min(100, scaled)) / 100
  }

  /**
   * Normalize a measurement where lower is better (e.g. latency).
   *
   * Same log2 scaling as `_normalizeScore` but with the ratio inverted, so
   * smaller values score higher. Returns a clamped 0-1 fraction;
   * non-positive input gets the maximum score of 1.
   */
  private _normalizeScoreInverse(value: number, reference: number): number {
    if (value <= 0) return 1
    // Floor the ratio at 0.01 so log2 stays finite for very large values
    const ratio = Math.max(0.01, reference / value)
    const scaled = 50 * (1 + Math.log2(ratio))
    return Math.max(0, Math.min(100, scaled)) / 100
  }

  /**
   * Ensure the sysbench Docker image is present locally, pulling it if the
   * inspect call fails (image missing).
   *
   * @throws Error if the image pull fails. Previously pull failures were
   *         silently swallowed: `followProgress`'s callback is
   *         `(err, output)`, so passing `resolve` directly resolved the
   *         promise with the error instead of rejecting it.
   */
  private async _ensureSysbenchImage(): Promise<void> {
    try {
      await this.dockerService.docker.getImage(SYSBENCH_IMAGE).inspect()
    } catch {
      this._updateStatus('starting', `Pulling sysbench image...`)
      const pullStream = await this.dockerService.docker.pull(SYSBENCH_IMAGE)
      // Wait for the pull to complete, propagating any pull error
      await new Promise<void>((resolve, reject) =>
        this.dockerService.docker.modem.followProgress(pullStream, (err: Error | null) =>
          err ? reject(err) : resolve()
        )
      )
    }
  }

  /**
   * Run the sysbench CPU suite (prime calculation) in a container and parse
   * events/sec, total time, and total event count from its text report.
   */
  private async _runSysbenchCpu(): Promise<SysbenchCpuResult> {
    const output = await this._runSysbenchCommand([
      'sysbench',
      'cpu',
      '--cpu-max-prime=20000',
      '--threads=4',
      '--time=30',
      'run',
    ])

    // Extract the first capture group of a pattern, if present
    const firstGroup = (re: RegExp): string | undefined => output.match(re)?.[1]

    const events = firstGroup(/events per second:\s*([\d.]+)/i)
    const elapsed = firstGroup(/total time:\s*([\d.]+)s/i)
    const totalEvents = firstGroup(/total number of events:\s*(\d+)/i)

    return {
      events_per_second: events ? parseFloat(events) : 0,
      total_time: elapsed ? parseFloat(elapsed) : 30,
      total_events: totalEvents ? parseInt(totalEvents) : 0,
    }
  }

  /**
   * Run the sysbench memory suite in a container and parse operations/sec,
   * transfer rate (MiB/s), and total time from its text report.
   */
  private async _runSysbenchMemory(): Promise<SysbenchMemoryResult> {
    const output = await this._runSysbenchCommand([
      'sysbench',
      'memory',
      '--memory-block-size=1K',
      '--memory-total-size=10G',
      '--threads=4',
      'run',
    ])

    // Pull the relevant figures out of sysbench's report
    const ops = output.match(/Total operations:\s*\d+\s*\(([\d.]+)\s*per second\)/i)?.[1]
    const transfer = output.match(/([\d.]+)\s*MiB\/sec/i)?.[1]
    const elapsed = output.match(/total time:\s*([\d.]+)s/i)?.[1]

    return {
      operations_per_second: ops ? parseFloat(ops) : 0,
      transfer_rate_mb_per_sec: transfer ? parseFloat(transfer) : 0,
      total_time: elapsed ? parseFloat(elapsed) : 0,
    }
  }

  /**
   * Run the sysbench sequential-read fileio suite and parse its throughput.
   *
   * prepare/run/cleanup are chained inside ONE container: each container
   * has its own filesystem, so the prepared test files must be consumed
   * and removed within the same run.
   */
  private async _runSysbenchDiskRead(): Promise<SysbenchDiskResult> {
    const output = await this._runSysbenchCommand([
      'sh',
      '-c',
      'sysbench fileio --file-total-size=1G --file-num=4 prepare && ' +
        'sysbench fileio --file-total-size=1G --file-num=4 --file-test-mode=seqrd --time=30 run && ' +
        'sysbench fileio --file-total-size=1G --file-num=4 cleanup',
    ])

    // Throughput section of the sysbench report
    const mbPerSec = output.match(/read,\s*MiB\/s:\s*([\d.]+)/i)
    const iops = output.match(/reads\/s:\s*([\d.]+)/i)

    logger.debug(`[BenchmarkService] Disk read output parsing - read: ${mbPerSec?.[1]}, reads/s: ${iops?.[1]}`)

    return {
      reads_per_second: iops ? parseFloat(iops[1]) : 0,
      writes_per_second: 0,
      read_mb_per_sec: mbPerSec ? parseFloat(mbPerSec[1]) : 0,
      write_mb_per_sec: 0,
      total_time: 30,
    }
  }

  /**
   * Run the sysbench sequential-write fileio suite and parse its throughput.
   *
   * prepare/run/cleanup are chained inside ONE container: each container
   * has its own filesystem, so the prepared test files must be consumed
   * and removed within the same run.
   */
  private async _runSysbenchDiskWrite(): Promise<SysbenchDiskResult> {
    const output = await this._runSysbenchCommand([
      'sh',
      '-c',
      'sysbench fileio --file-total-size=1G --file-num=4 prepare && ' +
        'sysbench fileio --file-total-size=1G --file-num=4 --file-test-mode=seqwr --time=30 run && ' +
        'sysbench fileio --file-total-size=1G --file-num=4 cleanup',
    ])

    // Throughput section of the sysbench report
    const mbPerSec = output.match(/written,\s*MiB\/s:\s*([\d.]+)/i)
    const iops = output.match(/writes\/s:\s*([\d.]+)/i)

    logger.debug(`[BenchmarkService] Disk write output parsing - written: ${mbPerSec?.[1]}, writes/s: ${iops?.[1]}`)

    return {
      reads_per_second: 0,
      writes_per_second: iops ? parseFloat(iops[1]) : 0,
      read_mb_per_sec: 0,
      write_mb_per_sec: mbPerSec ? parseFloat(mbPerSec[1]) : 0,
      total_time: 30,
    }
  }

  /**
   * Execute a command inside a throwaway sysbench container and return its
   * combined stdout/stderr output.
   *
   * The container is created with a TTY (so Docker does not frame the log
   * stream with multiplexing headers) and WITHOUT AutoRemove (so logs can
   * still be fetched after the process exits). It is removed manually once
   * the logs have been captured.
   *
   * @throws Error if container creation, execution, or log retrieval fails
   */
  private async _runSysbenchCommand(cmd: string[]): Promise<string> {
    let container: Dockerode.Container | null = null
    try {
      container = await this.dockerService.docker.createContainer({
        Image: SYSBENCH_IMAGE,
        Cmd: cmd,
        name: `${SYSBENCH_CONTAINER_NAME}_${Date.now()}`,
        Tty: true, // prevents multiplexed stdout/stderr headers
        HostConfig: {
          AutoRemove: false, // keep the container so logs survive past exit
        },
      })

      await container.start()

      // Block until the process inside the container finishes
      await container.wait()

      const logs = await container.logs({ stdout: true, stderr: true })

      // Strip stray control characters left over from the TTY stream
      const output = logs.toString('utf8').replace(/[\x00-\x08]/g, '').trim()

      // Best-effort removal now that the logs have been captured
      try {
        await container.remove()
      } catch (removeError) {
        logger.warn(`Failed to remove sysbench container: ${removeError.message}`)
      }

      return output
    } catch (error) {
      // Force-remove the container on failure, ignoring removal errors
      // (it may already be gone)
      if (container) {
        try {
          await container.remove({ force: true })
        } catch {
          // ignore
        }
      }
      logger.error(`Sysbench command failed: ${error.message}`)
      throw new Error(`Sysbench command failed: ${error.message}`)
    }
  }

  /**
   * Record the new status and broadcast a progress update to subscribed
   * clients, then log the transition.
   */
  private _updateStatus(status: BenchmarkStatus, message: string) {
    this.currentStatus = status

    // Snapshot of the current stage for the frontend progress bar
    const payload: BenchmarkProgress = {
      status,
      progress: this._getProgressPercent(status),
      message,
      current_stage: this._getStageLabel(status),
      timestamp: new Date().toISOString(),
    }

    transmit.broadcast(BROADCAST_CHANNELS.BENCHMARK_PROGRESS, {
      benchmark_id: this.currentBenchmarkId,
      ...payload,
    })

    logger.info(`[BenchmarkService] ${status}: ${message}`)
  }

  /**
   * Map a benchmark status to its percentage on the overall progress bar.
   */
  private _getProgressPercent(status: BenchmarkStatus): number {
    switch (status) {
      case 'starting':
        return 5
      case 'detecting_hardware':
        return 10
      case 'running_cpu':
        return 25
      case 'running_memory':
        return 40
      case 'running_disk_read':
        return 55
      case 'running_disk_write':
        return 70
      case 'downloading_ai_model':
        return 80
      case 'running_ai':
        return 85
      case 'calculating_score':
        return 95
      case 'completed':
        return 100
      default:
        // 'idle', 'error', and anything unrecognized
        return 0
    }
  }

  /**
   * Map a benchmark status to a human-readable stage label; unrecognized
   * statuses fall back to the raw status string.
   */
  private _getStageLabel(status: BenchmarkStatus): string {
    switch (status) {
      case 'idle':
        return 'Idle'
      case 'starting':
        return 'Starting'
      case 'detecting_hardware':
        return 'Detecting Hardware'
      case 'running_cpu':
        return 'CPU Benchmark'
      case 'running_memory':
        return 'Memory Benchmark'
      case 'running_disk_read':
        return 'Disk Read Test'
      case 'running_disk_write':
        return 'Disk Write Test'
      case 'downloading_ai_model':
        return 'Downloading AI Model'
      case 'running_ai':
        return 'AI Inference Test'
      case 'calculating_score':
        return 'Calculating Score'
      case 'completed':
        return 'Complete'
      case 'error':
        return 'Error'
      default:
        return status
    }
  }
}


================================================
FILE: admin/app/services/chat_service.ts
================================================
import ChatSession from '#models/chat_session'
import ChatMessage from '#models/chat_message'
import logger from '@adonisjs/core/services/logger'
import { DateTime } from 'luxon'
import { inject } from '@adonisjs/core'
import { OllamaService } from './ollama_service.js'
import { DEFAULT_QUERY_REWRITE_MODEL, SYSTEM_PROMPTS } from '../../constants/ollama.js'
import { toTitleCase } from '../utils/misc.js'

@inject()
export class ChatService {
  constructor(private ollamaService: OllamaService) {}

  /**
   * Fetch all chat sessions ordered newest-first, mapped to the shape the
   * frontend expects. Returns an empty list on failure instead of throwing.
   */
  async getAllSessions() {
    try {
      const sessions = await ChatSession.query().orderBy('updated_at', 'desc')

      const mapped = []
      for (const session of sessions) {
        mapped.push({
          id: session.id.toString(),
          title: session.title,
          model: session.model,
          timestamp: session.updated_at.toJSDate(),
          lastMessage: null, // populated from messages elsewhere when needed
        })
      }
      return mapped
    } catch (error) {
      logger.error(
        `[ChatService] Failed to get sessions: ${error instanceof Error ? error.message : error}`
      )
      return []
    }
  }

  async getChatSuggestion
Download .txt
gitextract_i1jwi_c6/

├── .dockerignore
├── .github/
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   └── feature_request.yml
│   ├── dependabot.yaml
│   ├── scripts/
│   │   └── finalize-release-notes.sh
│   └── workflows/
│       ├── build-disk-collector.yml
│       ├── build-primary-image.yml
│       ├── build-sidecar-updater.yml
│       ├── release.yml
│       └── validate-collection-urls.yml
├── .gitignore
├── .releaserc.json
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── admin/
│   ├── .editorconfig
│   ├── ace.js
│   ├── adonisrc.ts
│   ├── app/
│   │   ├── controllers/
│   │   │   ├── benchmark_controller.ts
│   │   │   ├── chats_controller.ts
│   │   │   ├── collection_updates_controller.ts
│   │   │   ├── docs_controller.ts
│   │   │   ├── downloads_controller.ts
│   │   │   ├── easy_setup_controller.ts
│   │   │   ├── home_controller.ts
│   │   │   ├── maps_controller.ts
│   │   │   ├── ollama_controller.ts
│   │   │   ├── rag_controller.ts
│   │   │   ├── settings_controller.ts
│   │   │   ├── system_controller.ts
│   │   │   └── zim_controller.ts
│   │   ├── exceptions/
│   │   │   ├── handler.ts
│   │   │   └── internal_server_error_exception.ts
│   │   ├── jobs/
│   │   │   ├── check_service_updates_job.ts
│   │   │   ├── check_update_job.ts
│   │   │   ├── download_model_job.ts
│   │   │   ├── embed_file_job.ts
│   │   │   ├── run_benchmark_job.ts
│   │   │   └── run_download_job.ts
│   │   ├── middleware/
│   │   │   ├── container_bindings_middleware.ts
│   │   │   ├── force_json_response_middleware.ts
│   │   │   └── maps_static_middleware.ts
│   │   ├── models/
│   │   │   ├── benchmark_result.ts
│   │   │   ├── benchmark_setting.ts
│   │   │   ├── chat_message.ts
│   │   │   ├── chat_session.ts
│   │   │   ├── collection_manifest.ts
│   │   │   ├── installed_resource.ts
│   │   │   ├── kv_store.ts
│   │   │   ├── service.ts
│   │   │   └── wikipedia_selection.ts
│   │   ├── services/
│   │   │   ├── benchmark_service.ts
│   │   │   ├── chat_service.ts
│   │   │   ├── collection_manifest_service.ts
│   │   │   ├── collection_update_service.ts
│   │   │   ├── container_registry_service.ts
│   │   │   ├── docker_service.ts
│   │   │   ├── docs_service.ts
│   │   │   ├── download_service.ts
│   │   │   ├── map_service.ts
│   │   │   ├── ollama_service.ts
│   │   │   ├── queue_service.ts
│   │   │   ├── rag_service.ts
│   │   │   ├── system_service.ts
│   │   │   ├── system_update_service.ts
│   │   │   ├── zim_extraction_service.ts
│   │   │   └── zim_service.ts
│   │   ├── utils/
│   │   │   ├── downloads.ts
│   │   │   ├── fs.ts
│   │   │   ├── misc.ts
│   │   │   └── version.ts
│   │   └── validators/
│   │       ├── benchmark.ts
│   │       ├── chat.ts
│   │       ├── common.ts
│   │       ├── curated_collections.ts
│   │       ├── download.ts
│   │       ├── ollama.ts
│   │       ├── rag.ts
│   │       ├── settings.ts
│   │       ├── system.ts
│   │       └── zim.ts
│   ├── bin/
│   │   ├── console.ts
│   │   ├── server.ts
│   │   └── test.ts
│   ├── commands/
│   │   ├── benchmark/
│   │   │   ├── results.ts
│   │   │   ├── run.ts
│   │   │   └── submit.ts
│   │   └── queue/
│   │       └── work.ts
│   ├── config/
│   │   ├── app.ts
│   │   ├── bodyparser.ts
│   │   ├── cors.ts
│   │   ├── database.ts
│   │   ├── hash.ts
│   │   ├── inertia.ts
│   │   ├── logger.ts
│   │   ├── queue.ts
│   │   ├── session.ts
│   │   ├── shield.ts
│   │   ├── static.ts
│   │   ├── transmit.ts
│   │   └── vite.ts
│   ├── constants/
│   │   ├── broadcast.ts
│   │   ├── kv_store.ts
│   │   ├── misc.ts
│   │   ├── ollama.ts
│   │   ├── service_names.ts
│   │   └── zim_extraction.ts
│   ├── database/
│   │   ├── migrations/
│   │   │   ├── 1751086751801_create_services_table.ts
│   │   │   ├── 1763499145832_update_services_table.ts
│   │   │   ├── 1764912210741_create_curated_collections_table.ts
│   │   │   ├── 1764912270123_create_curated_collection_resources_table.ts
│   │   │   ├── 1768170944482_update_services_add_installation_statuses_table.ts
│   │   │   ├── 1768453747522_update_services_add_icon.ts
│   │   │   ├── 1769097600001_create_benchmark_results_table.ts
│   │   │   ├── 1769097600002_create_benchmark_settings_table.ts
│   │   │   ├── 1769300000001_add_powered_by_and_display_order_to_services.ts
│   │   │   ├── 1769300000002_update_services_friendly_names.ts
│   │   │   ├── 1769324448000_add_builder_tag_to_benchmark_results.ts
│   │   │   ├── 1769400000001_create_installed_tiers_table.ts
│   │   │   ├── 1769400000002_create_kv_store_table.ts
│   │   │   ├── 1769500000001_create_wikipedia_selection_table.ts
│   │   │   ├── 1769646771604_create_create_chat_sessions_table.ts
│   │   │   ├── 1769646798266_create_create_chat_messages_table.ts
│   │   │   ├── 1769700000001_create_zim_file_metadata_table.ts
│   │   │   ├── 1770269324176_add_unique_constraint_to_curated_collection_resources_table.ts
│   │   │   ├── 1770273423670_drop_installed_tiers_table.ts
│   │   │   ├── 1770849108030_create_create_collection_manifests_table.ts
│   │   │   ├── 1770849119787_create_create_installed_resources_table.ts
│   │   │   ├── 1770850092871_create_drop_legacy_curated_tables_table.ts
│   │   │   ├── 1771000000001_add_update_fields_to_services.ts
│   │   │   └── 1771000000002_pin_latest_service_images.ts
│   │   └── seeders/
│   │       └── service_seeder.ts
│   ├── docs/
│   │   ├── about.md
│   │   ├── faq.md
│   │   ├── getting-started.md
│   │   ├── home.md
│   │   ├── release-notes.md
│   │   └── use-cases.md
│   ├── eslint.config.js
│   ├── inertia/
│   │   ├── app/
│   │   │   └── app.tsx
│   │   ├── components/
│   │   │   ├── ActiveDownloads.tsx
│   │   │   ├── ActiveEmbedJobs.tsx
│   │   │   ├── ActiveModelDownloads.tsx
│   │   │   ├── Alert.tsx
│   │   │   ├── BouncingDots.tsx
│   │   │   ├── BouncingLogo.tsx
│   │   │   ├── BuilderTagSelector.tsx
│   │   │   ├── CategoryCard.tsx
│   │   │   ├── CuratedCollectionCard.tsx
│   │   │   ├── DebugInfoModal.tsx
│   │   │   ├── DownloadURLModal.tsx
│   │   │   ├── DynamicIcon.tsx
│   │   │   ├── Footer.tsx
│   │   │   ├── HorizontalBarChart.tsx
│   │   │   ├── InfoTooltip.tsx
│   │   │   ├── InstallActivityFeed.tsx
│   │   │   ├── LoadingSpinner.tsx
│   │   │   ├── MarkdocRenderer.tsx
│   │   │   ├── ProgressBar.tsx
│   │   │   ├── StorageProjectionBar.tsx
│   │   │   ├── StyledButton.tsx
│   │   │   ├── StyledModal.tsx
│   │   │   ├── StyledSectionHeader.tsx
│   │   │   ├── StyledSidebar.tsx
│   │   │   ├── StyledTable.tsx
│   │   │   ├── ThemeToggle.tsx
│   │   │   ├── TierSelectionModal.tsx
│   │   │   ├── UpdateServiceModal.tsx
│   │   │   ├── WikipediaSelector.tsx
│   │   │   ├── chat/
│   │   │   │   ├── ChatAssistantAvatar.tsx
│   │   │   │   ├── ChatButton.tsx
│   │   │   │   ├── ChatInterface.tsx
│   │   │   │   ├── ChatMessageBubble.tsx
│   │   │   │   ├── ChatModal.tsx
│   │   │   │   ├── ChatSidebar.tsx
│   │   │   │   ├── KnowledgeBaseModal.tsx
│   │   │   │   └── index.tsx
│   │   │   ├── file-uploader/
│   │   │   │   ├── index.css
│   │   │   │   └── index.tsx
│   │   │   ├── inputs/
│   │   │   │   ├── Input.tsx
│   │   │   │   └── Switch.tsx
│   │   │   ├── layout/
│   │   │   │   └── BackToHomeHeader.tsx
│   │   │   ├── maps/
│   │   │   │   └── MapComponent.tsx
│   │   │   ├── markdoc/
│   │   │   │   ├── Heading.tsx
│   │   │   │   ├── Image.tsx
│   │   │   │   ├── List.tsx
│   │   │   │   ├── ListItem.tsx
│   │   │   │   └── Table.tsx
│   │   │   └── systeminfo/
│   │   │       ├── CircularGauge.tsx
│   │   │       ├── InfoCard.tsx
│   │   │       └── StatusCard.tsx
│   │   ├── context/
│   │   │   ├── ModalContext.ts
│   │   │   └── NotificationContext.ts
│   │   ├── css/
│   │   │   └── app.css
│   │   ├── hooks/
│   │   │   ├── useDebounce.ts
│   │   │   ├── useDiskDisplayData.ts
│   │   │   ├── useDownloads.ts
│   │   │   ├── useEmbedJobs.ts
│   │   │   ├── useErrorNotification.ts
│   │   │   ├── useInternetStatus.ts
│   │   │   ├── useMapRegionFiles.ts
│   │   │   ├── useOllamaModelDownloads.ts
│   │   │   ├── useServiceInstallationActivity.ts
│   │   │   ├── useServiceInstalledStatus.tsx
│   │   │   ├── useSystemInfo.ts
│   │   │   ├── useSystemSetting.ts
│   │   │   ├── useTheme.ts
│   │   │   └── useUpdateAvailable.ts
│   │   ├── layouts/
│   │   │   ├── AppLayout.tsx
│   │   │   ├── DocsLayout.tsx
│   │   │   ├── MapsLayout.tsx
│   │   │   └── SettingsLayout.tsx
│   │   ├── lib/
│   │   │   ├── api.ts
│   │   │   ├── builderTagWords.ts
│   │   │   ├── classNames.ts
│   │   │   ├── collections.ts
│   │   │   ├── navigation.ts
│   │   │   └── util.ts
│   │   ├── pages/
│   │   │   ├── about.tsx
│   │   │   ├── chat.tsx
│   │   │   ├── docs/
│   │   │   │   └── show.tsx
│   │   │   ├── easy-setup/
│   │   │   │   ├── complete.tsx
│   │   │   │   └── index.tsx
│   │   │   ├── errors/
│   │   │   │   ├── not_found.tsx
│   │   │   │   └── server_error.tsx
│   │   │   ├── home.tsx
│   │   │   ├── maps.tsx
│   │   │   └── settings/
│   │   │       ├── apps.tsx
│   │   │       ├── benchmark.tsx
│   │   │       ├── legal.tsx
│   │   │       ├── maps.tsx
│   │   │       ├── models.tsx
│   │   │       ├── support.tsx
│   │   │       ├── system.tsx
│   │   │       ├── update.tsx
│   │   │       └── zim/
│   │   │           ├── index.tsx
│   │   │           └── remote-explorer.tsx
│   │   ├── providers/
│   │   │   ├── ModalProvider.tsx
│   │   │   ├── NotificationProvider.tsx
│   │   │   └── ThemeProvider.tsx
│   │   └── tsconfig.json
│   ├── package.json
│   ├── providers/
│   │   └── map_static_provider.ts
│   ├── resources/
│   │   └── views/
│   │       └── inertia_layout.edge
│   ├── start/
│   │   ├── env.ts
│   │   ├── kernel.ts
│   │   └── routes.ts
│   ├── tailwind.config.ts
│   ├── tests/
│   │   └── bootstrap.ts
│   ├── tsconfig.json
│   ├── types/
│   │   ├── benchmark.ts
│   │   ├── chat.ts
│   │   ├── collections.ts
│   │   ├── docker.ts
│   │   ├── downloads.ts
│   │   ├── files.ts
│   │   ├── kv_store.ts
│   │   ├── maps.ts
│   │   ├── ollama.ts
│   │   ├── rag.ts
│   │   ├── services.ts
│   │   ├── system.ts
│   │   ├── util.ts
│   │   └── zim.ts
│   ├── util/
│   │   ├── docs.ts
│   │   ├── files.ts
│   │   └── zim.ts
│   ├── views/
│   │   └── inertia_layout.edge
│   └── vite.config.ts
├── collections/
│   ├── CATEGORIES-TODO.md
│   ├── kiwix-categories.json
│   ├── maps.json
│   └── wikipedia.json
├── install/
│   ├── collect_disk_info.sh
│   ├── entrypoint.sh
│   ├── install_nomad.sh
│   ├── management_compose.yaml
│   ├── migrate-disk-collector.md
│   ├── migrate-disk-collector.sh
│   ├── run_updater_fixes.sh
│   ├── sidecar-disk-collector/
│   │   ├── Dockerfile
│   │   └── collect-disk-info.sh
│   ├── sidecar-updater/
│   │   ├── Dockerfile
│   │   └── update-watcher.sh
│   ├── start_nomad.sh
│   ├── stop_nomad.sh
│   ├── uninstall_nomad.sh
│   ├── update_nomad.sh
│   ├── wikipedia_en_100_mini_2025-06.zim
│   └── wikipedia_en_100_mini_2026-01.zim
└── package.json
Download .txt
SYMBOL INDEX (852 symbols across 200 files)

FILE: admin/app/controllers/benchmark_controller.ts
  class BenchmarkController (line 10) | class BenchmarkController {
    method constructor (line 11) | constructor(private benchmarkService: BenchmarkService) {}
    method run (line 16) | async run({ request, response }: HttpContext) {
    method runSystem (line 83) | async runSystem({ response }: HttpContext) {
    method runAI (line 109) | async runAI({ response }: HttpContext) {
    method results (line 135) | async results({}: HttpContext) {
    method latest (line 146) | async latest({}: HttpContext) {
    method show (line 157) | async show({ params, response }: HttpContext) {
    method submit (line 170) | async submit({ request, response }: HttpContext) {
    method updateBuilderTag (line 194) | async updateBuilderTag({ request, response }: HttpContext) {
    method comparison (line 236) | async comparison({}: HttpContext) {
    method status (line 244) | async status({}: HttpContext) {
    method settings (line 251) | async settings({}: HttpContext) {
    method updateSettings (line 259) | async updateSettings({ request, response }: HttpContext) {

FILE: admin/app/controllers/chats_controller.ts
  class ChatsController (line 10) | class ChatsController {
    method constructor (line 11) | constructor(private chatService: ChatService, private systemService: S...
    method inertia (line 13) | async inertia({ inertia, response }: HttpContext) {
    method index (line 27) | async index({}: HttpContext) {
    method show (line 31) | async show({ params, response }: HttpContext) {
    method store (line 42) | async store({ request, response }: HttpContext) {
    method suggestions (line 54) | async suggestions({ response }: HttpContext) {
    method update (line 65) | async update({ params, request, response }: HttpContext) {
    method destroy (line 78) | async destroy({ params, response }: HttpContext) {
    method addMessage (line 90) | async addMessage({ params, request, response }: HttpContext) {
    method destroyAll (line 103) | async destroyAll({ response }: HttpContext) {

FILE: admin/app/controllers/collection_updates_controller.ts
  class CollectionUpdatesController (line 9) | class CollectionUpdatesController {
    method checkForUpdates (line 10) | async checkForUpdates({}: HttpContext) {
    method applyUpdate (line 15) | async applyUpdate({ request }: HttpContext) {
    method applyAllUpdates (line 22) | async applyAllUpdates({ request }: HttpContext) {

FILE: admin/app/controllers/docs_controller.ts
  class DocsController (line 6) | class DocsController {
    method constructor (line 7) | constructor(
    method list (line 11) | async list({ }: HttpContext) {
    method show (line 15) | async show({ params, inertia }: HttpContext) {

FILE: admin/app/controllers/downloads_controller.ts
  class DownloadsController (line 7) | class DownloadsController {
    method constructor (line 8) | constructor(private downloadService: DownloadService) {}
    method index (line 10) | async index() {
    method filetype (line 14) | async filetype({ request }: HttpContext) {
    method removeJob (line 19) | async removeJob({ params }: HttpContext) {

FILE: admin/app/controllers/easy_setup_controller.ts
  class EasySetupController (line 8) | class EasySetupController {
    method constructor (line 9) | constructor(
    method index (line 14) | async index({ inertia }: HttpContext) {
    method complete (line 23) | async complete({ inertia }: HttpContext) {
    method listCuratedCategories (line 27) | async listCuratedCategories({}: HttpContext) {
    method refreshManifests (line 31) | async refreshManifests({}: HttpContext) {

FILE: admin/app/controllers/home_controller.ts
  class HomeController (line 6) | class HomeController {
    method constructor (line 7) | constructor(
    method index (line 11) | async index({ response }: HttpContext) {
    method home (line 16) | async home({ inertia }: HttpContext) {

FILE: admin/app/controllers/maps_controller.ts
  class MapsController (line 13) | class MapsController {
    method constructor (line 14) | constructor(private mapService: MapService) {}
    method index (line 16) | async index({ inertia }: HttpContext) {
    method downloadBaseAssets (line 27) | async downloadBaseAssets({ request }: HttpContext) {
    method downloadRemote (line 34) | async downloadRemote({ request }: HttpContext) {
    method downloadCollection (line 45) | async downloadCollection({ request }: HttpContext) {
    method downloadRemotePreflight (line 56) | async downloadRemotePreflight({ request }: HttpContext) {
    method fetchLatestCollections (line 63) | async fetchLatestCollections({}: HttpContext) {
    method listCuratedCollections (line 68) | async listCuratedCollections({}: HttpContext) {
    method listRegions (line 72) | async listRegions({}: HttpContext) {
    method styles (line 76) | async styles({ request, response }: HttpContext) {
    method delete (line 90) | async delete({ request, response }: HttpContext) {

FILE: admin/app/controllers/ollama_controller.ts
  class OllamaController (line 13) | class OllamaController {
    method constructor (line 14) | constructor(
    method availableModels (line 20) | async availableModels({ request }: HttpContext) {
    method chat (line 31) | async chat({ request, response }: HttpContext) {
    method deleteModel (line 174) | async deleteModel({ request }: HttpContext) {
    method dispatchModelDownload (line 183) | async dispatchModelDownload({ request }: HttpContext) {
    method installedModels (line 192) | async installedModels({ }: HttpContext) {
    method getContextLimitsForModel (line 200) | private getContextLimitsForModel(modelName: string): { maxResults: num...
    method rewriteQueryWithContext (line 215) | private async rewriteQueryWithContext(

FILE: admin/app/controllers/rag_controller.ts
  class RagController (line 11) | class RagController {
    method constructor (line 12) | constructor(private ragService: RagService) { }
    method upload (line 14) | public async upload({ request, response }: HttpContext) {
    method getActiveJobs (line 45) | public async getActiveJobs({ response }: HttpContext) {
    method getJobStatus (line 50) | public async getJobStatus({ request, response }: HttpContext) {
    method getStoredFiles (line 63) | public async getStoredFiles({ response }: HttpContext) {
    method deleteFile (line 68) | public async deleteFile({ request, response }: HttpContext) {
    method scanAndSync (line 77) | public async scanAndSync({ response }: HttpContext) {

FILE: admin/app/controllers/settings_controller.ts
  class SettingsController (line 12) | class SettingsController {
    method constructor (line 13) | constructor(
    method system (line 20) | async system({ inertia }: HttpContext) {
    method apps (line 29) | async apps({ inertia }: HttpContext) {
    method legal (line 38) | async legal({ inertia }: HttpContext) {
    method support (line 42) | async support({ inertia }: HttpContext) {
    method maps (line 46) | async maps({ inertia }: HttpContext) {
    method models (line 57) | async models({ inertia }: HttpContext) {
    method update (line 74) | async update({ inertia }: HttpContext) {
    method zim (line 85) | async zim({ inertia }: HttpContext) {
    method zimRemote (line 89) | async zimRemote({ inertia }: HttpContext) {
    method benchmark (line 93) | async benchmark({ inertia }: HttpContext) {
    method getSetting (line 105) | async getSetting({ request, response }: HttpContext) {
    method updateSetting (line 111) | async updateSetting({ request, response }: HttpContext) {

FILE: admin/app/controllers/system_controller.ts
  class SystemController (line 11) | class SystemController {
    method constructor (line 12) | constructor(
    method getInternetStatus (line 19) | async getInternetStatus({ }: HttpContext) {
    method getSystemInfo (line 23) | async getSystemInfo({ }: HttpContext) {
    method getServices (line 27) | async getServices({ }: HttpContext) {
    method installService (line 31) | async installService({ request, response }: HttpContext) {
    method affectService (line 42) | async affectService({ request, response }: HttpContext) {
    method checkLatestVersion (line 52) | async checkLatestVersion({ request }: HttpContext) {
    method forceReinstallService (line 57) | async forceReinstallService({ request, response }: HttpContext) {
    method requestSystemUpdate (line 67) | async requestSystemUpdate({ response }: HttpContext) {
    method getSystemUpdateStatus (line 92) | async getSystemUpdateStatus({ response }: HttpContext) {
    method getSystemUpdateLogs (line 105) | async getSystemUpdateLogs({ response }: HttpContext) {
    method subscribeToReleaseNotes (line 111) | async subscribeToReleaseNotes({ request }: HttpContext) {
    method getDebugInfo (line 116) | async getDebugInfo({}: HttpContext) {
    method checkServiceUpdates (line 121) | async checkServiceUpdates({ response }: HttpContext) {
    method getAvailableVersions (line 126) | async getAvailableVersions({ params, response }: HttpContext) {
    method updateService (line 151) | async updateService({ request, response }: HttpContext) {
    method getHostArch (line 165) | private async getHostArch(): Promise<string> {

FILE: admin/app/controllers/zim_controller.ts
  class ZimController (line 14) | class ZimController {
    method constructor (line 15) | constructor(private zimService: ZimService) {}
    method list (line 17) | async list({}: HttpContext) {
    method listRemote (line 21) | async listRemote({ request }: HttpContext) {
    method downloadRemote (line 27) | async downloadRemote({ request }: HttpContext) {
    method listCuratedCategories (line 40) | async listCuratedCategories({}: HttpContext) {
    method downloadCategoryTier (line 44) | async downloadCategoryTier({ request }: HttpContext) {
    method delete (line 59) | async delete({ request, response }: HttpContext) {
    method getWikipediaState (line 80) | async getWikipediaState({}: HttpContext) {
    method selectWikipedia (line 84) | async selectWikipedia({ request }: HttpContext) {

FILE: admin/app/exceptions/handler.ts
  class HttpExceptionHandler (line 5) | class HttpExceptionHandler extends ExceptionHandler {
    method handle (line 32) | async handle(error: unknown, ctx: HttpContext) {
    method report (line 42) | async report(error: unknown, ctx: HttpContext) {

FILE: admin/app/exceptions/internal_server_error_exception.ts
  class InternalServerErrorException (line 3) | class InternalServerErrorException extends Exception {

FILE: admin/app/jobs/check_service_updates_job.ts
  class CheckServiceUpdatesJob (line 11) | class CheckServiceUpdatesJob {
    method queue (line 12) | static get queue() {
    method key (line 16) | static get key() {
    method handle (line 20) | async handle(_job: Job) {
    method getHostArch (line 74) | private async getHostArch(dockerService: DockerService): Promise<strin...
    method scheduleNightly (line 97) | static async scheduleNightly() {
    method dispatch (line 116) | static async dispatch() {

FILE: admin/app/jobs/check_update_job.ts
  class CheckUpdateJob (line 8) | class CheckUpdateJob {
    method queue (line 9) | static get queue() {
    method key (line 13) | static get key() {
    method handle (line 17) | async handle(_job: Job) {
    method scheduleNightly (line 44) | static async scheduleNightly() {
    method dispatch (line 63) | static async dispatch() {

FILE: admin/app/jobs/download_model_job.ts
  type DownloadModelJobParams (line 7) | interface DownloadModelJobParams {
  class DownloadModelJob (line 11) | class DownloadModelJob {
    method queue (line 12) | static get queue() {
    method key (line 16) | static get key() {
    method getJobId (line 20) | static getJobId(modelName: string): string {
    method handle (line 24) | async handle(job: Job) {
    method getByModelName (line 76) | static async getByModelName(modelName: string): Promise<Job | undefine...
    method dispatch (line 83) | static async dispatch(params: DownloadModelJobParams) {

FILE: admin/app/jobs/embed_file_job.ts
  type EmbedFileJobParams (line 10) | interface EmbedFileJobParams {
  class EmbedFileJob (line 20) | class EmbedFileJob {
    method queue (line 21) | static get queue() {
    method key (line 25) | static get key() {
    method getJobId (line 29) | static getJobId(filePath: string): string {
    method handle (line 33) | async handle(job: Job) {
    method listActiveJobs (line 174) | static async listActiveJobs(): Promise<EmbedJobWithProgress[]> {
    method getByFilePath (line 188) | static async getByFilePath(filePath: string): Promise<Job | undefined> {
    method dispatch (line 195) | static async dispatch(params: EmbedFileJobParams) {
    method getStatus (line 235) | static async getStatus(filePath: string): Promise<{

FILE: admin/app/jobs/run_benchmark_job.ts
  class RunBenchmarkJob (line 8) | class RunBenchmarkJob {
    method queue (line 9) | static get queue() {
    method key (line 13) | static get key() {
    method handle (line 17) | async handle(job: Job) {
    method dispatch (line 55) | static async dispatch(params: RunBenchmarkJobParams) {
    method getJob (line 91) | static async getJob(benchmarkId: string): Promise<Job | undefined> {
    method getJobState (line 97) | static async getJobState(benchmarkId: string): Promise<string | undefi...

FILE: admin/app/jobs/run_download_job.ts
  class RunDownloadJob (line 11) | class RunDownloadJob {
    method queue (line 12) | static get queue() {
    method key (line 16) | static get key() {
    method getJobId (line 20) | static getJobId(url: string): string {
    method handle (line 24) | async handle(job: Job) {
    method getByUrl (line 117) | static async getByUrl(url: string): Promise<Job | undefined> {
    method dispatch (line 124) | static async dispatch(params: RunDownloadJobParams) {

FILE: admin/app/middleware/container_bindings_middleware.ts
  class ContainerBindingsMiddleware (line 12) | class ContainerBindingsMiddleware {
    method handle (line 13) | handle(ctx: HttpContext, next: NextFn) {

FILE: admin/app/middleware/force_json_response_middleware.ts
  class ForceJsonResponseMiddleware (line 9) | class ForceJsonResponseMiddleware {
    method handle (line 10) | async handle({ request }: HttpContext, next: NextFn) {

FILE: admin/app/middleware/maps_static_middleware.ts
  class MapsStaticMiddleware (line 10) | class MapsStaticMiddleware {
    method constructor (line 11) | constructor(
    method handle (line 16) | async handle(ctx: HttpContext, next: NextFn) {

FILE: admin/app/models/benchmark_result.ts
  class BenchmarkResult (line 5) | class BenchmarkResult extends BaseModel {
    method serialize (line 65) | serialize(value) {

FILE: admin/app/models/benchmark_setting.ts
  class BenchmarkSetting (line 5) | class BenchmarkSetting extends BaseModel {
    method getValue (line 26) | static async getValue(key: BenchmarkSettingKey): Promise<string | null> {
    method setValue (line 34) | static async setValue(key: BenchmarkSettingKey, value: string | null):...
    method getAllSettings (line 46) | static async getAllSettings(): Promise<{

FILE: admin/app/models/chat_message.ts
  class ChatMessage (line 6) | class ChatMessage extends BaseModel {

FILE: admin/app/models/chat_session.ts
  class ChatSession (line 6) | class ChatSession extends BaseModel {

FILE: admin/app/models/collection_manifest.ts
  class CollectionManifest (line 5) | class CollectionManifest extends BaseModel {

FILE: admin/app/models/installed_resource.ts
  class InstalledResource (line 4) | class InstalledResource extends BaseModel {

FILE: admin/app/models/kv_store.ts
  class KVStore (line 10) | class KVStore extends BaseModel {
    method getValue (line 32) | static async getValue<K extends KVStoreKey>(key: K): Promise<KVStoreVa...
    method setValue (line 44) | static async setValue<K extends KVStoreKey>(key: K, value: KVStoreValu...
    method clearValue (line 57) | static async clearValue<K extends KVStoreKey>(key: K): Promise<void> {

FILE: admin/app/models/service.ts
  class Service (line 5) | class Service extends BaseModel {
    method serialize (line 39) | serialize(value) {
    method serialize (line 53) | serialize(value) {

FILE: admin/app/models/wikipedia_selection.ts
  class WikipediaSelection (line 4) | class WikipediaSelection extends BaseModel {

FILE: admin/app/services/benchmark_service.ts
  constant BENCHMARK_HMAC_SECRET (line 35) | const BENCHMARK_HMAC_SECRET = '778ba65d0bc0e23119e5ffce4b3716648a7d071f0...
  constant SCORE_WEIGHTS (line 38) | const SCORE_WEIGHTS = {
  constant SYSBENCH_IMAGE (line 48) | const SYSBENCH_IMAGE = 'severalnines/sysbench:latest'
  constant SYSBENCH_CONTAINER_NAME (line 49) | const SYSBENCH_CONTAINER_NAME = 'nomad_benchmark_sysbench'
  constant AI_BENCHMARK_MODEL (line 52) | const AI_BENCHMARK_MODEL = 'llama3.2:1b'
  constant AI_BENCHMARK_PROMPT (line 53) | const AI_BENCHMARK_PROMPT = 'Explain recursion in programming in exactly...
  constant REFERENCE_SCORES (line 57) | const REFERENCE_SCORES = {
  class BenchmarkService (line 67) | class BenchmarkService {
    method constructor (line 71) | constructor(private dockerService: DockerService) {}
    method runFullBenchmark (line 76) | async runFullBenchmark(): Promise<BenchmarkResult> {
    method runSystemBenchmarks (line 83) | async runSystemBenchmarks(): Promise<BenchmarkResult> {
    method runAIBenchmark (line 90) | async runAIBenchmark(): Promise<BenchmarkResult> {
    method getLatestResult (line 97) | async getLatestResult(): Promise<BenchmarkResult | null> {
    method getAllResults (line 104) | async getAllResults(): Promise<BenchmarkResult[]> {
    method getResultById (line 111) | async getResultById(benchmarkId: string): Promise<BenchmarkResult | nu...
    method submitToRepository (line 118) | async submitToRepository(benchmarkId?: string, anonymous?: boolean): P...
    method getComparisonStats (line 204) | async getComparisonStats(): Promise<RepositoryStats | null> {
    method getStatus (line 219) | getStatus(): { status: BenchmarkStatus; benchmarkId: string | null } {
    method getHardwareInfo (line 229) | async getHardwareInfo(): Promise<HardwareInfo> {
    method _runBenchmark (line 337) | private async _runBenchmark(type: BenchmarkType, includeAI: boolean): ...
    method _runSystemBenchmarks (line 417) | private async _runSystemBenchmarks(): Promise<SystemScores> {
    method _runAIBenchmark (line 448) | private async _runAIBenchmark(): Promise<AIScores> {
    method _calculateNomadScore (line 524) | private _calculateNomadScore(systemScores: SystemScores, aiScores: Par...
    method _normalizeScore (line 572) | private _normalizeScore(value: number, reference: number): number {
    method _normalizeScoreInverse (line 584) | private _normalizeScoreInverse(value: number, reference: number): numb...
    method _ensureSysbenchImage (line 595) | private async _ensureSysbenchImage(): Promise<void> {
    method _runSysbenchCpu (line 608) | private async _runSysbenchCpu(): Promise<SysbenchCpuResult> {
    method _runSysbenchMemory (line 633) | private async _runSysbenchMemory(): Promise<SysbenchMemoryResult> {
    method _runSysbenchDiskRead (line 658) | private async _runSysbenchDiskRead(): Promise<SysbenchDiskResult> {
    method _runSysbenchDiskWrite (line 687) | private async _runSysbenchDiskWrite(): Promise<SysbenchDiskResult> {
    method _runSysbenchCommand (line 716) | private async _runSysbenchCommand(cmd: string[]): Promise<string> {
    method _updateStatus (line 773) | private _updateStatus(status: BenchmarkStatus, message: string) {
    method _getProgressPercent (line 795) | private _getProgressPercent(status: BenchmarkStatus): number {
    method _getStageLabel (line 816) | private _getStageLabel(status: BenchmarkStatus): string {

FILE: admin/app/services/chat_service.ts
  class ChatService (line 11) | class ChatService {
    method constructor (line 12) | constructor(private ollamaService: OllamaService) {}
    method getAllSessions (line 14) | async getAllSessions() {
    method getChatSuggestions (line 32) | async getChatSuggestions() {
    method getSession (line 99) | async getSession(sessionId: number) {
    method createSession (line 129) | async createSession(title: string, model?: string) {
    method updateSession (line 150) | async updateSession(sessionId: number, data: { title?: string; model?:...
    method addMessage (line 179) | async addMessage(sessionId: number, role: 'system' | 'user' | 'assista...
    method deleteSession (line 208) | async deleteSession(sessionId: number) {
    method getMessageCount (line 223) | async getMessageCount(sessionId: number): Promise<number> {
    method generateTitle (line 235) | async generateTitle(sessionId: number, userMessage: string, assistantM...
    method deleteAllSessions (line 276) | async deleteAllSessions() {

FILE: admin/app/services/collection_manifest_service.ts
  constant SPEC_URLS (line 25) | const SPEC_URLS: Record<ManifestType, string> = {
  constant VALIDATORS (line 31) | const VALIDATORS: Record<ManifestType, any> = {
  class CollectionManifestService (line 37) | class CollectionManifestService {
    method fetchAndCacheSpec (line 42) | async fetchAndCacheSpec(type: ManifestType): Promise<boolean> {
    method getCachedSpec (line 77) | async getCachedSpec<T>(type: ManifestType): Promise<T | null> {
    method getSpecWithFallback (line 83) | async getSpecWithFallback<T>(type: ManifestType): Promise<T | null> {
    method getCategoriesWithStatus (line 94) | async getCategoriesWithStatus(): Promise<CategoryWithStatus[]> {
    method getMapCollectionsWithStatus (line 107) | async getMapCollectionsWithStatus(): Promise<CollectionWithStatus[]> {
    method resolveTierResources (line 127) | static resolveTierResources(tier: SpecTier, allTiers: SpecTier[]): Spe...
    method _resolveTierResourcesInner (line 132) | private static _resolveTierResourcesInner(
    method getInstalledTierForCategory (line 153) | getInstalledTierForCategory(
    method parseZimFilename (line 175) | static parseZimFilename(filename: string): { resource_id: string; vers...
    method parseMapFilename (line 182) | static parseMapFilename(filename: string): { resource_id: string; vers...
    method reconcileFromFilesystem (line 191) | async reconcileFromFilesystem(): Promise<{ zim: number; map: number }> {

FILE: admin/app/services/collection_update_service.ts
  constant MAP_STORAGE_PATH (line 15) | const MAP_STORAGE_PATH = '/storage/maps'
  constant ZIM_MIME_TYPES (line 17) | const ZIM_MIME_TYPES = ['application/x-zim', 'application/x-openzim', 'a...
  constant PMTILES_MIME_TYPES (line 18) | const PMTILES_MIME_TYPES = ['application/vnd.pmtiles', 'application/octe...
  class CollectionUpdateService (line 20) | class CollectionUpdateService {
    method checkForUpdates (line 21) | async checkForUpdates(): Promise<ContentUpdateCheckResult> {
    method applyUpdate (line 82) | async applyUpdate(
    method applyAllUpdates (line 126) | async applyAllUpdates(
    method buildFilename (line 144) | private buildFilename(update: ResourceUpdateInfo): string {
    method buildFilepath (line 151) | private buildFilepath(update: ResourceUpdateInfo, filename: string): s...

FILE: admin/app/services/container_registry_service.ts
  type ParsedImageReference (line 4) | interface ParsedImageReference {
  type AvailableUpdate (line 13) | interface AvailableUpdate {
  type TokenCacheEntry (line 19) | interface TokenCacheEntry {
  constant SEMVER_TAG_PATTERN (line 24) | const SEMVER_TAG_PATTERN = /^v?(\d+\.\d+(?:\.\d+)?)$/
  constant PLATFORM_SUFFIXES (line 25) | const PLATFORM_SUFFIXES = ['-arm64', '-amd64', '-alpine', '-slim', '-cud...
  constant REJECTED_TAGS (line 26) | const REJECTED_TAGS = new Set(['latest', 'nightly', 'edge', 'dev', 'beta...
  class ContainerRegistryService (line 28) | class ContainerRegistryService {
    method parseImageReference (line 36) | parseImageReference(image: string): ParsedImageReference {
    method getToken (line 82) | private async getToken(registry: string, fullName: string): Promise<st...
    method listTags (line 123) | async listTags(parsed: ParsedImageReference): Promise<string[]> {
    method checkArchSupport (line 158) | async checkArchSupport(parsed: ParsedImageReference, tag: string, host...
    method getSourceUrl (line 208) | async getSourceUrl(parsed: ParsedImageReference): Promise<string | nul...
    method detectReleaseTagPrefix (line 297) | async detectReleaseTagPrefix(sourceUrl: string, sampleTag: string): Pr...
    method buildReleaseUrl (line 348) | buildReleaseUrl(sourceUrl: string, tag: string, releaseTagPrefix: stri...
    method filterCompatibleUpdates (line 366) | filterCompatibleUpdates(
    method getAvailableUpdates (line 395) | async getAvailableUpdates(
    method fetchWithRetry (line 459) | private async fetchWithRetry(

FILE: admin/app/services/docker_service.ts
  class DockerService (line 17) | class DockerService {
    method constructor (line 22) | constructor() {
    method affectContainer (line 34) | async affectContainer(
    method getServicesStatus (line 106) | async getServicesStatus(): Promise<
    method getServiceURL (line 138) | async getServiceURL(serviceName: string): Promise<string | null> {
    method createContainerPreflight (line 179) | async createContainerPreflight(
    method forceReinstall (line 250) | async forceReinstall(serviceName: string): Promise<{ success: boolean;...
    method _createContainer (line 382) | async _createContainer(
    method _checkIfServiceContainerExists (line 579) | async _checkIfServiceContainerExists(serviceName: string): Promise<boo...
    method _removeServiceContainer (line 589) | async _removeServiceContainer(
    method _runPreinstallActions__KiwixServe (line 612) | private async _runPreinstallActions__KiwixServe(): Promise<void> {
    method _cleanupFailedInstallation (line 661) | private async _cleanupFailedInstallation(serviceName: string): Promise...
    method _detectGPUType (line 686) | private async _detectGPUType(): Promise<{ type: 'nvidia' | 'amd' | 'no...
    method _persistGPUType (line 754) | private async _persistGPUType(type: 'nvidia' | 'amd'): Promise<void> {
    method updateContainer (line 823) | async updateContainer(
    method _broadcast (line 1038) | private _broadcast(service: string, status: string, message: string) {
    method _parseContainerConfig (line 1048) | private _parseContainerConfig(containerConfig: any): any {
    method _checkImageExists (line 1072) | private async _checkImageExists(imageName: string): Promise<boolean> {

FILE: admin/app/services/docs_service.ts
  class DocsService (line 8) | class DocsService {
    method getDocs (line 20) | async getDocs() {
    method parse (line 41) | parse(content: string) {
    method parseFile (line 61) | async parseFile(_filename: string) {
    method prettify (line 96) | private prettify(filename: string) {
    method getConfig (line 108) | private getConfig() {

FILE: admin/app/services/download_service.ts
  class DownloadService (line 9) | class DownloadService {
    method constructor (line 10) | constructor(private queueService: QueueService) {}
    method listDownloadJobs (line 12) | async listDownloadJobs(filetype?: string): Promise<DownloadJobWithProg...
    method removeFailedJob (line 54) | async removeFailedJob(jobId: string): Promise<void> {

FILE: admin/app/services/map_service.ts
  constant BASE_ASSETS_MIME_TYPES (line 24) | const BASE_ASSETS_MIME_TYPES = [
  constant PMTILES_ATTRIBUTION (line 30) | const PMTILES_ATTRIBUTION =
  constant PMTILES_MIME_TYPES (line 32) | const PMTILES_MIME_TYPES = ['application/vnd.pmtiles', 'application/octe...
  type IMapService (line 34) | interface IMapService {
  class MapService (line 38) | class MapService implements IMapService {
    method listRegions (line 46) | async listRegions() {
    method downloadBaseAssets (line 56) | async downloadBaseAssets(url?: string) {
    method downloadCollection (line 94) | async downloadCollection(slug: string): Promise<string[] | null> {
    method downloadRemoteSuccessCallback (line 145) | async downloadRemoteSuccessCallback(urls: string[], _: boolean) {
    method downloadRemote (line 176) | async downloadRemote(url: string): Promise<{ filename: string; jobId?:...
    method downloadRemotePreflight (line 232) | async downloadRemotePreflight(
    method generateStylesJSON (line 263) | async generateStylesJSON(host: string | null = null, protocol: string ...
    method listCuratedCollections (line 297) | async listCuratedCollections(): Promise<CollectionWithStatus[]> {
    method fetchLatestCollections (line 302) | async fetchLatestCollections(): Promise<boolean> {
    method ensureBaseAssets (line 307) | async ensureBaseAssets(): Promise<boolean> {
    method checkBaseAssetsExist (line 316) | private async checkBaseAssetsExist(useCache: boolean = true): Promise<...
    method listAllMapStorageItems (line 340) | private async listAllMapStorageItems(): Promise<FileEntry[]> {
    method generateSourcesArray (line 345) | private generateSourcesArray(host: string | null, regions: FileEntry[]...
    method generateStylesFile (line 369) | private async generateStylesFile(
    method delete (line 401) | async delete(file: string): Promise<void> {
    method getPublicFileBaseUrl (line 436) | private getPublicFileBaseUrl(specifiedHost: string | null, childPath: ...

FILE: admin/app/services/ollama_service.ts
  constant NOMAD_MODELS_API_PATH (line 17) | const NOMAD_MODELS_API_PATH = '/api/v1/ollama/models'
  constant MODELS_CACHE_FILE (line 18) | const MODELS_CACHE_FILE = path.join(process.cwd(), 'storage', 'ollama-mo...
  constant CACHE_MAX_AGE_MS (line 19) | const CACHE_MAX_AGE_MS = 24 * 60 * 60 * 1000 // 24 hours
  class OllamaService (line 22) | class OllamaService {
    method constructor (line 26) | constructor() { }
    method _initializeOllamaClient (line 28) | private async _initializeOllamaClient() {
    method _ensureDependencies (line 42) | private async _ensureDependencies() {
    method downloadModel (line 54) | async downloadModel(model: string, progressCallback?: (percent: number...
    method dispatchModelDownload (line 97) | async dispatchModelDownload(modelName: string): Promise<{ success: boo...
    method getClient (line 121) | public async getClient() {
    method chat (line 126) | public async chat(chatRequest: ChatRequest & { stream?: boolean }) {
    method chatStream (line 137) | public async chatStream(chatRequest: ChatRequest) {
    method checkModelHasThinking (line 148) | public async checkModelHasThinking(modelName: string): Promise<boolean> {
    method deleteModel (line 161) | public async deleteModel(modelName: string) {
    method getModels (line 172) | public async getModels(includeEmbeddings = false) {
    method getAvailableModels (line 185) | async getAvailableModels(
    method retrieveAndRefreshModels (line 246) | private async retrieveAndRefreshModels(
    method readModelsFromCache (line 295) | private async readModelsFromCache(): Promise<NomadOllamaModel[] | null> {
    method writeModelsToCache (line 325) | private async writeModelsToCache(models: NomadOllamaModel[]): Promise<...
    method sortModels (line 337) | private sortModels(models: NomadOllamaModel[], sort?: 'pulls' | 'name'...
    method broadcastDownloadProgress (line 382) | private broadcastDownloadProgress(model: string, percent: number) {
    method fuseSearchModels (line 391) | private fuseSearchModels(models: NomadOllamaModel[], query: string): N...

FILE: admin/app/services/queue_service.ts
  class QueueService (line 4) | class QueueService {
    method getQueue (line 7) | getQueue(name: string): Queue {
    method close (line 17) | async close() {

FILE: admin/app/services/rag_service.ts
  class RagService (line 22) | class RagService {
    method constructor (line 40) | constructor(
    method _initializeQdrantClient (line 45) | private async _initializeQdrantClient() {
    method _ensureDependencies (line 58) | private async _ensureDependencies() {
    method _ensureCollection (line 64) | private async _ensureCollection(
    method sanitizeText (line 104) | private sanitizeText(text: string): string {
    method estimateTokenCount (line 126) | private estimateTokenCount(text: string): number {
    method truncateToTokenLimit (line 135) | private truncateToTokenLimit(text: string, maxTokens: number): string {
    method preprocessQuery (line 196) | private preprocessQuery(query: string): string {
    method extractKeywords (line 223) | private extractKeywords(query: string): string[] {
    method embedAndStoreText (line 235) | public async embedAndStoreText(
    method preprocessImage (line 394) | private async preprocessImage(filebuffer: Buffer): Promise<Buffer> {
    method convertPDFtoImages (line 403) | private async convertPDFtoImages(filebuffer: Buffer): Promise<Buffer[]> {
    method extractPDFText (line 414) | private async extractPDFText(filebuffer: Buffer): Promise<string> {
    method extractTXTText (line 421) | private async extractTXTText(filebuffer: Buffer): Promise<string> {
    method extractImageText (line 425) | private async extractImageText(filebuffer: Buffer): Promise<string> {
    method processImageFile (line 432) | private async processImageFile(fileBuffer: Buffer): Promise<string> {
    method processPDFFile (line 441) | private async processPDFFile(fileBuffer: Buffer): Promise<string> {
    method processZIMFile (line 466) | private async processZIMFile(
    method processTextFile (line 563) | private async processTextFile(fileBuffer: Buffer): Promise<string> {
    method embedTextAndCleanup (line 567) | private async embedTextAndCleanup(
    method processAndEmbedFile (line 604) | public async processAndEmbedFile(
    method searchSimilarDocuments (line 670) | public async searchSimilarDocuments(
    method rerankResults (line 817) | private rerankResults(
    method applySourceDiversity (line 902) | private applySourceDiversity(
    method getStoredFiles (line 932) | public async getStoredFiles(): Promise<string[]> {
    method deleteFileBySource (line 975) | public async deleteFileBySource(source: string): Promise<{ success: bo...
    method discoverNomadDocs (line 1011) | public async discoverNomadDocs(force?: boolean): Promise<{ success: bo...
    method scanAndSyncStorage (line 1075) | public async scanAndSyncStorage(): Promise<{

FILE: admin/app/services/system_service.ts
  class SystemService (line 20) | class SystemService {
    method constructor (line 24) | constructor(private dockerService: DockerService) { }
    method checkServiceInstalled (line 26) | async checkServiceInstalled(serviceName: string): Promise<boolean> {
    method getInternetStatus (line 31) | async getInternetStatus(): Promise<boolean> {
    method getNvidiaSmiInfo (line 70) | async getNvidiaSmiInfo(): Promise<Array<{ vendor: string; model: strin...
    method getServices (line 130) | async getServices({ installedOnly = true }: { installedOnly?: boolean ...
    method getAppVersion (line 186) | static getAppVersion(): string {
    method getSystemInfo (line 211) | async getSystemInfo(): Promise<SystemInformationResponse | undefined> {
    method checkLatestVersion (line 315) | async checkLatestVersion(force?: boolean): Promise<{
    method subscribeToReleaseNotes (line 385) | async subscribeToReleaseNotes(email: string): Promise<{ success: boole...
    method getDebugInfo (line 413) | async getDebugInfo(): Promise<string> {
    method _formatUptime (line 507) | private _formatUptime(seconds: number): string {
    method _formatBytes (line 516) | private _formatBytes(bytes: number, decimals = 1): string {
    method updateSetting (line 524) | async updateSetting(key: KVStoreKey, value: any): Promise<void> {
    method _syncContainersWithDatabase (line 538) | private async _syncContainersWithDatabase() {
    method calculateDiskUsage (line 575) | private calculateDiskUsage(diskInfo: NomadDiskInfoRaw): NomadDiskInfo[] {

FILE: admin/app/services/system_update_service.ts
  type UpdateStatus (line 7) | interface UpdateStatus {
  class SystemUpdateService (line 14) | class SystemUpdateService {
    method requestUpdate (line 23) | async requestUpdate(): Promise<{ success: boolean; message: string }> {
    method getUpdateStatus (line 58) | getUpdateStatus(): UpdateStatus | null {
    method getUpdateLogs (line 77) | getUpdateLogs(): string {
    method isSidecarAvailable (line 93) | isSidecarAvailable(): boolean {

FILE: admin/app/services/zim_extraction_service.ts
  class ZIMExtractionService (line 9) | class ZIMExtractionService {
    method extractArchiveMetadata (line 11) | private extractArchiveMetadata(archive: Archive): ZIMArchiveMetadata {
    method extractZIMContent (line 42) | async extractZIMContent(filePath: string, opts: ExtractZIMContentOptio...
    method chooseChunkingStrategy (line 164) | private chooseChunkingStrategy(html: string, options = {
    method getCleanedHTMLString (line 178) | private getCleanedHTMLString(buff: Buffer<ArrayBufferLike>): string {
    method extractTextFromHTML (line 189) | private extractTextFromHTML(html: string): string | null {
    method extractStructuredContent (line 203) | private extractStructuredContent(html: string) {
    method hasStructuredHeadings (line 256) | private hasStructuredHeadings(html: string): boolean {
    method isArticleEntry (line 298) | private isArticleEntry(entry: Entry): boolean {

FILE: admin/app/services/zim_service.ts
  constant ZIM_MIME_TYPES (line 30) | const ZIM_MIME_TYPES = ['application/x-zim', 'application/x-openzim', 'a...
  constant WIKIPEDIA_OPTIONS_URL (line 31) | const WIKIPEDIA_OPTIONS_URL = 'https://raw.githubusercontent.com/Crossta...
  class ZimService (line 34) | class ZimService {
    method constructor (line 35) | constructor(private dockerService: DockerService) { }
    method list (line 37) | async list() {
    method listRemote (line 49) | async listRemote({
    method downloadRemote (line 140) | async downloadRemote(url: string): Promise<{ filename: string; jobId?:...
    method listCuratedCategories (line 188) | async listCuratedCategories(): Promise<CategoryWithStatus[]> {
    method downloadCategoryTier (line 193) | async downloadCategoryTier(categorySlug: string, tierSlug: string): Pr...
    method downloadRemoteSuccessCallback (line 252) | async downloadRemoteSuccessCallback(urls: string[], restart = true) {
    method delete (line 329) | async delete(file: string): Promise<void> {
    method getWikipediaOptions (line 363) | async getWikipediaOptions(): Promise<WikipediaOption[]> {
    method getWikipediaSelection (line 380) | async getWikipediaSelection(): Promise<WikipediaSelection | null> {
    method getWikipediaState (line 385) | async getWikipediaState(): Promise<WikipediaState> {
    method selectWikipedia (line 402) | async selectWikipedia(optionId: string): Promise<{ success: boolean; j...
    method onWikipediaDownloadComplete (line 521) | async onWikipediaDownloadComplete(url: string, success: boolean): Prom...

FILE: admin/app/utils/downloads.ts
  function doResumableDownload (line 17) | async function doResumableDownload({
  function doResumableDownloadWithRetry (line 181) | async function doResumableDownloadWithRetry({
  function delay (line 239) | async function delay(ms: number): Promise<void> {

FILE: admin/app/utils/fs.ts
  constant ZIM_STORAGE_PATH (line 7) | const ZIM_STORAGE_PATH = '/storage/zim'
  function listDirectoryContents (line 9) | async function listDirectoryContents(path: string): Promise<FileEntry[]> {
  function listDirectoryContentsRecursive (line 30) | async function listDirectoryContentsRecursive(path: string): Promise<Fil...
  function ensureDirectoryExists (line 49) | async function ensureDirectoryExists(path: string): Promise<void> {
  function getFile (line 65) | async function getFile(
  function getFileStatsIfExists (line 84) | async function getFileStatsIfExists(
  function deleteFileIfExists (line 101) | async function deleteFileIfExists(path: string): Promise<void> {
  function getAllFilesystems (line 111) | function getAllFilesystems(
  function matchesDevice (line 137) | function matchesDevice(fsPath: string, deviceName: string): boolean {
  function determineFileType (line 154) | function determineFileType(filename: string): 'image' | 'pdf' | 'text' |...
  function sanitizeFilename (line 174) | function sanitizeFilename(filename: string): string {

FILE: admin/app/utils/misc.ts
  function formatSpeed (line 1) | function formatSpeed(bytesPerSecond: number): string {
  function toTitleCase (line 7) | function toTitleCase(str: string): string {
  function parseBoolean (line 15) | function parseBoolean(value: any): boolean {

FILE: admin/app/utils/version.ts
  function isNewerVersion (line 7) | function isNewerVersion(version1: string, version2: string, includePreRe...
  function parseMajorVersion (line 45) | function parseMajorVersion(tag: string): number {

FILE: admin/app/validators/common.ts
  function assertNotPrivateUrl (line 14) | function assertNotPrivateUrl(urlString: string): void {

FILE: admin/bin/console.ts
  constant APP_ROOT (line 21) | const APP_ROOT = new URL('../', import.meta.url)

FILE: admin/bin/server.ts
  constant APP_ROOT (line 19) | const APP_ROOT = new URL('../', import.meta.url)

FILE: admin/bin/test.ts
  constant APP_ROOT (line 23) | const APP_ROOT = new URL('../', import.meta.url)

FILE: admin/commands/benchmark/results.ts
  class BenchmarkResults (line 4) | class BenchmarkResults extends BaseCommand {
    method run (line 21) | async run() {

FILE: admin/commands/benchmark/run.ts
  class BenchmarkRun (line 4) | class BenchmarkRun extends BaseCommand {
    method run (line 21) | async run() {

FILE: admin/commands/benchmark/submit.ts
  class BenchmarkSubmit (line 4) | class BenchmarkSubmit extends BaseCommand {
    method run (line 18) | async run() {

FILE: admin/commands/queue/work.ts
  class QueueWork (line 12) | class QueueWork extends BaseCommand {
    method run (line 27) | async run() {
    method loadJobHandlers (line 108) | private async loadJobHandlers(): Promise<[Map<string, any>, Map<string...
    method getConcurrencyForQueue (line 133) | private getConcurrencyForQueue(queueName: string): number {

FILE: admin/config/hash.ts
  type HashersList (line 23) | interface HashersList extends InferHashers<typeof hashConfig> {}

FILE: admin/config/inertia.ts
  type SharedProps (line 36) | interface SharedProps extends InferSharedProps<typeof inertiaConfig> {}

FILE: admin/config/logger.ts
  type LoggersList (line 35) | interface LoggersList extends InferLoggers<typeof loggerConfig> { }

FILE: admin/constants/broadcast.ts
  constant BROADCAST_CHANNELS (line 2) | const BROADCAST_CHANNELS = {

FILE: admin/constants/kv_store.ts
  constant SETTINGS_KEYS (line 3) | const SETTINGS_KEYS: KVStoreKey[] = ['chat.suggestionsEnabled', 'chat.la...

FILE: admin/constants/misc.ts
  constant NOMAD_API_DEFAULT_BASE_URL (line 2) | const NOMAD_API_DEFAULT_BASE_URL = 'https://api.projectnomad.us'

FILE: admin/constants/ollama.ts
  constant FALLBACK_RECOMMENDED_OLLAMA_MODELS (line 6) | const FALLBACK_RECOMMENDED_OLLAMA_MODELS: NomadOllamaModel[] = [
  constant DEFAULT_QUERY_REWRITE_MODEL (line 65) | const DEFAULT_QUERY_REWRITE_MODEL = 'qwen2.5:3b' // default to qwen2.5 f...
  constant RAG_CONTEXT_LIMITS (line 71) | const RAG_CONTEXT_LIMITS: { maxParams: number; maxResults: number; maxTo...
  constant SYSTEM_PROMPTS (line 77) | const SYSTEM_PROMPTS = {

FILE: admin/constants/service_names.ts
  constant SERVICE_NAMES (line 1) | const SERVICE_NAMES = {

FILE: admin/constants/zim_extraction.ts
  constant HTML_SELECTORS_TO_REMOVE (line 2) | const HTML_SELECTORS_TO_REMOVE = [
  constant NON_CONTENT_HEADING_PATTERNS (line 33) | const NON_CONTENT_HEADING_PATTERNS = [
  constant ZIM_BATCH_SIZE (line 48) | const ZIM_BATCH_SIZE = 50

FILE: admin/database/migrations/1751086751801_create_services_table.ts
  method up (line 6) | async up() {
  method down (line 23) | async down() {

FILE: admin/database/migrations/1763499145832_update_services_table.ts
  method up (line 6) | async up() {
  method down (line 13) | async down() {

FILE: admin/database/migrations/1764912210741_create_curated_collections_table.ts
  method up (line 6) | async up() {
  method down (line 19) | async down() {

FILE: admin/database/migrations/1764912270123_create_curated_collection_resources_table.ts
  method up (line 6) | async up() {
  method down (line 20) | async down() {

FILE: admin/database/migrations/1768170944482_update_services_add_installation_statuses_table.ts
  method up (line 6) | async up() {
  method down (line 12) | async down() {

FILE: admin/database/migrations/1768453747522_update_services_add_icon.ts
  method up (line 6) | async up() {
  method down (line 12) | async down() {

FILE: admin/database/migrations/1769097600001_create_benchmark_results_table.ts
  method up (line 6) | async up() {
  method down (line 44) | async down() {

FILE: admin/database/migrations/1769097600002_create_benchmark_settings_table.ts
  method up (line 6) | async up() {
  method down (line 16) | async down() {

FILE: admin/database/migrations/1769300000001_add_powered_by_and_display_order_to_services.ts
  method up (line 6) | async up() {
  method down (line 13) | async down() {

FILE: admin/database/migrations/1769300000002_update_services_friendly_names.ts
  method up (line 6) | async up() {
  method down (line 55) | async down() {

FILE: admin/database/migrations/1769324448000_add_builder_tag_to_benchmark_results.ts
  method up (line 6) | async up() {
  method down (line 12) | async down() {

FILE: admin/database/migrations/1769400000001_create_installed_tiers_table.ts
  method up (line 6) | async up() {
  method down (line 16) | async down() {

FILE: admin/database/migrations/1769400000002_create_kv_store_table.ts
  method up (line 6) | async up() {
  method down (line 16) | async down() {

FILE: admin/database/migrations/1769500000001_create_wikipedia_selection_table.ts
  method up (line 6) | async up() {
  method down (line 18) | async down() {

FILE: admin/database/migrations/1769646771604_create_create_chat_sessions_table.ts
  method up (line 6) | async up() {
  method down (line 16) | async down() {

FILE: admin/database/migrations/1769646798266_create_create_chat_messages_table.ts
  method up (line 6) | async up() {
  method down (line 17) | async down() {

FILE: admin/database/migrations/1769700000001_create_zim_file_metadata_table.ts
  method up (line 6) | async up() {
  method down (line 19) | async down() {

FILE: admin/database/migrations/1770269324176_add_unique_constraint_to_curated_collection_resources_table.ts
  method up (line 6) | async up() {
  method down (line 14) | async down() {

FILE: admin/database/migrations/1770273423670_drop_installed_tiers_table.ts
  method up (line 6) | async up() {
  method down (line 10) | async down() {

FILE: admin/database/migrations/1770849108030_create_create_collection_manifests_table.ts
  method up (line 6) | async up() {
  method down (line 15) | async down() {

FILE: admin/database/migrations/1770849119787_create_create_installed_resources_table.ts
  method up (line 6) | async up() {
  method down (line 22) | async down() {

FILE: admin/database/migrations/1770850092871_create_drop_legacy_curated_tables_table.ts
  method up (line 4) | async up() {
  method down (line 10) | async down() {

FILE: admin/database/migrations/1771000000001_add_update_fields_to_services.ts
  method up (line 6) | async up() {
  method down (line 14) | async down() {

FILE: admin/database/migrations/1771000000002_pin_latest_service_images.ts
  method up (line 6) | async up() {
  method down (line 43) | async down() {

FILE: admin/database/seeders/service_seeder.ts
  class ServiceSeeder (line 7) | class ServiceSeeder extends BaseSeeder {
    method run (line 164) | async run() {

FILE: admin/inertia/app/app.tsx
  method setup (line 37) | setup({ el, App, props }) {

FILE: admin/inertia/components/ActiveDownloads.tsx
  type ActiveDownloadProps (line 8) | interface ActiveDownloadProps {

FILE: admin/inertia/components/ActiveEmbedJobs.tsx
  type ActiveEmbedJobsProps (line 5) | interface ActiveEmbedJobsProps {

FILE: admin/inertia/components/ActiveModelDownloads.tsx
  type ActiveModelDownloadsProps (line 5) | interface ActiveModelDownloadsProps {

FILE: admin/inertia/components/Alert.tsx
  type AlertProps (line 6) | type AlertProps = React.HTMLAttributes<HTMLDivElement> & {
  function Alert (line 18) | function Alert({

FILE: admin/inertia/components/BouncingDots.tsx
  type BouncingDotsProps (line 3) | interface BouncingDotsProps {
  function BouncingDots (line 9) | function BouncingDots({ text, containerClassName, textClassName }: Bounc...

FILE: admin/inertia/components/BuilderTagSelector.tsx
  type BuilderTagSelectorProps (line 12) | interface BuilderTagSelectorProps {
  function BuilderTagSelector (line 18) | function BuilderTagSelector({

FILE: admin/inertia/components/CategoryCard.tsx
  type CategoryCardProps (line 7) | interface CategoryCardProps {

FILE: admin/inertia/components/CuratedCollectionCard.tsx
  type CuratedCollectionCardProps (line 7) | interface CuratedCollectionCardProps {

FILE: admin/inertia/components/DebugInfoModal.tsx
  type DebugInfoModalProps (line 6) | interface DebugInfoModalProps {
  function DebugInfoModal (line 11) | function DebugInfoModal({ open, onClose }: DebugInfoModalProps) {

FILE: admin/inertia/components/DownloadURLModal.tsx
  type DownloadURLModalProps (line 6) | type DownloadURLModalProps = Omit<
  function runPreflightCheck (line 23) | async function runPreflightCheck(downloadUrl: string) {

FILE: admin/inertia/components/DynamicIcon.tsx
  type DynamicIconName (line 4) | type DynamicIconName = keyof typeof TablerIcons
  type DynamicIconProps (line 6) | interface DynamicIconProps {

FILE: admin/inertia/components/Footer.tsx
  function Footer (line 8) | function Footer() {

FILE: admin/inertia/components/HorizontalBarChart.tsx
  type HorizontalBarChartProps (line 3) | interface HorizontalBarChartProps {
  function HorizontalBarChart (line 19) | function HorizontalBarChart({

FILE: admin/inertia/components/InfoTooltip.tsx
  type InfoTooltipProps (line 4) | interface InfoTooltipProps {
  function InfoTooltip (line 9) | function InfoTooltip({ text, className = '' }: InfoTooltipProps) {

FILE: admin/inertia/components/InstallActivityFeed.tsx
  type InstallActivityFeedProps (line 4) | type InstallActivityFeedProps = {

FILE: admin/inertia/components/LoadingSpinner.tsx
  type LoadingSpinnerProps (line 1) | interface LoadingSpinnerProps {

FILE: admin/inertia/components/MarkdocRenderer.tsx
  type MarkdocRendererProps (line 125) | interface MarkdocRendererProps {

FILE: admin/inertia/components/StorageProjectionBar.tsx
  type StorageProjectionBarProps (line 5) | interface StorageProjectionBarProps {
  function StorageProjectionBar (line 11) | function StorageProjectionBar({

FILE: admin/inertia/components/StyledButton.tsx
  type StyledButtonProps (line 6) | interface StyledButtonProps extends React.HTMLAttributes<HTMLButtonEleme...

FILE: admin/inertia/components/StyledModal.tsx
  type StyledModalProps (line 6) | type StyledModalProps = {

FILE: admin/inertia/components/StyledSectionHeader.tsx
  type StyledSectionHeaderProps (line 4) | interface StyledSectionHeaderProps {

FILE: admin/inertia/components/StyledSidebar.tsx
  type SidebarItem (line 11) | type SidebarItem = {
  type StyledSidebarProps (line 19) | interface StyledSidebarProps {

FILE: admin/inertia/components/StyledTable.tsx
  type StyledTableProps (line 6) | type StyledTableProps<T extends { [key: string]: any }> = {
  function StyledTable (line 34) | function StyledTable<T extends { [key: string]: any }>({

FILE: admin/inertia/components/ThemeToggle.tsx
  type ThemeToggleProps (line 4) | interface ThemeToggleProps {
  function ThemeToggle (line 8) | function ThemeToggle({ compact = false }: ThemeToggleProps) {

FILE: admin/inertia/components/TierSelectionModal.tsx
  type TierSelectionModalProps (line 10) | interface TierSelectionModalProps {

FILE: admin/inertia/components/UpdateServiceModal.tsx
  type UpdateServiceModalProps (line 8) | interface UpdateServiceModalProps {
  function UpdateServiceModal (line 17) | function UpdateServiceModal({

FILE: admin/inertia/components/WikipediaSelector.tsx
  type WikipediaSelectorProps (line 8) | interface WikipediaSelectorProps {

FILE: admin/inertia/components/chat/ChatAssistantAvatar.tsx
  function ChatAssistantAvatar (line 3) | function ChatAssistantAvatar() {

FILE: admin/inertia/components/chat/ChatButton.tsx
  type ChatButtonProps (line 3) | interface ChatButtonProps {
  function ChatButton (line 7) | function ChatButton({ onClick }: ChatButtonProps) {

FILE: admin/inertia/components/chat/ChatInterface.tsx
  type ChatInterfaceProps (line 14) | interface ChatInterfaceProps {
  function ChatInterface (line 24) | function ChatInterface({

FILE: admin/inertia/components/chat/ChatMessageBubble.tsx
  type ChatMessageBubbleProps (line 6) | interface ChatMessageBubbleProps {
  function ChatMessageBubble (line 10) | function ChatMessageBubble({ message }: ChatMessageBubbleProps) {

FILE: admin/inertia/components/chat/ChatModal.tsx
  type ChatModalProps (line 6) | interface ChatModalProps {
  function ChatModal (line 11) | function ChatModal({ open, onClose }: ChatModalProps) {

FILE: admin/inertia/components/chat/ChatSidebar.tsx
  type ChatSidebarProps (line 9) | interface ChatSidebarProps {
  function ChatSidebar (line 18) | function ChatSidebar({

FILE: admin/inertia/components/chat/KnowledgeBaseModal.tsx
  type KnowledgeBaseModalProps (line 14) | interface KnowledgeBaseModalProps {
  function sourceToDisplayName (line 19) | function sourceToDisplayName(source: string): string {
  function KnowledgeBaseModal (line 24) | function KnowledgeBaseModal({ aiAssistantName = "AI Assistant", onClose ...

FILE: admin/inertia/components/chat/index.tsx
  type ChatProps (line 15) | interface ChatProps {
  function Chat (line 23) | function Chat({

FILE: admin/inertia/components/file-uploader/index.tsx
  type FileUploaderProps (line 10) | interface FileUploaderProps {
  type FileUploaderRef (line 20) | interface FileUploaderRef {

FILE: admin/inertia/components/inputs/Input.tsx
  type InputProps (line 4) | interface InputProps extends InputHTMLAttributes<HTMLInputElement> {

FILE: admin/inertia/components/inputs/Switch.tsx
  type SwitchProps (line 3) | interface SwitchProps {
  function Switch (line 12) | function Switch({

FILE: admin/inertia/components/layout/BackToHomeHeader.tsx
  type BackToHomeHeaderProps (line 5) | interface BackToHomeHeaderProps {
  function BackToHomeHeader (line 10) | function BackToHomeHeader({ className, children }: BackToHomeHeaderProps) {

FILE: admin/inertia/components/maps/MapComponent.tsx
  function MapComponent (line 7) | function MapComponent() {

FILE: admin/inertia/components/markdoc/Heading.tsx
  function Heading (line 3) | function Heading({

FILE: admin/inertia/components/markdoc/Image.tsx
  function Image (line 1) | function Image({ src, alt, title }: { src: string; alt?: string; title?:...

FILE: admin/inertia/components/markdoc/List.tsx
  function List (line 1) | function List({

FILE: admin/inertia/components/markdoc/ListItem.tsx
  function ListItem (line 1) | function ListItem({ children }: { children: React.ReactNode }) {

FILE: admin/inertia/components/markdoc/Table.tsx
  function Table (line 1) | function Table({ children }: { children: React.ReactNode }) {
  function TableHead (line 11) | function TableHead({ children }: { children: React.ReactNode }) {
  function TableBody (line 15) | function TableBody({ children }: { children: React.ReactNode }) {
  function TableRow (line 19) | function TableRow({ children }: { children: React.ReactNode }) {
  function TableHeader (line 23) | function TableHeader({ children }: { children: React.ReactNode }) {
  function TableCell (line 31) | function TableCell({ children }: { children: React.ReactNode }) {

FILE: admin/inertia/components/systeminfo/CircularGauge.tsx
  type CircularGaugeProps (line 4) | interface CircularGaugeProps {
  function CircularGauge (line 14) | function CircularGauge({

FILE: admin/inertia/components/systeminfo/InfoCard.tsx
  type InfoCardProps (line 3) | interface InfoCardProps {
  function InfoCard (line 13) | function InfoCard({ title, icon, data, variant = 'default' }: InfoCardPr...

FILE: admin/inertia/components/systeminfo/StatusCard.tsx
  type StatusCardProps (line 1) | type StatusCardProps = {
  function StatusCard (line 6) | function StatusCard({ title, value }: StatusCardProps) {

FILE: admin/inertia/context/ModalContext.ts
  type ModalContextProps (line 3) | interface ModalContextProps {

FILE: admin/inertia/context/NotificationContext.ts
  type Notification (line 3) | interface Notification {
  type NotificationContextType (line 9) | interface NotificationContextType {

FILE: admin/inertia/hooks/useDiskDisplayData.ts
  type DiskDisplayItem (line 5) | type DiskDisplayItem = {
  function getAllDiskDisplayItems (line 16) | function getAllDiskDisplayItems(
  function getPrimaryDiskInfo (line 58) | function getPrimaryDiskInfo(

FILE: admin/inertia/hooks/useDownloads.ts
  type useDownloadsProps (line 5) | type useDownloadsProps = {

FILE: admin/inertia/hooks/useOllamaModelDownloads.ts
  type OllamaModelDownload (line 4) | type OllamaModelDownload = {
  function useOllamaModelDownloads (line 10) | function useOllamaModelDownloads() {

FILE: admin/inertia/hooks/useServiceInstallationActivity.ts
  function useServiceInstallationActivity (line 6) | function useServiceInstallationActivity() {

FILE: admin/inertia/hooks/useSystemInfo.ts
  type UseSystemInfoProps (line 5) | type UseSystemInfoProps = Omit<

FILE: admin/inertia/hooks/useSystemSetting.ts
  type UseSystemSettingProps (line 5) | type UseSystemSettingProps = Omit<

FILE: admin/inertia/hooks/useTheme.ts
  type Theme (line 4) | type Theme = 'light' | 'dark'
  constant STORAGE_KEY (line 6) | const STORAGE_KEY = 'nomad:theme'
  function getInitialTheme (line 8) | function getInitialTheme(): Theme {
  function useTheme (line 16) | function useTheme() {

FILE: admin/inertia/layouts/AppLayout.tsx
  function AppLayout (line 11) | function AppLayout({ children }: { children: React.ReactNode }) {

FILE: admin/inertia/layouts/DocsLayout.tsx
  function DocsLayout (line 6) | function DocsLayout({ children }: { children: React.ReactNode }) {

FILE: admin/inertia/layouts/MapsLayout.tsx
  function MapsLayout (line 3) | function MapsLayout({ children }: { children: React.ReactNode }) {

FILE: admin/inertia/layouts/SettingsLayout.tsx
  function SettingsLayout (line 20) | function SettingsLayout({ children }: { children: React.ReactNode }) {

FILE: admin/inertia/lib/api.ts
  class API (line 15) | class API {
    method constructor (line 18) | constructor() {
    method affectService (line 27) | async affectService(service_name: string, action: 'start' | 'stop' | '...
    method checkLatestVersion (line 37) | async checkLatestVersion(force: boolean = false) {
    method deleteModel (line 46) | async deleteModel(model: string): Promise<{ success: boolean; message:...
    method downloadBaseMapAssets (line 53) | async downloadBaseMapAssets() {
    method downloadMapCollection (line 60) | async downloadMapCollection(slug: string): Promise<{
    method downloadModel (line 71) | async downloadModel(model: string): Promise<{ success: boolean; messag...
    method downloadCategoryTier (line 78) | async downloadCategoryTier(categorySlug: string, tierSlug: string): Pr...
    method downloadRemoteMapRegion (line 90) | async downloadRemoteMapRegion(url: string) {
    method downloadRemoteMapRegionPreflight (line 100) | async downloadRemoteMapRegionPreflight(url: string) {
    method downloadRemoteZimFile (line 109) | async downloadRemoteZimFile(
    method fetchLatestMapCollections (line 122) | async fetchLatestMapCollections(): Promise<{ success: boolean } | unde...
    method checkForContentUpdates (line 131) | async checkForContentUpdates() {
    method applyContentUpdate (line 138) | async applyContentUpdate(update: ResourceUpdateInfo) {
    method applyAllContentUpdates (line 148) | async applyAllContentUpdates(updates: ResourceUpdateInfo[]) {
    method refreshManifests (line 157) | async refreshManifests(): Promise<{ success: boolean; changed: Record<...
    method checkServiceUpdates (line 166) | async checkServiceUpdates() {
    method getAvailableVersions (line 175) | async getAvailableVersions(serviceName: string) {
    method updateService (line 184) | async updateService(serviceName: string, targetVersion: string) {
    method forceReinstallService (line 194) | async forceReinstallService(service_name: string) {
    method getChatSuggestions (line 204) | async getChatSuggestions(signal?: AbortSignal) {
    method getDebugInfo (line 214) | async getDebugInfo() {
    method getInternetStatus (line 221) | async getInternetStatus() {
    method getInstalledModels (line 228) | async getInstalledModels() {
    method getAvailableModels (line 235) | async getAvailableModels(params: { query?: string; recommendedOnly?: b...
    method sendChatMessage (line 247) | async sendChatMessage(chatRequest: OllamaChatRequest) {
    method streamChatMessage (line 254) | async streamChatMessage(
    method getBenchmarkResults (line 305) | async getBenchmarkResults() {
    method getLatestBenchmarkResult (line 312) | async getLatestBenchmarkResult() {
    method getChatSessions (line 319) | async getChatSessions() {
    method getChatSession (line 334) | async getChatSession(sessionId: string) {
    method createChatSession (line 352) | async createChatSession(title: string, model?: string) {
    method updateChatSession (line 364) | async updateChatSession(sessionId: string, data: { title?: string; mod...
    method deleteChatSession (line 376) | async deleteChatSession(sessionId: string) {
    method deleteAllChatSessions (line 382) | async deleteAllChatSessions() {
    method addChatMessage (line 391) | async addChatMessage(sessionId: string, role: 'system' | 'user' | 'ass...
    method getActiveEmbedJobs (line 403) | async getActiveEmbedJobs(): Promise<EmbedJobWithProgress[] | undefined> {
    method getStoredRAGFiles (line 410) | async getStoredRAGFiles() {
    method deleteRAGFile (line 417) | async deleteRAGFile(source: string) {
    method getSystemInfo (line 424) | async getSystemInfo() {
    method getSystemServices (line 431) | async getSystemServices() {
    method getSystemUpdateStatus (line 438) | async getSystemUpdateStatus() {
    method getSystemUpdateLogs (line 445) | async getSystemUpdateLogs() {
    method healthCheck (line 452) | async healthCheck() {
    method installService (line 461) | async installService(service_name: string) {
    method listCuratedMapCollections (line 471) | async listCuratedMapCollections() {
    method listCuratedCategories (line 480) | async listCuratedCategories() {
    method listDocs (line 487) | async listDocs() {
    method listMapRegionFiles (line 494) | async listMapRegionFiles() {
    method listRemoteZimFiles (line 501) | async listRemoteZimFiles({
    method listZimFiles (line 521) | async listZimFiles() {
    method listDownloadJobs (line 527) | async listDownloadJobs(filetype?: string): Promise<DownloadJobWithProg...
    method removeDownloadJob (line 535) | async removeDownloadJob(jobId: string): Promise<void> {
    method runBenchmark (line 541) | async runBenchmark(type: BenchmarkType, sync: boolean = false) {
    method startSystemUpdate (line 551) | async startSystemUpdate() {
    method submitBenchmark (line 560) | async submitBenchmark(benchmark_id: string, anonymous: boolean) {
    method subscribeToReleaseNotes (line 577) | async subscribeToReleaseNotes(email: string) {
    method syncRAGStorage (line 587) | async syncRAGStorage() {
    method getWikipediaState (line 601) | async getWikipediaState(): Promise<WikipediaState | undefined> {
    method selectWikipedia (line 608) | async selectWikipedia(
    method updateBuilderTag (line 621) | async updateBuilderTag(benchmark_id: string, builder_tag: string) {
    method uploadDocument (line 631) | async uploadDocument(file: File) {
    method getSetting (line 648) | async getSetting(key: string) {
    method updateSetting (line 658) | async updateSetting(key: string, value: any) {

FILE: admin/inertia/lib/builderTagWords.ts
  constant ADJECTIVES (line 4) | const ADJECTIVES = [
  constant NOUNS (line 57) | const NOUNS = [
  type Adjective (line 110) | type Adjective = (typeof ADJECTIVES)[number]
  type Noun (line 111) | type Noun = (typeof NOUNS)[number]
  function generateRandomNumber (line 113) | function generateRandomNumber(): string {
  function generateRandomBuilderTag (line 117) | function generateRandomBuilderTag(): string {
  function parseBuilderTag (line 124) | function parseBuilderTag(tag: string): {
  function buildBuilderTag (line 143) | function buildBuilderTag(adjective: string, noun: string, number: string...

FILE: admin/inertia/lib/classNames.ts
  function classNames (line 2) | function classNames(...classes: (string | undefined)[]): string {

FILE: admin/inertia/lib/collections.ts
  function resolveTierResources (line 7) | function resolveTierResources(tier: SpecTier, allTiers: SpecTier[]): Spe...
  function resolveTierResourcesInner (line 12) | function resolveTierResourcesInner(

FILE: admin/inertia/lib/navigation.ts
  function getServiceLink (line 3) | function getServiceLink(ui_location: string): string {

FILE: admin/inertia/lib/util.ts
  function setGlobalNotificationCallback (line 6) | function setGlobalNotificationCallback(callback: (notification: Notifica...
  function capitalizeFirstLetter (line 10) | function capitalizeFirstLetter(str?: string | null): string {
  function formatBytes (line 15) | function formatBytes(bytes: number, decimals = 2): string {
  function generateRandomString (line 24) | function generateRandomString(length: number): string {
  function generateUUID (line 33) | function generateUUID(): string {
  function catchInternal (line 73) | function catchInternal<Fn extends (...args: any[]) => any>(fn: Fn): (......

FILE: admin/inertia/pages/about.tsx
  function About (line 3) | function About() {

FILE: admin/inertia/pages/chat.tsx
  function Chat (line 4) | function Chat(props: { settings: { chatSuggestionsEnabled: boolean } }) {

FILE: admin/inertia/pages/docs/show.tsx
  function Show (line 5) | function Show({ content }: { content: any; }) {

FILE: admin/inertia/pages/easy-setup/complete.tsx
  function EasySetupWizardComplete (line 11) | function EasySetupWizardComplete() {

FILE: admin/inertia/pages/easy-setup/index.tsx
  type Capability (line 26) | interface Capability {
  function buildCoreCapabilities (line 36) | function buildCoreCapabilities(aiAssistantName: string): Capability[] {
  constant ADDITIONAL_TOOLS (line 84) | const ADDITIONAL_TOOLS: Capability[] = [
  type WizardStep (line 109) | type WizardStep = 1 | 2 | 3 | 4
  constant CURATED_MAP_COLLECTIONS_KEY (line 111) | const CURATED_MAP_COLLECTIONS_KEY = 'curated-map-collections'
  constant CURATED_CATEGORIES_KEY (line 112) | const CURATED_CATEGORIES_KEY = 'curated-categories'
  constant WIKIPEDIA_STATE_KEY (line 113) | const WIKIPEDIA_STATE_KEY = 'wikipedia-state'
  function EasySetupWizard (line 115) | function EasySetupWizard(props: { system: { services: ServiceSlim[] } }) {

FILE: admin/inertia/pages/errors/not_found.tsx
  function NotFound (line 1) | function NotFound() {

FILE: admin/inertia/pages/errors/server_error.tsx
  function ServerError (line 1) | function ServerError(props: { error: any }) {

FILE: admin/inertia/pages/home.tsx
  constant MAPS_ITEM (line 20) | const MAPS_ITEM = {
  constant SYSTEM_ITEMS (line 32) | const SYSTEM_ITEMS = [
  type DashboardItem (line 76) | interface DashboardItem {
  function Home (line 87) | function Home(props: {

FILE: admin/inertia/pages/maps.tsx
  function Maps (line 9) | function Maps(props: {

FILE: admin/inertia/pages/settings/apps.tsx
  function extractTag (line 21) | function extractTag(containerImage: string): string {
  function SettingsPage (line 27) | function SettingsPage(props: { system: { services: ServiceSlim[] } }) {

FILE: admin/inertia/pages/settings/benchmark.tsx
  type BenchmarkProgressWithID (line 28) | type BenchmarkProgressWithID = BenchmarkProgress & { benchmark_id: string }
  function BenchmarkPage (line 30) | function BenchmarkPage(props: {

FILE: admin/inertia/pages/settings/legal.tsx
  function LegalPage (line 4) | function LegalPage() {

FILE: admin/inertia/pages/settings/maps.tsx
  constant CURATED_COLLECTIONS_KEY (line 20) | const CURATED_COLLECTIONS_KEY = 'curated-map-collections'
  function MapsManager (line 22) | function MapsManager(props: {

FILE: admin/inertia/pages/settings/models.tsx
  function ModelsPage (line 24) | function ModelsPage(props: {

FILE: admin/inertia/pages/settings/support.tsx
  function SupportPage (line 5) | function SupportPage() {

FILE: admin/inertia/pages/settings/system.tsx
  function SettingsPage (line 19) | function SettingsPage(props: {

FILE: admin/inertia/pages/settings/update.tsx
  type Props (line 19) | type Props = {
  function ContentUpdatesSection (line 26) | function ContentUpdatesSection() {
  function SystemUpdatePage (line 226) | function SystemUpdatePage(props: { system: Props }) {

FILE: admin/inertia/pages/settings/zim/index.tsx
  function ZimPage (line 14) | function ZimPage() {

FILE: admin/inertia/pages/settings/zim/remote-explorer.tsx
  constant CURATED_CATEGORIES_KEY (line 35) | const CURATED_CATEGORIES_KEY = 'curated-categories'
  constant WIKIPEDIA_STATE_KEY (line 36) | const WIKIPEDIA_STATE_KEY = 'wikipedia-state'
  function ZimRemoteExplorer (line 38) | function ZimRemoteExplorer() {

FILE: admin/inertia/providers/ModalProvider.tsx
  type ModalsProviderProps (line 4) | interface ModalsProviderProps {

FILE: admin/inertia/providers/ThemeProvider.tsx
  type ThemeContextType (line 4) | interface ThemeContextType {
  function ThemeProvider (line 16) | function ThemeProvider({ children }: { children: React.ReactNode }) {
  function useThemeContext (line 25) | function useThemeContext() {

FILE: admin/providers/map_static_provider.ts
  class MapStaticProvider (line 16) | class MapStaticProvider {
    method constructor (line 17) | constructor(protected app: ApplicationService) {}
    method register (line 18) | register() {

FILE: admin/types/benchmark.ts
  type BenchmarkType (line 4) | type BenchmarkType = 'full' | 'system' | 'ai'
  type BenchmarkStatus (line 7) | type BenchmarkStatus =
  type DiskType (line 22) | type DiskType = 'ssd' | 'hdd' | 'nvme' | 'unknown'
  type HardwareInfo (line 24) | type HardwareInfo = Pick<
  type SystemScores (line 30) | type SystemScores = Pick<
  type AIScores (line 35) | type AIScores = Pick<
  type BenchmarkResultSlim (line 41) | type BenchmarkResultSlim = Pick<
  type BenchmarkSettingKey (line 56) | type BenchmarkSettingKey =
  type BenchmarkSettings (line 61) | type BenchmarkSettings = {
  type BenchmarkProgress (line 68) | type BenchmarkProgress = {
  type RunBenchmarkParams (line 77) | type RunBenchmarkParams = {
  type SubmitBenchmarkParams (line 81) | type SubmitBenchmarkParams = {
  type RunBenchmarkResponse (line 86) | type RunBenchmarkResponse = {
  type BenchmarkResultsResponse (line 93) | type BenchmarkResultsResponse = {
  type SubmitBenchmarkResponse (line 98) | type SubmitBenchmarkResponse = {
  type UpdateBuilderTagResponse (line 107) | type UpdateBuilderTagResponse = {
  type RepositorySubmission (line 116) | type RepositorySubmission = Pick<
  type RepositorySubmitResponse (line 138) | type RepositorySubmitResponse = {
  type RepositoryStats (line 144) | type RepositoryStats = {
  type LeaderboardEntry (line 158) | type LeaderboardEntry = Pick<BenchmarkResult, 'cpu_model' | 'gpu_model' ...
  type ComparisonResponse (line 163) | type ComparisonResponse = {
  type ScoreWeights (line 170) | type ScoreWeights = {
  constant DEFAULT_SCORE_WEIGHTS (line 180) | const DEFAULT_SCORE_WEIGHTS: ScoreWeights = {
  type RunBenchmarkJobParams (line 190) | type RunBenchmarkJobParams = {
  type SysbenchCpuResult (line 197) | type SysbenchCpuResult = {
  type SysbenchMemoryResult (line 203) | type SysbenchMemoryResult = {
  type SysbenchDiskResult (line 209) | type SysbenchDiskResult = {

FILE: admin/types/chat.ts
  type ChatMessage (line 1) | interface ChatMessage {
  type ChatSession (line 12) | interface ChatSession {

FILE: admin/types/collections.ts
  type SpecResource (line 1) | type SpecResource = {
  type SpecTier (line 10) | type SpecTier = {
  type SpecCategory (line 19) | type SpecCategory = {
  type SpecCollection (line 28) | type SpecCollection = {
  type ZimCategoriesSpec (line 37) | type ZimCategoriesSpec = {
  type MapsSpec (line 42) | type MapsSpec = {
  type WikipediaOption (line 47) | type WikipediaOption = {
  type WikipediaSpec (line 56) | type WikipediaSpec = {
  type ManifestType (line 61) | type ManifestType = 'zim_categories' | 'maps' | 'wikipedia'
  type ResourceStatus (line 63) | type ResourceStatus = 'installed' | 'not_installed' | 'update_available'
  type CategoryWithStatus (line 65) | type CategoryWithStatus = SpecCategory & {
  type CollectionWithStatus (line 69) | type CollectionWithStatus = SpecCollection & {
  type ResourceUpdateCheckRequest (line 75) | type ResourceUpdateCheckRequest = {
  type ResourceUpdateInfo (line 83) | type ResourceUpdateInfo = {
  type ContentUpdateCheckResult (line 91) | type ContentUpdateCheckResult = {

FILE: admin/types/docker.ts
  type DockerComposeServiceConfig (line 2) | type DockerComposeServiceConfig = {

FILE: admin/types/downloads.ts
  type DoResumableDownloadParams (line 1) | type DoResumableDownloadParams = {
  type DoResumableDownloadWithRetryParams (line 12) | type DoResumableDownloadWithRetryParams = DoResumableDownloadParams & {
  type DoResumableDownloadProgress (line 18) | type DoResumableDownloadProgress = {
  type RunDownloadJobParams (line 26) | type RunDownloadJobParams = Omit<
  type DownloadJobWithProgress (line 38) | type DownloadJobWithProgress = {
  type WikipediaOption (line 49) | type WikipediaOption = {
  type WikipediaOptionsFile (line 57) | type WikipediaOptionsFile = {
  type WikipediaCurrentSelection (line 61) | type WikipediaCurrentSelection = {
  type WikipediaState (line 68) | type WikipediaState = {

FILE: admin/types/files.ts
  type FileEntry (line 3) | type FileEntry =
  type DownloadProgress (line 15) | type DownloadProgress = {
  type DownloadOptions (line 23) | type DownloadOptions = {
  type DownloadRemoteSuccessCallback (line 32) | type DownloadRemoteSuccessCallback = (urls: string[], restart: boolean) ...

FILE: admin/types/kv_store.ts
  constant KV_STORE_SCHEMA (line 2) | const KV_STORE_SCHEMA = {
  type KVTagToType (line 15) | type KVTagToType<T extends string> = T extends 'boolean' ? boolean : string
  type KVStoreKey (line 17) | type KVStoreKey = keyof typeof KV_STORE_SCHEMA
  type KVStoreValue (line 18) | type KVStoreValue<K extends KVStoreKey> = KVTagToType<(typeof KV_STORE_S...

FILE: admin/types/maps.ts
  type BaseStylesFile (line 1) | type BaseStylesFile = {
  type MapSource (line 11) | type MapSource = {
  type MapLayer (line 17) | type MapLayer = {

FILE: admin/types/ollama.ts
  type NomadOllamaModel (line 1) | type NomadOllamaModel = {
  type NomadOllamaModelTag (line 11) | type NomadOllamaModelTag = {
  type NomadOllamaModelAPIResponse (line 20) | type NomadOllamaModelAPIResponse = {
  type OllamaChatMessage (line 26) | type OllamaChatMessage = {
  type OllamaChatRequest (line 31) | type OllamaChatRequest = {
  type OllamaChatResponse (line 38) | type OllamaChatResponse = {

FILE: admin/types/rag.ts
  type EmbedJobWithProgress (line 1) | type EmbedJobWithProgress = {
  type ProcessAndEmbedFileResponse (line 9) | type ProcessAndEmbedFileResponse = {
  type ProcessZIMFileResponse (line 17) | type ProcessZIMFileResponse = ProcessAndEmbedFileResponse
  type RAGResult (line 19) | type RAGResult = {
  type RerankedRAGResult (line 34) | type RerankedRAGResult = Omit<RAGResult, 'keywords'> & {

FILE: admin/types/services.ts
  type ServiceSlim (line 3) | type ServiceSlim = Pick<

FILE: admin/types/system.ts
  type GpuHealthStatus (line 3) | type GpuHealthStatus = {
  type SystemInformationResponse (line 9) | type SystemInformationResponse = {
  type UsePageProps (line 22) | type UsePageProps = {
  type LSBlockDevice (line 27) | type LSBlockDevice = {
  type NomadDiskInfoRaw (line 39) | type NomadDiskInfoRaw = {
  type NomadDiskInfo (line 53) | type NomadDiskInfo = {
  type SystemUpdateStatus (line 72) | type SystemUpdateStatus = {
  type CheckLatestVersionResult (line 80) | type CheckLatestVersionResult = {

FILE: admin/types/zim.ts
  type ZimFileWithMetadata (line 3) | type ZimFileWithMetadata = FileEntry & {
  type ListZimFilesResponse (line 10) | type ListZimFilesResponse = {
  type ListRemoteZimFilesResponse (line 15) | type ListRemoteZimFilesResponse = {
  type RawRemoteZimFileEntry (line 21) | type RawRemoteZimFileEntry = {
  type RawListRemoteZimFilesResponse (line 43) | type RawListRemoteZimFilesResponse = {
  type RemoteZimFileEntry (line 57) | type RemoteZimFileEntry = {
  type ExtractZIMContentOptions (line 68) | type ExtractZIMContentOptions = {
  type ExtractZIMChunkingStrategy (line 77) | type ExtractZIMChunkingStrategy = 'structured' | 'simple'
  type ZIMArchiveMetadata (line 79) | type ZIMArchiveMetadata = {
  type ZIMContentChunk (line 88) | type ZIMContentChunk = {

FILE: admin/util/files.ts
  function chmodRecursive (line 4) | async function chmodRecursive(
  function chownRecursive (line 30) | async function chownRecursive(targetPath: string, uid: number, gid: numb...

FILE: admin/util/zim.ts
  function isRawListRemoteZimFilesResponse (line 3) | function isRawListRemoteZimFilesResponse(obj: any): obj is RawListRemote...
  function isRawRemoteZimFileEntry (line 21) | function isRawRemoteZimFileEntry(obj: any): obj is RawRemoteZimFileEntry {
Condensed preview — 294 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (1,234K chars).
[
  {
    "path": ".dockerignore",
    "chars": 80,
    "preview": ".env\n.env.*\n.git\nnode_modules\n*.log\nadmin/storage\nadmin/node_modules\nadmin/build"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.yml",
    "chars": 6180,
    "preview": "name: Bug Report\ndescription: Report a bug or issue with Project N.O.M.A.D.\ntitle: \"[Bug]: \"\nlabels: [\"bug\", \"needs-tria"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/config.yml",
    "chars": 768,
    "preview": "blank_issues_enabled: false\ncontact_links:\n  - name: 💬 Discord Community\n    url: https://discord.com/invite/crosstalkso"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.yml",
    "chars": 6097,
    "preview": "name: Feature Request\ndescription: Suggest a new feature or enhancement for Project N.O.M.A.D.\ntitle: \"[Feature]: \"\nlabe"
  },
  {
    "path": ".github/dependabot.yaml",
    "chars": 135,
    "preview": "version: 2\nupdates:\n  - package-ecosystem: \"npm\"\n    directory: \"/admin\"\n    schedule:\n      interval: \"weekly\"\n    targ"
  },
  {
    "path": ".github/scripts/finalize-release-notes.sh",
    "chars": 4082,
    "preview": "#!/usr/bin/env bash\n#\n# finalize-release-notes.sh\n#\n# Stamps the \"## Unreleased\" section in a release-notes file with a "
  },
  {
    "path": ".github/workflows/build-disk-collector.yml",
    "chars": 1745,
    "preview": "name: Build Disk Collector Image\n\non:\n  workflow_dispatch:\n    inputs:\n      version:\n        description: 'Semantic ver"
  },
  {
    "path": ".github/workflows/build-primary-image.yml",
    "chars": 1857,
    "preview": "name: Build Primary Docker Image\n\non:\n  workflow_dispatch:\n    inputs:\n      version:\n        description: 'Semantic ver"
  },
  {
    "path": ".github/workflows/build-sidecar-updater.yml",
    "chars": 1743,
    "preview": "name: Build Sidecar Updater Image\n\non:\n  workflow_dispatch:\n    inputs:\n      version:\n        description: 'Semantic ve"
  },
  {
    "path": ".github/workflows/release.yml",
    "chars": 3837,
    "preview": "name: Release SemVer\n\non: workflow_dispatch\n\njobs:\n  check_authorization:\n    name: Check authorization to release new v"
  },
  {
    "path": ".github/workflows/validate-collection-urls.yml",
    "chars": 1708,
    "preview": "name: Validate Collection URLs\n\non:\n  push:\n    paths:\n      - 'collections/**.json'\n  pull_request:\n    paths:\n      - "
  },
  {
    "path": ".gitignore",
    "chars": 570,
    "preview": "# Logs\nlogs\n*.log\n\n# Diagnostic reports (https://nodejs.org/api/report.html)\nreport.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json\n\n# "
  },
  {
    "path": ".releaserc.json",
    "chars": 425,
    "preview": "{\n  \"branches\": [\n    \"main\",\n    { \"name\": \"rc\", \"prerelease\": \"rc\" }\n  ],\n  \"plugins\": [\n    \"@semantic-release/commit"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "chars": 5202,
    "preview": "# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nWe as members, contributors, and leaders pledge to make participa"
  },
  {
    "path": "CONTRIBUTING.md",
    "chars": 7548,
    "preview": "# Contributing to Project N.O.M.A.D.\n\nThank you for your interest in contributing to Project N.O.M.A.D.! Community contr"
  },
  {
    "path": "Dockerfile",
    "chars": 1899,
    "preview": "FROM node:22-slim AS base\n\n# Install bash & curl for entrypoint script compatibility, graphicsmagick for pdf2pic, and vi"
  },
  {
    "path": "LICENSE",
    "chars": 10779,
    "preview": "                                 Apache License\n                           Version 2.0, January 2004\n                   "
  },
  {
    "path": "README.md",
    "chars": 13268,
    "preview": "<div align=\"center\">\n<img src=\"https://raw.githubusercontent.com/Crosstalk-Solutions/project-nomad/refs/heads/main/admin"
  },
  {
    "path": "admin/.editorconfig",
    "chars": 337,
    "preview": "# http://editorconfig.org\n\n[*]\nindent_style = space\nindent_size = 2\nend_of_line = lf\ncharset = utf-8\ntrim_trailing_white"
  },
  {
    "path": "admin/ace.js",
    "chars": 817,
    "preview": "/*\n|--------------------------------------------------------------------------\n| JavaScript entrypoint for running ace c"
  },
  {
    "path": "admin/adonisrc.ts",
    "chars": 3615,
    "preview": "import { defineConfig } from '@adonisjs/core/app'\n\nexport default defineConfig({\n  /*\n  |-------------------------------"
  },
  {
    "path": "admin/app/controllers/benchmark_controller.ts",
    "chars": 7743,
    "preview": "import { inject } from '@adonisjs/core'\nimport type { HttpContext } from '@adonisjs/core/http'\nimport { BenchmarkService"
  },
  {
    "path": "admin/app/controllers/chats_controller.ts",
    "chars": 3845,
    "preview": "import { inject } from '@adonisjs/core'\nimport type { HttpContext } from '@adonisjs/core/http'\nimport { ChatService } fr"
  },
  {
    "path": "admin/app/controllers/collection_updates_controller.ts",
    "chars": 1053,
    "preview": "import { CollectionUpdateService } from '#services/collection_update_service'\nimport {\n  assertNotPrivateUrl,\n  applyCon"
  },
  {
    "path": "admin/app/controllers/docs_controller.ts",
    "chars": 564,
    "preview": "import { DocsService } from '#services/docs_service'\nimport { inject } from '@adonisjs/core'\nimport type { HttpContext }"
  },
  {
    "path": "admin/app/controllers/downloads_controller.ts",
    "chars": 755,
    "preview": "import type { HttpContext } from '@adonisjs/core/http'\nimport { DownloadService } from '#services/download_service'\nimpo"
  },
  {
    "path": "admin/app/controllers/easy_setup_controller.ts",
    "chars": 1400,
    "preview": "import { SystemService } from '#services/system_service'\nimport { ZimService } from '#services/zim_service'\nimport { Col"
  },
  {
    "path": "admin/app/controllers/home_controller.ts",
    "chars": 660,
    "preview": "import { SystemService } from '#services/system_service'\nimport { inject } from '@adonisjs/core'\nimport type { HttpConte"
  },
  {
    "path": "admin/app/controllers/maps_controller.ts",
    "chars": 3536,
    "preview": "import { MapService } from '#services/map_service'\nimport {\n  assertNotPrivateUrl,\n  downloadCollectionValidator,\n  file"
  },
  {
    "path": "admin/app/controllers/ollama_controller.ts",
    "chars": 11370,
    "preview": "import { ChatService } from '#services/chat_service'\nimport { OllamaService } from '#services/ollama_service'\nimport { R"
  },
  {
    "path": "admin/app/controllers/rag_controller.ts",
    "chars": 2964,
    "preview": "import { RagService } from '#services/rag_service'\nimport { EmbedFileJob } from '#jobs/embed_file_job'\nimport { inject }"
  },
  {
    "path": "admin/app/controllers/settings_controller.ts",
    "chars": 4296,
    "preview": "import KVStore from '#models/kv_store';\nimport { BenchmarkService } from '#services/benchmark_service';\nimport { MapServ"
  },
  {
    "path": "admin/app/controllers/system_controller.ts",
    "chars": 6681,
    "preview": "import { DockerService } from '#services/docker_service';\nimport { SystemService } from '#services/system_service'\nimpor"
  },
  {
    "path": "admin/app/controllers/zim_controller.ts",
    "chars": 2617,
    "preview": "import { ZimService } from '#services/zim_service'\nimport {\n  assertNotPrivateUrl,\n  downloadCategoryTierValidator,\n  fi"
  },
  {
    "path": "admin/app/exceptions/handler.ts",
    "chars": 1560,
    "preview": "import app from '@adonisjs/core/services/app'\nimport { HttpContext, ExceptionHandler } from '@adonisjs/core/http'\nimport"
  },
  {
    "path": "admin/app/exceptions/internal_server_error_exception.ts",
    "chars": 190,
    "preview": "import { Exception } from '@adonisjs/core/exceptions'\n\nexport default class InternalServerErrorException extends Excepti"
  },
  {
    "path": "admin/app/jobs/check_service_updates_job.ts",
    "chars": 3932,
    "preview": "import { Job } from 'bullmq'\nimport { QueueService } from '#services/queue_service'\nimport { DockerService } from '#serv"
  },
  {
    "path": "admin/app/jobs/check_update_job.ts",
    "chars": 2133,
    "preview": "import { Job } from 'bullmq'\nimport { QueueService } from '#services/queue_service'\nimport { DockerService } from '#serv"
  },
  {
    "path": "admin/app/jobs/download_model_job.ts",
    "chars": 3585,
    "preview": "import { Job } from 'bullmq'\nimport { QueueService } from '#services/queue_service'\nimport { createHash } from 'crypto'\n"
  },
  {
    "path": "admin/app/jobs/embed_file_job.ts",
    "chars": 8695,
    "preview": "import { Job, UnrecoverableError } from 'bullmq'\nimport { QueueService } from '#services/queue_service'\nimport { EmbedJo"
  },
  {
    "path": "admin/app/jobs/run_benchmark_job.ts",
    "chars": 3087,
    "preview": "import { Job } from 'bullmq'\nimport { QueueService } from '#services/queue_service'\nimport { BenchmarkService } from '#s"
  },
  {
    "path": "admin/app/jobs/run_download_job.ts",
    "chars": 5284,
    "preview": "import { Job } from 'bullmq'\nimport { RunDownloadJobParams } from '../../types/downloads.js'\nimport { QueueService } fro"
  },
  {
    "path": "admin/app/middleware/container_bindings_middleware.ts",
    "chars": 614,
    "preview": "import { Logger } from '@adonisjs/core/logger'\nimport { HttpContext } from '@adonisjs/core/http'\nimport { NextFn } from "
  },
  {
    "path": "admin/app/middleware/force_json_response_middleware.ts",
    "chars": 542,
    "preview": "import type { HttpContext } from '@adonisjs/core/http'\nimport type { NextFn } from '@adonisjs/core/types/http'\n\n/**\n * U"
  },
  {
    "path": "admin/app/middleware/maps_static_middleware.ts",
    "chars": 632,
    "preview": "import type { HttpContext } from '@adonisjs/core/http'\nimport type { NextFn } from '@adonisjs/core/types/http'\nimport St"
  },
  {
    "path": "admin/app/models/benchmark_result.ts",
    "chars": 1740,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\nimport"
  },
  {
    "path": "admin/app/models/benchmark_setting.ts",
    "chars": 1735,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\nimport"
  },
  {
    "path": "admin/app/models/chat_message.ts",
    "chars": 833,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, belongsTo, SnakeCaseNamingStrategy } from '@adonisjs/lucid/"
  },
  {
    "path": "admin/app/models/chat_session.ts",
    "chars": 775,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, hasMany, SnakeCaseNamingStrategy } from '@adonisjs/lucid/or"
  },
  {
    "path": "admin/app/models/collection_manifest.ts",
    "chars": 638,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\nimport"
  },
  {
    "path": "admin/app/models/installed_resource.ts",
    "chars": 657,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\n\nexpor"
  },
  {
    "path": "admin/app/models/kv_store.ts",
    "chars": 2058,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\nimport"
  },
  {
    "path": "admin/app/models/service.ts",
    "chars": 2096,
    "preview": "import { BaseModel, belongsTo, column, hasMany, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\nimport type { Belon"
  },
  {
    "path": "admin/app/models/wikipedia_selection.ts",
    "chars": 659,
    "preview": "import { DateTime } from 'luxon'\nimport { BaseModel, column, SnakeCaseNamingStrategy } from '@adonisjs/lucid/orm'\n\nexpor"
  },
  {
    "path": "admin/app/services/benchmark_service.ts",
    "chars": 29391,
    "preview": "import { inject } from '@adonisjs/core'\nimport logger from '@adonisjs/core/services/logger'\nimport transmit from '@adoni"
  },
  {
    "path": "admin/app/services/chat_service.ts",
    "chars": 8812,
    "preview": "import ChatSession from '#models/chat_session'\nimport ChatMessage from '#models/chat_message'\nimport logger from '@adoni"
  },
  {
    "path": "admin/app/services/collection_manifest_service.ts",
    "chars": 10566,
    "preview": "import axios from 'axios'\nimport vine from '@vinejs/vine'\nimport logger from '@adonisjs/core/services/logger'\nimport { D"
  },
  {
    "path": "admin/app/services/collection_update_service.ts",
    "chars": 5142,
    "preview": "import logger from '@adonisjs/core/services/logger'\nimport env from '#start/env'\nimport axios from 'axios'\nimport Instal"
  },
  {
    "path": "admin/app/services/container_registry_service.ts",
    "chars": 16254,
    "preview": "import logger from '@adonisjs/core/services/logger'\nimport { isNewerVersion, parseMajorVersion } from '../utils/version."
  },
  {
    "path": "admin/app/services/docker_service.ts",
    "chars": 40453,
    "preview": "import Service from '#models/service'\nimport Docker from 'dockerode'\nimport logger from '@adonisjs/core/services/logger'"
  },
  {
    "path": "admin/app/services/docs_service.ts",
    "chars": 5810,
    "preview": "import Markdoc from '@markdoc/markdoc'\nimport { streamToString } from '../../util/docs.js'\nimport { getFile, getFileStat"
  },
  {
    "path": "admin/app/services/download_service.ts",
    "chars": 2557,
    "preview": "import { inject } from '@adonisjs/core'\nimport { QueueService } from './queue_service.js'\nimport { RunDownloadJob } from"
  },
  {
    "path": "admin/app/services/map_service.ts",
    "chars": 15185,
    "preview": "import { BaseStylesFile, MapLayer } from '../../types/maps.js'\nimport {\n  DownloadRemoteSuccessCallback,\n  FileEntry,\n} "
  },
  {
    "path": "admin/app/services/ollama_service.ts",
    "chars": 13566,
    "preview": "import { inject } from '@adonisjs/core'\nimport { ChatRequest, Ollama } from 'ollama'\nimport { NomadOllamaModel } from '."
  },
  {
    "path": "admin/app/services/queue_service.ts",
    "chars": 493,
    "preview": "import { Queue } from 'bullmq'\nimport queueConfig from '#config/queue'\n\nexport class QueueService {\n  private queues: Ma"
  },
  {
    "path": "admin/app/services/rag_service.ts",
    "chars": 45652,
    "preview": "import { QdrantClient } from '@qdrant/js-client-rest'\nimport { DockerService } from './docker_service.js'\nimport { injec"
  },
  {
    "path": "admin/app/services/system_service.ts",
    "chars": 22011,
    "preview": "import Service from '#models/service'\nimport { inject } from '@adonisjs/core'\nimport { DockerService } from '#services/d"
  },
  {
    "path": "admin/app/services/system_update_service.ts",
    "chars": 3279,
    "preview": "import logger from '@adonisjs/core/services/logger'\nimport { readFileSync, existsSync } from 'fs'\nimport { writeFile } f"
  },
  {
    "path": "admin/app/services/zim_extraction_service.ts",
    "chars": 12548,
    "preview": "import { Archive, Entry } from '@openzim/libzim'\nimport * as cheerio from 'cheerio'\nimport { HTML_SELECTORS_TO_REMOVE, N"
  },
  {
    "path": "admin/app/services/zim_service.ts",
    "chars": 18659,
    "preview": "import {\n  ListRemoteZimFilesResponse,\n  RawRemoteZimFileEntry,\n  RemoteZimFileEntry,\n} from '../../types/zim.js'\nimport"
  },
  {
    "path": "admin/app/utils/downloads.ts",
    "chars": 6655,
    "preview": "import {\n  DoResumableDownloadParams,\n  DoResumableDownloadWithRetryParams,\n} from '../../types/downloads.js'\nimport axi"
  },
  {
    "path": "admin/app/utils/fs.ts",
    "chars": 4807,
    "preview": "import { mkdir, readdir, readFile, stat, unlink } from 'fs/promises'\nimport path, { join } from 'path'\nimport { FileEntr"
  },
  {
    "path": "admin/app/utils/misc.ts",
    "chars": 768,
    "preview": "export function formatSpeed(bytesPerSecond: number): string {\n  if (bytesPerSecond < 1024) return `${bytesPerSecond.toFi"
  },
  {
    "path": "admin/app/utils/version.ts",
    "chars": 1881,
    "preview": "/**\n * Compare two semantic version strings to determine if the first is newer than the second.\n * @param version1 - The"
  },
  {
    "path": "admin/app/validators/benchmark.ts",
    "chars": 297,
    "preview": "import vine from '@vinejs/vine'\n\nexport const runBenchmarkValidator = vine.compile(\n  vine.object({\n    benchmark_type: "
  },
  {
    "path": "admin/app/validators/chat.ts",
    "chars": 579,
    "preview": "import vine from '@vinejs/vine'\n\nexport const createSessionSchema = vine.compile(\n  vine.object({\n    title: vine.string"
  },
  {
    "path": "admin/app/validators/common.ts",
    "chars": 3047,
    "preview": "import vine from '@vinejs/vine'\n\n/**\n * Checks whether a URL points to a loopback or link-local address.\n * Used to prev"
  },
  {
    "path": "admin/app/validators/curated_collections.ts",
    "chars": 2205,
    "preview": "import vine from '@vinejs/vine'\n\n// ---- Versioned resource validators (with id + version) ----\n\nexport const specResour"
  },
  {
    "path": "admin/app/validators/download.ts",
    "chars": 274,
    "preview": "import vine from '@vinejs/vine'\n\nexport const downloadJobsByFiletypeSchema = vine.compile(\n  vine.object({\n    params: v"
  },
  {
    "path": "admin/app/validators/ollama.ts",
    "chars": 710,
    "preview": "import vine from '@vinejs/vine'\n\nexport const chatSchema = vine.compile(\n  vine.object({\n    model: vine.string().trim()"
  },
  {
    "path": "admin/app/validators/rag.ts",
    "chars": 230,
    "preview": "import vine from '@vinejs/vine'\n\nexport const getJobStatusSchema = vine.compile(\n  vine.object({\n    filePath: vine.stri"
  },
  {
    "path": "admin/app/validators/settings.ts",
    "chars": 230,
    "preview": "import vine from \"@vinejs/vine\";\nimport { SETTINGS_KEYS } from \"../../constants/kv_store.js\";\n\n\nexport const updateSetti"
  },
  {
    "path": "admin/app/validators/system.ts",
    "chars": 801,
    "preview": "import vine from '@vinejs/vine'\n\nexport const installServiceValidator = vine.compile(\n  vine.object({\n    service_name: "
  },
  {
    "path": "admin/app/validators/zim.ts",
    "chars": 242,
    "preview": "import vine from '@vinejs/vine'\n\nexport const listRemoteZimValidator = vine.compile(\n  vine.object({\n    start: vine.num"
  },
  {
    "path": "admin/bin/console.ts",
    "chars": 1333,
    "preview": "/*\n|--------------------------------------------------------------------------\n| Ace entry point\n|----------------------"
  },
  {
    "path": "admin/bin/server.ts",
    "chars": 1727,
    "preview": "/*\n|--------------------------------------------------------------------------\n| HTTP server entrypoint\n|---------------"
  },
  {
    "path": "admin/bin/test.ts",
    "chars": 1719,
    "preview": "/*\n|--------------------------------------------------------------------------\n| Test runner entrypoint\n|---------------"
  },
  {
    "path": "admin/commands/benchmark/results.ts",
    "chars": 3472,
    "preview": "import { BaseCommand, flags } from '@adonisjs/core/ace'\nimport type { CommandOptions } from '@adonisjs/core/types/ace'\n\n"
  },
  {
    "path": "admin/commands/benchmark/run.ts",
    "chars": 3946,
    "preview": "import { BaseCommand, flags } from '@adonisjs/core/ace'\nimport type { CommandOptions } from '@adonisjs/core/types/ace'\n\n"
  },
  {
    "path": "admin/commands/benchmark/submit.ts",
    "chars": 4034,
    "preview": "import { BaseCommand, flags } from '@adonisjs/core/ace'\nimport type { CommandOptions } from '@adonisjs/core/types/ace'\n\n"
  },
  {
    "path": "admin/commands/queue/work.ts",
    "chars": 5491,
    "preview": "import { BaseCommand, flags } from '@adonisjs/core/ace'\nimport type { CommandOptions } from '@adonisjs/core/types/ace'\ni"
  },
  {
    "path": "admin/config/app.ts",
    "chars": 1077,
    "preview": "import env from '#start/env'\nimport app from '@adonisjs/core/services/app'\nimport { Secret } from '@adonisjs/core/helper"
  },
  {
    "path": "admin/config/bodyparser.ts",
    "chars": 1284,
    "preview": "import { defineConfig } from '@adonisjs/core/bodyparser'\n\nconst bodyParserConfig = defineConfig({\n  /**\n   * The bodypar"
  },
  {
    "path": "admin/config/cors.ts",
    "chars": 462,
    "preview": "import { defineConfig } from '@adonisjs/cors'\n\n/**\n * Configuration options to tweak the CORS policy. The following\n * o"
  },
  {
    "path": "admin/config/database.ts",
    "chars": 677,
    "preview": "import env from '#start/env'\nimport { defineConfig } from '@adonisjs/lucid'\n\nconst dbConfig = defineConfig({\n  connectio"
  },
  {
    "path": "admin/config/hash.ts",
    "chars": 503,
    "preview": "import { defineConfig, drivers } from '@adonisjs/core/hash'\n\nconst hashConfig = defineConfig({\n  default: 'scrypt',\n\n  l"
  },
  {
    "path": "admin/config/inertia.ts",
    "chars": 1050,
    "preview": "import KVStore from '#models/kv_store'\nimport { SystemService } from '#services/system_service'\nimport { defineConfig } "
  },
  {
    "path": "admin/config/logger.ts",
    "chars": 1055,
    "preview": "import env from '#start/env'\nimport app from '@adonisjs/core/services/app'\nimport { defineConfig, targets } from '@adoni"
  },
  {
    "path": "admin/config/queue.ts",
    "chars": 177,
    "preview": "import env from '#start/env'\n\nconst queueConfig = {\n  connection: {\n    host: env.get('REDIS_HOST'),\n    port: env.get('"
  },
  {
    "path": "admin/config/session.ts",
    "chars": 1149,
    "preview": "// import env from '#start/env'\n// import app from '@adonisjs/core/services/app'\n// import { defineConfig, stores } from"
  },
  {
    "path": "admin/config/shield.ts",
    "chars": 1009,
    "preview": "import { defineConfig } from '@adonisjs/shield'\n\nconst shieldConfig = defineConfig({\n  /**\n   * Configure CSP policies f"
  },
  {
    "path": "admin/config/static.ts",
    "chars": 438,
    "preview": "import { defineConfig } from '@adonisjs/static'\n\n/**\n * Configuration options to tweak the static files middleware.\n * T"
  },
  {
    "path": "admin/config/transmit.ts",
    "chars": 335,
    "preview": "import env from '#start/env'\nimport { defineConfig } from '@adonisjs/transmit'\nimport { redis } from '@adonisjs/transmit"
  },
  {
    "path": "admin/config/vite.ts",
    "chars": 619,
    "preview": "import { defineConfig } from '@adonisjs/vite'\n\nconst viteBackendConfig = defineConfig({\n  /**\n   * The output of vite wi"
  },
  {
    "path": "admin/constants/broadcast.ts",
    "chars": 226,
    "preview": "\nexport const BROADCAST_CHANNELS = {\n    BENCHMARK_PROGRESS: 'benchmark-progress',\n    OLLAMA_MODEL_DOWNLOAD: 'ollama-mo"
  },
  {
    "path": "admin/constants/kv_store.ts",
    "chars": 227,
    "preview": "import { KVStoreKey } from \"../types/kv_store.js\";\n\nexport const SETTINGS_KEYS: KVStoreKey[] = ['chat.suggestionsEnabled"
  },
  {
    "path": "admin/constants/misc.ts",
    "chars": 72,
    "preview": "\nexport const NOMAD_API_DEFAULT_BASE_URL = 'https://api.projectnomad.us'"
  },
  {
    "path": "admin/constants/ollama.ts",
    "chars": 6365,
    "preview": "import { NomadOllamaModel } from '../types/ollama.js'\n\n/**\n * Fallback basic recommended Ollama models in case fetching "
  },
  {
    "path": "admin/constants/service_names.ts",
    "chars": 208,
    "preview": "export const SERVICE_NAMES = {\n  KIWIX: 'nomad_kiwix_server',\n  OLLAMA: 'nomad_ollama',\n  QDRANT: 'nomad_qdrant',\n  CYBE"
  },
  {
    "path": "admin/constants/zim_extraction.ts",
    "chars": 1061,
    "preview": "\nexport const HTML_SELECTORS_TO_REMOVE = [\n    'script',\n    'style',\n    'nav',\n    'header',\n    'footer',\n    'noscri"
  },
  {
    "path": "admin/database/migrations/1751086751801_create_services_table.ts",
    "chars": 891,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1763499145832_update_services_table.ts",
    "chars": 469,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1764912210741_create_curated_collections_table.ts",
    "chars": 628,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1764912270123_create_curated_collection_resources_table.ts",
    "chars": 768,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1768170944482_update_services_add_installation_statuses_table.ts",
    "chars": 419,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1768453747522_update_services_add_icon.ts",
    "chars": 369,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769097600001_create_benchmark_results_table.ts",
    "chars": 1610,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769097600002_create_benchmark_settings_table.ts",
    "chars": 477,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769300000001_add_powered_by_and_display_order_to_services.ts",
    "chars": 484,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769300000002_update_services_friendly_names.ts",
    "chars": 3219,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769324448000_add_builder_tag_to_benchmark_results.ts",
    "chars": 396,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769400000001_create_installed_tiers_table.ts",
    "chars": 503,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769400000002_create_kv_store_table.ts",
    "chars": 466,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769500000001_create_wikipedia_selection_table.ts",
    "chars": 621,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769646771604_create_create_chat_sessions_table.ts",
    "chars": 466,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769646798266_create_create_chat_messages_table.ts",
    "chars": 606,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1769700000001_create_zim_file_metadata_table.ts",
    "chars": 623,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1770269324176_add_unique_constraint_to_curated_collection_resources_table.ts",
    "chars": 541,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1770273423670_drop_installed_tiers_table.ts",
    "chars": 583,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1770849108030_create_create_collection_manifests_table.ts",
    "chars": 515,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1770849119787_create_create_installed_resources_table.ts",
    "chars": 779,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1770850092871_create_drop_legacy_curated_tables_table.ts",
    "chars": 379,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  async up() {\n    this.s"
  },
  {
    "path": "admin/database/migrations/1771000000001_add_update_fields_to_services.ts",
    "chars": 599,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/migrations/1771000000002_pin_latest_service_images.ts",
    "chars": 2107,
    "preview": "import { BaseSchema } from '@adonisjs/lucid/schema'\n\nexport default class extends BaseSchema {\n  protected tableName = '"
  },
  {
    "path": "admin/database/seeders/service_seeder.ts",
    "chars": 6364,
    "preview": "import Service from '#models/service'\nimport { BaseSeeder } from '@adonisjs/lucid/seeders'\nimport { ModelAttributes } fr"
  },
  {
    "path": "admin/docs/about.md",
    "chars": 1678,
    "preview": "# About Project N.O.M.A.D.\n\nProject N.O.M.A.D. (Node for Offline Media, Archives, and Data; \"Nomad\" for short) is a proj"
  },
  {
    "path": "admin/docs/faq.md",
    "chars": 12115,
    "preview": "# Frequently Asked Questions\n\n## General Questions\n\n### What is N.O.M.A.D.?\nN.O.M.A.D. (Node for Offline Media, Archives"
  },
  {
    "path": "admin/docs/getting-started.md",
    "chars": 8596,
    "preview": "# Getting Started with N.O.M.A.D.\n\nThis guide will help you get the most out of your N.O.M.A.D. server.\n\n---\n\n## Easy Se"
  },
  {
    "path": "admin/docs/home.md",
    "chars": 3234,
    "preview": "# Welcome to Project N.O.M.A.D.\n\nYour personal offline knowledge server is ready to use.\n\n## What is N.O.M.A.D.?\n\n**N.O."
  },
  {
    "path": "admin/docs/release-notes.md",
    "chars": 21055,
    "preview": "# Release Notes\n\n## Version 1.30.0 - March 20, 2026\n\n### Features\n- **Night Ops**: Added our most requested feature — a "
  },
  {
    "path": "admin/docs/use-cases.md",
    "chars": 6810,
    "preview": "# What Can You Do With N.O.M.A.D.?\n\nN.O.M.A.D. is designed to be your information lifeline when internet isn't available"
  },
  {
    "path": "admin/eslint.config.js",
    "chars": 177,
    "preview": "import { configApp } from '@adonisjs/eslint-config'\nimport pluginQuery from '@tanstack/eslint-plugin-query'\nexport defau"
  },
  {
    "path": "admin/inertia/app/app.tsx",
    "chars": 2083,
    "preview": "/// <reference path=\"../../adonisrc.ts\" />\n/// <reference path=\"../../config/inertia.ts\" />\n\nimport '../css/app.css'\nimp"
  },
  {
    "path": "admin/inertia/components/ActiveDownloads.tsx",
    "chars": 2900,
    "preview": "import useDownloads, { useDownloadsProps } from '~/hooks/useDownloads'\nimport HorizontalBarChart from './HorizontalBarCh"
  },
  {
    "path": "admin/inertia/components/ActiveEmbedJobs.tsx",
    "chars": 1298,
    "preview": "import useEmbedJobs from '~/hooks/useEmbedJobs'\nimport HorizontalBarChart from './HorizontalBarChart'\nimport StyledSecti"
  },
  {
    "path": "admin/inertia/components/ActiveModelDownloads.tsx",
    "chars": 1723,
    "preview": "import useOllamaModelDownloads from '~/hooks/useOllamaModelDownloads'\nimport HorizontalBarChart from './HorizontalBarCha"
  },
  {
    "path": "admin/inertia/components/Alert.tsx",
    "chars": 7127,
    "preview": "import * as Icons from '@tabler/icons-react'\nimport classNames from '~/lib/classNames'\nimport DynamicIcon from './Dynami"
  },
  {
    "path": "admin/inertia/components/BouncingDots.tsx",
    "chars": 935,
    "preview": "import clsx from 'clsx'\n\ninterface BouncingDotsProps {\n  text: string\n  containerClassName?: string\n  textClassName?: st"
  },
  {
    "path": "admin/inertia/components/BouncingLogo.tsx",
    "chars": 1049,
    "preview": "import { useState, useEffect } from 'react';\n\n// Fading Image Component\nconst FadingImage = ({  alt = \"Fading image\", cl"
  },
  {
    "path": "admin/inertia/components/BuilderTagSelector.tsx",
    "chars": 4178,
    "preview": "import { IconRefresh } from '@tabler/icons-react'\nimport { useState, useEffect } from 'react'\nimport {\n  ADJECTIVES,\n  N"
  },
  {
    "path": "admin/inertia/components/CategoryCard.tsx",
    "chars": 3604,
    "preview": "import { formatBytes } from '~/lib/util'\nimport DynamicIcon, { DynamicIconName } from './DynamicIcon'\nimport type { Cate"
  },
  {
    "path": "admin/inertia/components/CuratedCollectionCard.tsx",
    "chars": 2181,
    "preview": "import { formatBytes } from '~/lib/util'\nimport DynamicIcon, { DynamicIconName } from './DynamicIcon'\nimport type { Coll"
  },
  {
    "path": "admin/inertia/components/DebugInfoModal.tsx",
    "chars": 3014,
    "preview": "import { useEffect, useState } from 'react'\nimport { IconBug, IconCopy, IconCheck } from '@tabler/icons-react'\nimport St"
  },
  {
    "path": "admin/inertia/components/DownloadURLModal.tsx",
    "chars": 2909,
    "preview": "import { useState } from 'react'\nimport StyledModal, { StyledModalProps } from './StyledModal'\nimport Input from './inpu"
  },
  {
    "path": "admin/inertia/components/DynamicIcon.tsx",
    "chars": 1037,
    "preview": "import classNames from 'classnames'\nimport * as TablerIcons from '@tabler/icons-react'\n\nexport type DynamicIconName = ke"
  },
  {
    "path": "admin/inertia/components/Footer.tsx",
    "chars": 1144,
    "preview": "import { useState } from 'react'\nimport { usePage } from '@inertiajs/react'\nimport { UsePageProps } from '../../types/sy"
  },
  {
    "path": "admin/inertia/components/HorizontalBarChart.tsx",
    "chars": 3727,
    "preview": "import classNames from '~/lib/classNames'\n\ninterface HorizontalBarChartProps {\n  items: Array<{\n    label: string\n    va"
  },
  {
    "path": "admin/inertia/components/InfoTooltip.tsx",
    "chars": 1232,
    "preview": "import { IconInfoCircle } from '@tabler/icons-react'\nimport { useState } from 'react'\n\ninterface InfoTooltipProps {\n  te"
  },
  {
    "path": "admin/inertia/components/InstallActivityFeed.tsx",
    "chars": 2792,
    "preview": "import { IconCircleCheck, IconCircleX } from '@tabler/icons-react'\nimport classNames from '~/lib/classNames'\n\nexport typ"
  },
  {
    "path": "admin/inertia/components/LoadingSpinner.tsx",
    "chars": 1006,
    "preview": "interface LoadingSpinnerProps {\n  text?: string\n  fullscreen?: boolean\n  iconOnly?: boolean\n  light?: boolean\n  classNam"
  },
  {
    "path": "admin/inertia/components/MarkdocRenderer.tsx",
    "chars": 3702,
    "preview": "import React from 'react'\nimport Markdoc from '@markdoc/markdoc'\nimport { Heading } from './markdoc/Heading'\nimport { Li"
  },
  {
    "path": "admin/inertia/components/ProgressBar.tsx",
    "chars": 726,
    "preview": "const ProgressBar = ({ progress, speed }: { progress: number; speed?: string }) => {\n  if (progress >= 100) {\n    return"
  },
  {
    "path": "admin/inertia/components/StorageProjectionBar.tsx",
    "chars": 4661,
    "preview": "import classNames from '~/lib/classNames'\nimport { formatBytes } from '~/lib/util'\nimport { IconAlertTriangle, IconServe"
  },
  {
    "path": "admin/inertia/components/StyledButton.tsx",
    "chars": 4911,
    "preview": "import { useMemo } from 'react'\nimport clsx from 'clsx'\nimport DynamicIcon, { DynamicIconName} from './DynamicIcon'\nimpo"
  },
  {
    "path": "admin/inertia/components/StyledModal.tsx",
    "chars": 3559,
    "preview": "import { Dialog, DialogBackdrop, DialogPanel, DialogTitle } from '@headlessui/react'\nimport StyledButton, { StyledButton"
  },
  {
    "path": "admin/inertia/components/StyledSectionHeader.tsx",
    "chars": 619,
    "preview": "import classNames from 'classnames'\nimport { JSX } from 'react'\n\nexport interface StyledSectionHeaderProps {\n  title: st"
  },
  {
    "path": "admin/inertia/components/StyledSidebar.tsx",
    "chars": 5069,
    "preview": "import { useMemo, useState } from 'react'\nimport { Dialog, DialogBackdrop, DialogPanel, TransitionChild } from '@headles"
  },
  {
    "path": "admin/inertia/components/StyledTable.tsx",
    "chars": 7530,
    "preview": "import { capitalizeFirstLetter } from '~/lib/util'\nimport classNames from '~/lib/classNames'\nimport LoadingSpinner from "
  },
  {
    "path": "admin/inertia/components/ThemeToggle.tsx",
    "chars": 855,
    "preview": "import { IconSun, IconMoon } from '@tabler/icons-react'\nimport { useThemeContext } from '~/providers/ThemeProvider'\n\nint"
  },
  {
    "path": "admin/inertia/components/TierSelectionModal.tsx",
    "chars": 10544,
    "preview": "import { Fragment, useState, useEffect } from 'react'\nimport { Dialog, Transition } from '@headlessui/react'\nimport { Ic"
  },
  {
    "path": "admin/inertia/components/UpdateServiceModal.tsx",
    "chars": 6803,
    "preview": "import { useState } from \"react\"\nimport { ServiceSlim } from \"../../types/services\"\nimport StyledModal from \"./StyledMod"
  },
  {
    "path": "admin/inertia/components/WikipediaSelector.tsx",
    "chars": 7398,
    "preview": "import { formatBytes } from '~/lib/util'\nimport { WikipediaOption, WikipediaCurrentSelection } from '../../types/downloa"
  },
  {
    "path": "admin/inertia/components/chat/ChatAssistantAvatar.tsx",
    "chars": 320,
    "preview": "import { IconWand } from \"@tabler/icons-react\";\n\nexport default function ChatAssistantAvatar() {\n  return (\n    <div cla"
  },
  {
    "path": "admin/inertia/components/chat/ChatButton.tsx",
    "chars": 577,
    "preview": "import { IconMessages } from '@tabler/icons-react'\n\ninterface ChatButtonProps {\n  onClick: () => void\n}\n\nexport default "
  },
  {
    "path": "admin/inertia/components/chat/ChatInterface.tsx",
    "chars": 8883,
    "preview": "import { IconSend, IconWand } from '@tabler/icons-react'\nimport { useState, useRef, useEffect } from 'react'\nimport clas"
  },
  {
    "path": "admin/inertia/components/chat/ChatMessageBubble.tsx",
    "chars": 4704,
    "preview": "import classNames from '~/lib/classNames'\nimport ReactMarkdown from 'react-markdown'\nimport remarkGfm from 'remark-gfm'\n"
  },
  {
    "path": "admin/inertia/components/chat/ChatModal.tsx",
    "chars": 1342,
    "preview": "import { Dialog, DialogBackdrop, DialogPanel } from '@headlessui/react'\nimport Chat from './index'\nimport { useSystemSet"
  },
  {
    "path": "admin/inertia/components/chat/ChatSidebar.tsx",
    "chars": 4972,
    "preview": "import classNames from '~/lib/classNames'\nimport StyledButton from '../StyledButton'\nimport { router, usePage } from '@i"
  },
  {
    "path": "admin/inertia/components/chat/KnowledgeBaseModal.tsx",
    "chars": 11989,
    "preview": "import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'\nimport { useRef, useState } from 'react'\ni"
  },
  {
    "path": "admin/inertia/components/chat/index.tsx",
    "chars": 14590,
    "preview": "import { useState, useCallback, useEffect, useRef, useMemo } from 'react'\nimport { useQuery, useMutation, useQueryClient"
  },
  {
    "path": "admin/inertia/components/file-uploader/index.css",
    "chars": 82,
    "preview": ".uppy-size--md .uppy-Dashboard-AddFiles-title {\n  font-size: 1.15rem !important;\n}"
  },
  {
    "path": "admin/inertia/components/file-uploader/index.tsx",
    "chars": 2531,
    "preview": "import { forwardRef, useImperativeHandle, useState } from 'react'\nimport Uppy from '@uppy/core'\nimport '@uppy/core/css/s"
  },
  {
    "path": "admin/inertia/components/inputs/Input.tsx",
    "chars": 1844,
    "preview": "import classNames from \"classnames\";\nimport { InputHTMLAttributes } from \"react\";\n\nexport interface InputProps extends I"
  },
  {
    "path": "admin/inertia/components/inputs/Switch.tsx",
    "chars": 1881,
    "preview": "import clsx from 'clsx'\n\ninterface SwitchProps {\n  checked: boolean\n  onChange: (checked: boolean) => void\n  label?: str"
  },
  {
    "path": "admin/inertia/components/layout/BackToHomeHeader.tsx",
    "chars": 761,
    "preview": "import { Link } from '@inertiajs/react'\nimport { IconArrowLeft } from '@tabler/icons-react'\nimport classNames from '~/li"
  },
  {
    "path": "admin/inertia/components/maps/MapComponent.tsx",
    "chars": 1120,
    "preview": "import Map, { FullscreenControl, NavigationControl, MapProvider } from 'react-map-gl/maplibre'\nimport maplibregl from 'm"
  },
  {
    "path": "admin/inertia/components/markdoc/Heading.tsx",
    "chars": 837,
    "preview": "import React, { JSX } from 'react'\n\nexport function Heading({\n  level,\n  id,\n  children,\n}: {\n  level: number\n  id: stri"
  },
  {
    "path": "admin/inertia/components/markdoc/Image.tsx",
    "chars": 561,
    "preview": "export function Image({ src, alt, title }: { src: string; alt?: string; title?: string }) {\n  return (\n    <figure class"
  },
  {
    "path": "admin/inertia/components/markdoc/List.tsx",
    "chars": 511,
    "preview": "export function List({\n  ordered = false,\n  start,\n  children,\n}: {\n  ordered?: boolean\n  start?: number\n  children: Rea"
  },
  {
    "path": "admin/inertia/components/markdoc/ListItem.tsx",
    "chars": 164,
    "preview": "export function ListItem({ children }: { children: React.ReactNode }) {\n  return <li className=\"pl-2 text-desert-green-d"
  },
  {
    "path": "admin/inertia/components/markdoc/Table.tsx",
    "chars": 1169,
    "preview": "export function Table({ children }: { children: React.ReactNode }) {\n  return (\n    <div className=\"overflow-x-auto my-6"
  },
  {
    "path": "admin/inertia/components/systeminfo/CircularGauge.tsx",
    "chars": 4775,
    "preview": "import { useEffect, useState } from 'react'\nimport classNames from '~/lib/classNames'\n\ninterface CircularGaugeProps {\n  "
  },
  {
    "path": "admin/inertia/components/systeminfo/InfoCard.tsx",
    "chars": 2529,
    "preview": "import classNames from '~/lib/classNames'\n\ninterface InfoCardProps {\n  title: string\n  icon?: React.ReactNode\n  data: Ar"
  },
  {
    "path": "admin/inertia/components/systeminfo/StatusCard.tsx",
    "chars": 575,
    "preview": "export type StatusCardProps = {\n  title: string\n  value: string | number\n}\n\nexport default function StatusCard({ title, "
  },
  {
    "path": "admin/inertia/context/ModalContext.ts",
    "chars": 599,
    "preview": "import { createContext, useContext, ReactNode } from 'react'\n\ninterface ModalContextProps {\n  openModal: (content: React"
  },
  {
    "path": "admin/inertia/context/NotificationContext.ts",
    "chars": 735,
    "preview": "import { createContext, useContext } from \"react\";\n\nexport interface Notification {\n  message: string;\n  type: \"error\" |"
  },
  {
    "path": "admin/inertia/css/app.css",
    "chars": 3708,
    "preview": "@import 'tailwindcss';\n\n@theme {\n  --color-desert-white: #f6f6f4;\n  --color-desert-sand: #f7eedc;\n\n  --color-desert-gree"
  },
  {
    "path": "admin/inertia/hooks/useDebounce.ts",
    "chars": 511,
    "preview": "import { useRef, useEffect } from \"react\";\n\nconst useDebounce = () => {\n  const timeout = useRef<number | undefined>(400"
  },
  {
    "path": "admin/inertia/hooks/useDiskDisplayData.ts",
    "chars": 2664,
    "preview": "import { NomadDiskInfo } from '../../types/system'\nimport { Systeminformation } from 'systeminformation'\nimport { format"
  },
  {
    "path": "admin/inertia/hooks/useDownloads.ts",
    "chars": 965,
    "preview": "import { useQuery, useQueryClient } from '@tanstack/react-query'\nimport { useMemo } from 'react'\nimport api from '~/lib/"
  }
]

// ... and 94 more files (download for full content)

About this extraction

This page contains the full source code of the Crosstalk-Solutions/project-nomad GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 294 files (1.1 MB), approximately 276.0k tokens, and a symbol index with 852 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!