Repository: Maciejonos/qbitwebui Branch: master Commit: 1d1e3469ce89 Files: 183 Total size: 1.2 MB Directory structure: gitextract_1rmw6cng/ ├── .dockerignore ├── .github/ │ ├── FUNDING.yml │ ├── ISSUE_TEMPLATE/ │ │ ├── bug_report.md │ │ └── feature_request.md │ └── workflows/ │ ├── docker.yml │ ├── docs.yml │ └── tests.yml ├── .gitignore ├── .npmrc ├── .prettierrc ├── Dockerfile ├── LICENSE ├── README.md ├── __tests__/ │ ├── __mocks__/ │ │ └── bun-sqlite.ts │ ├── api/ │ │ ├── auth.test.ts │ │ ├── crossSeed.test.ts │ │ ├── files.test.ts │ │ ├── instances.test.ts │ │ ├── integrations.test.ts │ │ └── qbittorrent.test.ts │ ├── hooks/ │ │ ├── useInstance.test.tsx │ │ └── usePagination.test.tsx │ ├── reporter.ts │ ├── server/ │ │ ├── crossSeedCache.test.ts │ │ ├── crossSeedMatcher.test.ts │ │ ├── crossSeedScheduler.test.ts │ │ ├── crossSeedWorker.test.ts │ │ ├── fetch.test.ts │ │ ├── logger.test.ts │ │ ├── rateLimit.test.ts │ │ └── url.test.ts │ ├── themes/ │ │ └── themes.test.ts │ └── utils/ │ ├── fileTree.test.ts │ ├── format.test.ts │ ├── pagination.test.ts │ ├── ratioThresholds.test.ts │ └── search.test.ts ├── docs/ │ ├── .vitepress/ │ │ ├── config.ts │ │ └── theme/ │ │ ├── custom.css │ │ └── index.ts │ ├── guide/ │ │ ├── configuration.md │ │ ├── docker.md │ │ ├── features.md │ │ ├── getting-started.md │ │ └── network-agent/ │ │ └── index.md │ └── index.md ├── eslint.config.js ├── index.html ├── net-agent/ │ ├── Dockerfile │ ├── README.md │ ├── go.mod │ └── main.go ├── package.json ├── src/ │ ├── App.tsx │ ├── api/ │ │ ├── auth.ts │ │ ├── crossSeed.ts │ │ ├── files.ts │ │ ├── instances.ts │ │ ├── integrations.ts │ │ ├── netAgent.ts │ │ ├── qbittorrent.ts │ │ └── stats.ts │ ├── components/ │ │ ├── AddTorrentModal.tsx │ │ ├── AuthForm.tsx │ │ ├── CategoryTagManager.tsx │ │ ├── ContextMenu.tsx │ │ ├── CrossSeedManager.tsx │ │ ├── DateSettingsPopup.tsx │ │ ├── FileBrowser.tsx │ │ ├── FilterBar.tsx │ │ ├── Header.tsx │ │ ├── InstanceManager.tsx │ │ ├── Layout.tsx │ │ ├── 
LogViewer.tsx │ │ ├── NetworkTools.tsx │ │ ├── OrphanManager.tsx │ │ ├── RSSManager.tsx │ │ ├── RatioThresholdPopup.tsx │ │ ├── SearchPanel.tsx │ │ ├── SettingsPanel.tsx │ │ ├── Statistics.tsx │ │ ├── StatusBar.tsx │ │ ├── ThemeManager.tsx │ │ ├── ThemeSwitcher.tsx │ │ ├── TorrentDetailsPanel.tsx │ │ ├── TorrentList.tsx │ │ ├── TorrentRow.tsx │ │ ├── ViewSelector.tsx │ │ ├── columns.ts │ │ ├── settings/ │ │ │ ├── AdvancedTab.tsx │ │ │ ├── BehaviorTab.tsx │ │ │ ├── BitTorrentTab.tsx │ │ │ ├── ConnectionTab.tsx │ │ │ ├── DownloadsTab.tsx │ │ │ ├── RSSTab.tsx │ │ │ ├── SpeedTab.tsx │ │ │ ├── WebUITab.tsx │ │ │ └── index.ts │ │ └── ui/ │ │ ├── Checkbox.tsx │ │ ├── MultiSelect.tsx │ │ ├── Select.tsx │ │ ├── Toggle.tsx │ │ └── index.ts │ ├── contexts/ │ │ ├── InstanceProvider.tsx │ │ ├── PaginationProvider.tsx │ │ ├── ThemeContext.ts │ │ ├── ThemeProvider.tsx │ │ ├── instanceContext.ts │ │ └── paginationContext.ts │ ├── hooks/ │ │ ├── useClickOutside.ts │ │ ├── useCrossSeed.ts │ │ ├── useInstance.ts │ │ ├── usePagination.ts │ │ ├── useRSSManager.ts │ │ ├── useStats.ts │ │ ├── useSyncMaindata.ts │ │ ├── useTheme.ts │ │ ├── useTorrentDetails.ts │ │ ├── useTorrents.ts │ │ ├── useTransferInfo.ts │ │ └── useUpdateCheck.ts │ ├── index.css │ ├── main.tsx │ ├── mobile/ │ │ ├── MobileApp.tsx │ │ ├── MobileCrossSeedManager.tsx │ │ ├── MobileFileBrowser.tsx │ │ ├── MobileInstancePicker.tsx │ │ ├── MobileLogViewer.tsx │ │ ├── MobileNetworkTools.tsx │ │ ├── MobileOrphanManager.tsx │ │ ├── MobileRSSManager.tsx │ │ ├── MobileSearchPanel.tsx │ │ ├── MobileStatistics.tsx │ │ ├── MobileStats.tsx │ │ ├── MobileThemeManager.tsx │ │ ├── MobileThemeSwitcher.tsx │ │ ├── MobileTools.tsx │ │ ├── MobileTorrentDetail.tsx │ │ └── MobileTorrentList.tsx │ ├── server/ │ │ ├── db/ │ │ │ └── index.ts │ │ ├── index.ts │ │ ├── middleware/ │ │ │ └── auth.ts │ │ ├── routes/ │ │ │ ├── auth.ts │ │ │ ├── crossSeed.ts │ │ │ ├── files.ts │ │ │ ├── instances.ts │ │ │ ├── integrations.ts │ │ │ ├── proxy.ts │ │ │ 
├── stats.ts │ │ │ └── tools.ts │ │ └── utils/ │ │ ├── crossSeedCache.ts │ │ ├── crossSeedMatcher.ts │ │ ├── crossSeedScheduler.ts │ │ ├── crossSeedWorker.ts │ │ ├── crypto.ts │ │ ├── fetch.ts │ │ ├── logger.ts │ │ ├── qbt.ts │ │ ├── rateLimit.ts │ │ ├── statsRecorder.ts │ │ ├── torznab.ts │ │ └── url.ts │ ├── themes/ │ │ └── index.ts │ ├── types/ │ │ ├── preferences.ts │ │ ├── qbittorrent.ts │ │ ├── rss.ts │ │ ├── torrentDetails.ts │ │ └── views.ts │ └── utils/ │ ├── colorUtils.ts │ ├── customViews.ts │ ├── dateSettings.ts │ ├── fileTree.ts │ ├── format.ts │ ├── markdown.tsx │ ├── pagination.ts │ ├── ratioThresholds.ts │ └── search.ts ├── tsconfig.app.json ├── tsconfig.json ├── tsconfig.node.json ├── tsconfig.server.json ├── vite.config.ts └── vitest.config.ts ================================================ FILE CONTENTS ================================================ ================================================ FILE: .dockerignore ================================================ node_modules dist .git *.md docs ================================================ FILE: .github/FUNDING.yml ================================================ # These are supported funding model platforms github: [Maciejonos] buy_me_a_coffee: maciejonos ================================================ FILE: .github/ISSUE_TEMPLATE/bug_report.md ================================================ --- name: Bug report about: Create a report to help us improve title: '' labels: '' assignees: '' --- **Describe the bug** A clear and concise description of what the bug is. **To Reproduce** Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error **Expected behavior** A clear and concise description of what you expected to happen. **Screenshots** If applicable, add screenshots to help explain your problem. **Desktop (please complete the following information):** - OS: [e.g. iOS] - Browser [e.g. chrome, safari] - Version [e.g. 
22] **Smartphone (please complete the following information):** - Device: [e.g. iPhone6] - OS: [e.g. iOS8.1] - Browser [e.g. stock browser, safari] - Version [e.g. 22] **Additional context** Add any other context about the problem here. ================================================ FILE: .github/ISSUE_TEMPLATE/feature_request.md ================================================ --- name: Feature request about: Suggest an idea for this project title: '' labels: '' assignees: '' --- **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** A clear and concise description of what you want to happen. **Describe alternatives you've considered** A clear and concise description of any alternative solutions or features you've considered. **Additional context** Add any other context or screenshots about the feature request here. ================================================ FILE: .github/workflows/docker.yml ================================================ name: Docker on: push: tags: ['v*'] workflow_dispatch: env: REGISTRY: ghcr.io IMAGE_NAME: ${{ github.repository }} jobs: build: runs-on: ubuntu-latest permissions: contents: read packages: write steps: - uses: actions/checkout@v4 - uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - uses: docker/metadata-action@v5 id: meta with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | type=semver,pattern={{version}} type=semver,pattern={{major}}.{{minor}} type=raw,value=latest - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - uses: docker/build-push-action@v6 with: context: . 
push: true platforms: linux/amd64,linux/arm64 tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} agent: runs-on: ubuntu-latest permissions: contents: read packages: write steps: - uses: actions/checkout@v4 - uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - uses: docker/metadata-action@v5 id: meta with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-agent tags: | type=semver,pattern={{version}} type=semver,pattern={{major}}.{{minor}} type=raw,value=latest - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - uses: docker/build-push-action@v6 with: context: ./net-agent push: true platforms: linux/amd64,linux/arm64 tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} ================================================ FILE: .github/workflows/docs.yml ================================================ name: Docs on: push: branches: [master] paths: - 'docs/**' - '.github/workflows/docs.yml' workflow_dispatch: permissions: contents: read pages: write id-token: write concurrency: group: pages cancel-in-progress: false jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: oven-sh/setup-bun@v2 - run: bun install --frozen-lockfile - run: bun run docs:build - uses: actions/configure-pages@v4 - uses: actions/upload-pages-artifact@v3 with: path: docs/.vitepress/dist deploy: environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} needs: build runs-on: ubuntu-latest steps: - uses: actions/deploy-pages@v4 id: deployment ================================================ FILE: .github/workflows/tests.yml ================================================ name: CI on: push: branches: [master] pull_request: branches: [master] jobs: ci: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: oven-sh/setup-bun@v2 - run: bun install 
--frozen-lockfile - run: bun run lint - run: bun run build - run: bun run test env: CI: true agent: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: actions/setup-go@v5 with: go-version: '1.22' - run: go build -o /dev/null . working-directory: net-agent ================================================ FILE: .gitignore ================================================ # local data node_modules dist *.local *.log CLAUDE.md docker-compose.yml data/ *.env #enforce package manager and runtime package-lock.json yarn.lock pnpm-lock.yaml # dbs *.db *.db-wal *.db-shm # tests coverage/ # docs docs/.vitepress/dist docs/.vitepress/cache ================================================ FILE: .npmrc ================================================ engine-strict=true ================================================ FILE: .prettierrc ================================================ { "useTabs": true, "tabWidth": 2, "semi": false, "singleQuote": true, "trailingComma": "es5", "printWidth": 120 } ================================================ FILE: Dockerfile ================================================ FROM oven/bun:alpine AS builder WORKDIR /app COPY package.json bun.lock ./ RUN bun install --frozen-lockfile COPY . . 
RUN bun run build FROM oven/bun:alpine WORKDIR /app COPY --from=builder /app/dist ./dist COPY --from=builder /app/src/server ./src/server COPY --from=builder /app/node_modules ./node_modules COPY --from=builder /app/package.json ./ ENV NODE_ENV=production ENV PORT=3000 ENV DATABASE_PATH=/data/qbitwebui.db ENV SALT_PATH=/data/.salt EXPOSE 3000 VOLUME /data CMD ["bun", "run", "src/server/index.ts"] ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2026 Maciej Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================
logo ### A modern web interface for managing multiple qBittorrent instances Built with [React](https://react.dev/), [Hono](https://hono.dev/), and [Bun](https://bun.sh/) [![GitHub stars](https://img.shields.io/github/stars/Maciejonos/qbitwebui?style=for-the-badge&labelColor=101418&color=9ccbfb)](https://github.com/Maciejonos/qbitwebui/stargazers) [![GitHub License](https://img.shields.io/github/license/Maciejonos/qbitwebui?style=for-the-badge&labelColor=101418&color=abedd5)](https://github.com/Maciejonos/qbitwebui/blob/master/LICENSE) [![GitHub release](https://img.shields.io/github/v/release/Maciejonos/qbitwebui?style=for-the-badge&labelColor=101418&color=b9c8da)](https://github.com/Maciejonos/qbitwebui/releases) [![Docker Build](https://img.shields.io/github/actions/workflow/status/Maciejonos/qbitwebui/docker.yml?style=for-the-badge&labelColor=101418&color=4EB329&label=build)](https://github.com/Maciejonos/qbitwebui/actions) **[Documentation](https://maciejonos.github.io/qbitwebui/)** · **[Docker Examples](https://maciejonos.github.io/qbitwebui/guide/docker)** · **[All Features](https://maciejonos.github.io/qbitwebui/guide/features)**
main

Mobile UI

mobile mobile-detailed
## Features See [features section](https://maciejonos.github.io/qbitwebui/guide/features) for more details. - **Multi-instance** - Manage multiple qBittorrent instances from one dashboard - **Cross seed** - Automatic cross-seeding directly in qbitwebui (experimental) - **Instance statistics** - Overview of all instances with status, speeds, torrent counts - **Prowlarr integration** - Search indexers and send torrents directly to qBittorrent - **Real-time monitoring** - Auto-refresh torrent status, speeds, progress - **Customizable columns** - Show/hide columns, drag and drop reorder - **Torrent management** - Add via magnet/file, set priorities, manage trackers/peers - **Organization** - Filter by status, category, tag, or tracker, custom views - **Bulk actions** - Multi-select with context menu, keyboard navigation - **Themes** - Multiple color themes included - **File browser** - Browse and download files from your downloads directory - **RSS management** - Define rules, add RSS feeds, manage folders - **Network agent** - Speedtest, IP check, DNS diagnostics - [setup instructions](https://maciejonos.github.io/qbitwebui/guide/network-agent) ## Docker See [Docker section](https://maciejonos.github.io/qbitwebui/guide/docker) for all setup options. 
```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest ports: - "3000:3000" environment: # Generate your own: openssl rand -hex 32 - ENCRYPTION_KEY=your-secret-key-here # Uncomment to disable login (single-user mode) # - DISABLE_AUTH=true # Uncomment to disable registration (creates default admin account) # - DISABLE_REGISTRATION=true # Uncomment to allow HTTPS with self-signed certificates # - ALLOW_SELF_SIGNED_CERTS=true # Uncomment to enable file browser # - DOWNLOADS_PATH=/downloads volumes: - ./data:/data # Uncomment to enable file browser (read-only: browse & download only) # - /path/to/your/downloads:/downloads:ro # Or mount read-write to enable delete/move/copy/rename # - /path/to/your/downloads:/downloads restart: unless-stopped ``` ## Development ```bash export ENCRYPTION_KEY=$(openssl rand -hex 32) bun install bun run dev ``` ## Tech Stack React 19, TypeScript, Tailwind CSS v4, Vite, TanStack Query, Hono, SQLite, Bun ## Star History [![Star History Chart](https://api.star-history.com/svg?repos=Maciejonos/qbitwebui&type=date&legend=top-left)](https://www.star-history.com/#Maciejonos/qbitwebui&type=date&legend=top-left) ## Credits Big thanks to [cross-seed](https://github.com/cross-seed/cross-seed). A huge chunk of Qbitwebui's cross-seed implementation is basically taken from cross-seed directly, or ported and slightly adjusted. Qbitwebui is of course in no way associated with or endorsed by cross-seed. I highly recommend checking cross-seed out if you want something very reliable. 
## License MIT ================================================ FILE: __tests__/__mocks__/bun-sqlite.ts ================================================ import { vi } from 'vitest' export class Database { exec = vi.fn() run = vi.fn(() => ({ changes: 0, lastInsertRowid: 0 })) query = vi.fn(() => ({ get: vi.fn(), all: vi.fn(() => []), })) close = vi.fn() } ================================================ FILE: __tests__/api/auth.test.ts ================================================ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' import { register, login, logout, getMe, changePassword } from '../../src/api/auth' // Mock fetch globally const mockFetch = vi.fn() vi.stubGlobal('fetch', mockFetch) describe('auth API', () => { beforeEach(() => { mockFetch.mockReset() }) afterEach(() => { vi.clearAllMocks() }) describe('register', () => { it('sends correct request and returns user on success', async () => { const mockUser = { id: 1, username: 'testuser' } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockUser), }) const result = await register('testuser', 'password123') expect(mockFetch).toHaveBeenCalledWith('/api/auth/register', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ username: 'testuser', password: 'password123' }), }) expect(result).toEqual(mockUser) }) it('throws error with message from response on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Username already exists' }), }) await expect(register('testuser', 'password')) .rejects.toThrow('Username already exists') }) it('throws default error when no error message in response', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({}), }) await expect(register('testuser', 'password')) .rejects.toThrow('Registration failed') }) }) describe('login', () => { it('sends correct request and returns user on 
success', async () => { const mockUser = { id: 1, username: 'testuser' } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockUser), }) const result = await login('testuser', 'password123') expect(mockFetch).toHaveBeenCalledWith('/api/auth/login', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ username: 'testuser', password: 'password123' }), }) expect(result).toEqual(mockUser) }) it('throws error with message on invalid credentials', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Invalid credentials' }), }) await expect(login('testuser', 'wrongpassword')) .rejects.toThrow('Invalid credentials') }) }) describe('logout', () => { it('sends POST request to logout endpoint', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await logout() expect(mockFetch).toHaveBeenCalledWith('/api/auth/logout', { method: 'POST', credentials: 'include', }) }) }) describe('getMe', () => { it('returns user when authenticated', async () => { const mockUser = { id: 1, username: 'testuser' } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockUser), }) const result = await getMe() expect(mockFetch).toHaveBeenCalledWith('/api/auth/me', { credentials: 'include', }) expect(result).toEqual(mockUser) }) it('returns null when not authenticated', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) const result = await getMe() expect(result).toBeNull() }) }) describe('changePassword', () => { it('sends correct request on success', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await changePassword('oldpass', 'newpass') expect(mockFetch).toHaveBeenCalledWith('/api/auth/password', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ currentPassword: 'oldpass', newPassword: 'newpass' }), }) }) it('throws error when current password is wrong', async 
() => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Current password is incorrect' }), }) await expect(changePassword('wrongpass', 'newpass')) .rejects.toThrow('Current password is incorrect') }) }) }) ================================================ FILE: __tests__/api/crossSeed.test.ts ================================================ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' import { getCrossSeedConfig, updateCrossSeedConfig, triggerScan, getSchedulerStatus, getInstanceStatus, clearCache, getCacheStats, getSearchHistory, getDecisions, getIndexers, stopScan, getLogs, } from '../../src/api/crossSeed' describe('crossSeed API', () => { const mockFetch = vi.fn() const originalFetch = global.fetch beforeEach(() => { global.fetch = mockFetch mockFetch.mockReset() }) afterEach(() => { global.fetch = originalFetch }) describe('getCrossSeedConfig', () => { it('fetches config for instance', async () => { const mockConfig = { instance_id: 1, enabled: true, interval_hours: 24, dry_run: false, category_suffix: '_cross-seed', tag: 'cross-seed', skip_recheck: false, integration_id: 1, indexer_ids: [1, 2], last_run: null, next_run: null, } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockConfig), }) const result = await getCrossSeedConfig(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/config/1', { credentials: 'include' }) expect(result).toEqual(mockConfig) }) it('throws on error response', async () => { mockFetch.mockResolvedValueOnce({ ok: false, }) await expect(getCrossSeedConfig(1)).rejects.toThrow('Failed to fetch cross-seed config') }) }) describe('updateCrossSeedConfig', () => { it('updates config successfully', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ success: true }), }) await updateCrossSeedConfig(1, { enabled: true, interval_hours: 12, indexer_ids: [1, 2] }) 
expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/config/1', { method: 'PUT', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ enabled: true, interval_hours: 12, indexer_ids: [1, 2] }), }) }) it('throws on error with message', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Invalid config' }), }) await expect(updateCrossSeedConfig(1, {})).rejects.toThrow('Invalid config') }) }) describe('triggerScan', () => { it('triggers scan without force', async () => { const mockResult = { instanceId: 1, torrentsTotal: 150, torrentsScanned: 100, torrentsSkipped: 50, matchesFound: 5, torrentsAdded: 3, errors: [], dryRun: false, startedAt: 1704067200000, completedAt: 1704067260000, } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockResult), }) const result = await triggerScan(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/scan/1', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ force: false }), }) expect(result).toEqual(mockResult) }) it('triggers force scan', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({}), }) await triggerScan(1, true) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/scan/1', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ force: true }), }) }) }) describe('getSchedulerStatus', () => { it('fetches all scheduler statuses', async () => { const mockStatuses = [ { instanceId: 1, enabled: true, running: false }, { instanceId: 2, enabled: false, running: false }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockStatuses), }) const result = await getSchedulerStatus() expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/status', { credentials: 'include' }) expect(result).toEqual(mockStatuses) }) }) describe('getIndexers', 
() => { it('fetches indexers for instance', async () => { const mockIndexers = [{ id: 1, name: 'Indexer A', protocol: 'torrent', supportsSearch: true, categories: [2000] }] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockIndexers), }) const result = await getIndexers(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/indexers/1', { credentials: 'include' }) expect(result).toEqual(mockIndexers) }) it('throws on error response', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'No Prowlarr integration configured' }), }) await expect(getIndexers(1)).rejects.toThrow('No Prowlarr integration configured') }) }) describe('getInstanceStatus', () => { it('fetches status for specific instance', async () => { const mockStatus = { instanceId: 1, enabled: true, running: true, lastRun: 1704067200, nextRun: 1704153600, } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockStatus), }) const result = await getInstanceStatus(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/status/1', { credentials: 'include' }) expect(result).toEqual(mockStatus) }) }) describe('clearCache', () => { it('clears cache for instance', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ cacheCleared: 10, outputCleared: 5 }), }) const result = await clearCache(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/cache/1/clear', { method: 'POST', credentials: 'include', }) expect(result.cacheCleared).toBe(10) expect(result.outputCleared).toBe(5) }) }) describe('getCacheStats', () => { it('fetches cache statistics', async () => { const mockStats = { cache: { count: 50, totalSize: 1024000 }, output: { count: 10, files: ['file1.torrent', 'file2.torrent'] }, } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockStats), }) const result = await getCacheStats(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/cache/1/stats', { 
credentials: 'include' }) expect(result).toEqual(mockStats) }) }) describe('getSearchHistory', () => { it('fetches search history with pagination', async () => { const mockHistory = { searchees: [ { id: 1, torrent_name: 'Movie 1', decision_count: 5 }, { id: 2, torrent_name: 'Movie 2', decision_count: 3 }, ], total: 100, } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockHistory), }) const result = await getSearchHistory(1, 50, 10) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/history/1?limit=50&offset=10', { credentials: 'include', }) expect(result.total).toBe(100) }) it('uses default pagination', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ searchees: [], total: 0 }), }) await getSearchHistory(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/history/1?limit=100&offset=0', { credentials: 'include', }) }) }) describe('getDecisions', () => { it('fetches decisions for searchee', async () => { const mockDecisions = [ { id: 1, decision: 'MATCH', candidate_name: 'Match 1' }, { id: 2, decision: 'SIZE_MISMATCH', candidate_name: 'No Match' }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockDecisions), }) const result = await getDecisions(1, 5) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/history/1/5/decisions', { credentials: 'include', }) expect(result).toEqual(mockDecisions) }) }) describe('stopScan', () => { it('stops scan for instance', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ stopped: true }), }) const result = await stopScan(1) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/stop/1', { method: 'POST', credentials: 'include', }) expect(result.stopped).toBe(true) }) it('throws on error response', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'No scan running' }), }) await expect(stopScan(1)).rejects.toThrow('No scan running') }) }) 
describe('getLogs', () => { it('fetches logs with limit', async () => { const mockLogs = [{ timestamp: '2024-01-01T00:00:00.000Z', level: 'INFO', message: 'Test' }] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockLogs), }) const result = await getLogs(200) expect(mockFetch).toHaveBeenCalledWith('/api/cross-seed/logs?limit=200', { credentials: 'include' }) expect(result).toEqual(mockLogs) }) }) }) ================================================ FILE: __tests__/api/files.test.ts ================================================ import { describe, it, expect, vi, beforeEach } from 'vitest' import { listFiles, getDownloadUrl, checkWritable, deleteFiles, moveFiles, copyFiles, renameFile, } from '../../src/api/files' const mockFetch = vi.fn() vi.stubGlobal('fetch', mockFetch) describe('files API', () => { beforeEach(() => { mockFetch.mockReset() }) describe('listFiles', () => { it('fetches files with encoded path', async () => { const mockFiles = [ { name: 'file1.txt', size: 1024, isDirectory: false, modified: 1234567890 }, { name: 'folder', size: 0, isDirectory: true, modified: 1234567900 }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockFiles), }) const result = await listFiles('/downloads/movies') expect(mockFetch).toHaveBeenCalledWith( '/api/files?path=%2Fdownloads%2Fmovies', { credentials: 'include' } ) expect(result).toEqual(mockFiles) }) it('handles special characters in path', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve([]), }) await listFiles('/downloads/My Movies & Shows') expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('My%20Movies%20%26%20Shows'), expect.anything() ) }) it('throws error with message from server', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Path not found' }), }) await expect(listFiles('/nonexistent')).rejects.toThrow('Path not found') }) }) describe('getDownloadUrl', () => { 
it('returns encoded download URL', () => { const url = getDownloadUrl('/downloads/movie.mkv') expect(url).toBe('/api/files/download?path=%2Fdownloads%2Fmovie.mkv') }) it('handles special characters', () => { const url = getDownloadUrl('/downloads/Movie (2024) [1080p].mkv') expect(url).toContain('Movie%20') expect(url).toContain('%5B1080p%5D') }) }) describe('checkWritable', () => { it('returns true when writable', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ writable: true }), }) const result = await checkWritable() expect(result).toBe(true) }) it('returns false when not writable', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ writable: false }), }) const result = await checkWritable() expect(result).toBe(false) }) it('returns false on request failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) const result = await checkWritable() expect(result).toBe(false) }) }) describe('deleteFiles', () => { it('sends delete request with paths', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await deleteFiles(['/downloads/file1.txt', '/downloads/file2.txt']) expect(mockFetch).toHaveBeenCalledWith('/api/files/delete', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ paths: ['/downloads/file1.txt', '/downloads/file2.txt'] }), }) }) it('throws error on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Permission denied' }), }) await expect(deleteFiles(['/protected/file'])).rejects.toThrow('Permission denied') }) }) describe('moveFiles', () => { it('sends move request with paths and destination', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await moveFiles(['/downloads/file.txt'], '/archive') expect(mockFetch).toHaveBeenCalledWith('/api/files/move', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 
'include', body: JSON.stringify({ paths: ['/downloads/file.txt'], destination: '/archive' }), }) }) it('throws error on move failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Destination not found' }), }) await expect(moveFiles(['/file'], '/nonexistent')).rejects.toThrow('Destination not found') }) }) describe('copyFiles', () => { it('sends copy request with paths and destination', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await copyFiles(['/downloads/file.txt'], '/backup') expect(mockFetch).toHaveBeenCalledWith('/api/files/copy', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ paths: ['/downloads/file.txt'], destination: '/backup' }), }) }) }) describe('renameFile', () => { it('sends rename request', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await renameFile('/downloads/old.txt', 'new.txt') expect(mockFetch).toHaveBeenCalledWith('/api/files/rename', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ path: '/downloads/old.txt', newName: 'new.txt' }), }) }) it('throws error on rename failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'File already exists' }), }) await expect(renameFile('/file', 'existing')).rejects.toThrow('File already exists') }) }) }) ================================================ FILE: __tests__/api/instances.test.ts ================================================ import { describe, it, expect, vi, beforeEach } from 'vitest' import { getInstances, createInstance, updateInstance, deleteInstance, type CreateInstanceData, } from '../../src/api/instances' const mockFetch = vi.fn() vi.stubGlobal('fetch', mockFetch) describe('instances API', () => { beforeEach(() => { mockFetch.mockReset() }) describe('getInstances', () => { it('fetches and returns instance list', async () => 
{ const mockInstances = [ { id: 1, label: 'Home', url: 'http://localhost:8080', qbt_username: 'admin', skip_auth: false, created_at: 1234567890 }, { id: 2, label: 'Server', url: 'http://192.168.1.100:8080', qbt_username: null, skip_auth: true, created_at: 1234567900 }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockInstances), }) const result = await getInstances() expect(mockFetch).toHaveBeenCalledWith('/api/instances', { credentials: 'include', }) expect(result).toEqual(mockInstances) expect(result).toHaveLength(2) }) it('throws error on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) await expect(getInstances()).rejects.toThrow('Failed to fetch instances') }) }) describe('createInstance', () => { it('creates instance with all fields', async () => { const createData: CreateInstanceData = { label: 'New Instance', url: 'http://localhost:9090', qbt_username: 'admin', qbt_password: 'secret', skip_auth: false, } const mockInstance = { id: 3, ...createData, qbt_password: undefined, created_at: Date.now(), } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockInstance), }) const result = await createInstance(createData) expect(mockFetch).toHaveBeenCalledWith('/api/instances', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify(createData), }) expect(result.id).toBe(3) expect(result.label).toBe('New Instance') }) it('creates instance with minimal fields', async () => { const createData: CreateInstanceData = { label: 'Minimal', url: 'http://localhost:8080', } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ id: 1, ...createData, skip_auth: false, created_at: 0 }), }) await createInstance(createData) expect(mockFetch).toHaveBeenCalled() }) it('throws error with message from server', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Instance with this label already 
exists' }), }) await expect(createInstance({ label: 'Dup', url: 'http://test' })) .rejects.toThrow('Instance with this label already exists') }) }) describe('updateInstance', () => { it('updates instance with partial data', async () => { const updateData = { label: 'Updated Label' } const mockUpdated = { id: 1, label: 'Updated Label', url: 'http://localhost:8080', qbt_username: 'admin', skip_auth: false, created_at: 1234567890, } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockUpdated), }) const result = await updateInstance(1, updateData) expect(mockFetch).toHaveBeenCalledWith('/api/instances/1', { method: 'PUT', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify(updateData), }) expect(result.label).toBe('Updated Label') }) it('throws error on update failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Instance not found' }), }) await expect(updateInstance(999, { label: 'Test' })) .rejects.toThrow('Instance not found') }) }) describe('deleteInstance', () => { it('deletes instance by id', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await deleteInstance(5) expect(mockFetch).toHaveBeenCalledWith('/api/instances/5', { method: 'DELETE', credentials: 'include', }) }) it('throws error on deletion failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) await expect(deleteInstance(999)).rejects.toThrow('Failed to delete instance') }) }) }) ================================================ FILE: __tests__/api/integrations.test.ts ================================================ import { describe, it, expect, vi, beforeEach } from 'vitest' import { getIntegrations, createIntegration, deleteIntegration, testIntegrationConnection, getIndexers, search, grabRelease, } from '../../src/api/integrations' const mockFetch = vi.fn() vi.stubGlobal('fetch', mockFetch) describe('integrations API', () => { beforeEach(() => { 
mockFetch.mockReset() }) describe('getIntegrations', () => { it('fetches integrations list', async () => { const mockIntegrations = [ { id: 1, type: 'prowlarr', label: 'My Prowlarr', url: 'http://localhost:9696', created_at: 123456 }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockIntegrations), }) const result = await getIntegrations() expect(mockFetch).toHaveBeenCalledWith('/api/integrations', { credentials: 'include' }) expect(result).toEqual(mockIntegrations) }) it('throws on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) await expect(getIntegrations()).rejects.toThrow('Failed to fetch integrations') }) }) describe('createIntegration', () => { it('creates integration with data', async () => { const createData = { type: 'prowlarr', label: 'My Prowlarr', url: 'http://localhost:9696', api_key: 'secret123', } const mockResult = { id: 1, ...createData, created_at: 123456 } mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockResult), }) const result = await createIntegration(createData) expect(mockFetch).toHaveBeenCalledWith('/api/integrations', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify(createData), }) expect(result.id).toBe(1) }) it('throws error with message from server', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Invalid API key' }), }) await expect(createIntegration({ type: 'prowlarr', label: 'Test', url: 'http://test', api_key: 'invalid', })).rejects.toThrow('Invalid API key') }) }) describe('deleteIntegration', () => { it('deletes integration by id', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await deleteIntegration(5) expect(mockFetch).toHaveBeenCalledWith('/api/integrations/5', { method: 'DELETE', credentials: 'include', }) }) it('throws on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) await 
expect(deleteIntegration(999)).rejects.toThrow('Failed to delete integration') }) }) describe('testIntegrationConnection', () => { it('returns success with version', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ success: true, version: '1.0.0' }), }) const result = await testIntegrationConnection('http://localhost:9696', 'apikey123') expect(mockFetch).toHaveBeenCalledWith('/api/integrations/test', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ url: 'http://localhost:9696', api_key: 'apikey123' }), }) expect(result.success).toBe(true) expect(result.version).toBe('1.0.0') }) it('returns failure with error', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve({ success: false, error: 'Connection refused' }), }) const result = await testIntegrationConnection('http://invalid', 'apikey') expect(result.success).toBe(false) expect(result.error).toBe('Connection refused') }) }) describe('getIndexers', () => { it('fetches indexers for integration', async () => { const mockIndexers = [ { id: 1, name: 'Indexer 1', enable: true, protocol: 'torrent' }, { id: 2, name: 'Indexer 2', enable: false, protocol: 'usenet' }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockIndexers), }) const result = await getIndexers(1) expect(mockFetch).toHaveBeenCalledWith('/api/integrations/1/indexers', { credentials: 'include' }) expect(result).toHaveLength(2) }) it('throws on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false }) await expect(getIndexers(1)).rejects.toThrow('Failed to fetch indexers') }) }) describe('search', () => { it('searches with query only', async () => { const mockResults = [ { guid: '123', indexerId: 1, indexer: 'Test', title: 'Result', size: 1000, publishDate: '2024-01-01', categories: [] }, ] mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve(mockResults), }) const 
result = await search(1, 'test query') expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('/api/integrations/1/search?query=test+query'), expect.anything() ) expect(result).toEqual(mockResults) }) it('searches with options', async () => { mockFetch.mockResolvedValueOnce({ ok: true, json: () => Promise.resolve([]), }) await search(1, 'test', { indexerIds: '1,2', categories: '5000', type: 'movie' }) expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('indexerIds=1%2C2'), expect.anything() ) }) it('throws error with message from server', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Rate limited' }), }) await expect(search(1, 'test')).rejects.toThrow('Rate limited') }) }) describe('grabRelease', () => { it('grabs release with download URL', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await grabRelease(1, { guid: 'abc123', indexerId: 1, downloadUrl: 'http://example.com/download', }, 5, { savepath: '/downloads/complete', downloadPath: '/downloads/incomplete' }) expect(mockFetch).toHaveBeenCalledWith('/api/integrations/1/grab', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: expect.stringMatching(/"instanceId":5.*"savepath":"\/downloads\/complete".*"downloadPath":"\/downloads\/incomplete"|"instanceId":5.*"downloadPath":"\/downloads\/incomplete".*"savepath":"\/downloads\/complete"/), }) }) it('grabs release with magnet URL', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await grabRelease(1, { guid: 'abc123', indexerId: 1, magnetUrl: 'magnet:?xt=urn:btih:abc123', }, 5) expect(mockFetch).toHaveBeenCalled() }) it('throws error on failure', async () => { mockFetch.mockResolvedValueOnce({ ok: false, json: () => Promise.resolve({ error: 'Indexer offline' }), }) await expect(grabRelease(1, { guid: '123', indexerId: 1 }, 5)) .rejects.toThrow('Indexer offline') }) }) }) ================================================ FILE: 
__tests__/api/qbittorrent.test.ts ================================================ import { describe, it, expect, vi, beforeEach } from 'vitest' import { getTorrents, getTransferInfo, getSyncMaindata, stopTorrents, startTorrents, deleteTorrents, getCategories, getTags, createTags, deleteTags, setCategory, addTags, removeTags, getTorrentProperties, getTorrentTrackers, getTorrentFiles, renameTorrent, setTorrentLocation, setTorrentDownloadPath, addTrackers, removeTrackers, getPreferences, getLog, getPeerLog, getSpeedLimitsMode, toggleSpeedLimitsMode, createCategory, editCategory, removeCategories, setFilePriority, getRSSItems, getRSSRules, } from '../../src/api/qbittorrent' const mockFetch = vi.fn() vi.stubGlobal('fetch', mockFetch) describe('qBittorrent API', () => { const instanceId = 1 beforeEach(() => { mockFetch.mockReset() }) // Helper to create successful JSON response const jsonResponse = (data: unknown) => ({ ok: true, text: () => Promise.resolve(JSON.stringify(data)), }) describe('getTorrents', () => { it('fetches torrents without filters', async () => { const mockTorrents = [{ hash: 'abc123', name: 'Test Torrent' }] mockFetch.mockResolvedValueOnce(jsonResponse(mockTorrents)) const result = await getTorrents(instanceId) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/info', expect.objectContaining({ credentials: 'include' }) ) expect(result).toEqual(mockTorrents) }) it('fetches torrents with filter options', async () => { mockFetch.mockResolvedValueOnce(jsonResponse([])) await getTorrents(instanceId, { filter: 'downloading', category: 'movies', tag: 'hd' }) expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('filter=downloading'), expect.anything() ) expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('category=movies'), expect.anything() ) expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('tag=hd'), expect.anything() ) }) it('skips filter=all in request', async () => { 
mockFetch.mockResolvedValueOnce(jsonResponse([])) await getTorrents(instanceId, { filter: 'all' }) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/info', expect.anything() ) }) }) describe('getTransferInfo', () => { it('fetches transfer info', async () => { const mockInfo = { dl_info_speed: 1024, up_info_speed: 512 } mockFetch.mockResolvedValueOnce(jsonResponse(mockInfo)) const result = await getTransferInfo(instanceId) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/transfer/info', expect.anything() ) expect(result).toEqual(mockInfo) }) }) describe('getSyncMaindata', () => { it('fetches sync maindata', async () => { const mockData = { rid: 1, torrents: {} } mockFetch.mockResolvedValueOnce(jsonResponse(mockData)) const result = await getSyncMaindata(instanceId) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/sync/maindata?rid=0', expect.anything() ) expect(result).toEqual(mockData) }) }) describe('torrent actions', () => { it('stops torrents', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await stopTorrents(instanceId, ['hash1', 'hash2']) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/stop', expect.objectContaining({ method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, }) ) }) it('starts torrents', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await startTorrents(instanceId, ['hash1']) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/start', expect.anything() ) }) it('deletes torrents without files', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await deleteTorrents(instanceId, ['hash1'], false) const call = mockFetch.mock.calls[0] expect(call[0]).toBe('/api/instances/1/qbt/v2/torrents/delete') expect(call[1].body.get('deleteFiles')).toBe('false') }) it('deletes torrents with files', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await 
deleteTorrents(instanceId, ['hash1'], true) const call = mockFetch.mock.calls[0] expect(call[1].body.get('deleteFiles')).toBe('true') }) it('changes torrent save path', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await setTorrentLocation(instanceId, ['hash1', 'hash2'], '/downloads/new-path') const call = mockFetch.mock.calls[0] expect(call[0]).toBe('/api/instances/1/qbt/v2/torrents/setLocation') expect(call[1].body.get('hashes')).toBe('hash1|hash2') expect(call[1].body.get('location')).toBe('/downloads/new-path') }) it('changes torrent download path', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await setTorrentDownloadPath(instanceId, ['hash1'], '/downloads/incomplete') const call = mockFetch.mock.calls[0] expect(call[0]).toBe('/api/instances/1/qbt/v2/torrents/setDownloadPath') expect(call[1].body.get('hashes')).toBe('hash1') expect(call[1].body.get('downloadPath')).toBe('/downloads/incomplete') }) }) describe('categories', () => { it('gets categories', async () => { const mockCategories = { movies: { name: 'movies', savePath: '/downloads/movies' } } mockFetch.mockResolvedValueOnce(jsonResponse(mockCategories)) const result = await getCategories(instanceId) expect(result).toEqual(mockCategories) }) it('creates category with save path', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await createCategory(instanceId, 'movies', '/downloads/movies') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/createCategory', expect.anything() ) }) it('edits category', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await editCategory(instanceId, 'movies', '/new/path') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/editCategory', expect.anything() ) }) it('removes categories', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await removeCategories(instanceId, ['movies', 'tv']) expect(mockFetch).toHaveBeenCalledWith( 
'/api/instances/1/qbt/v2/torrents/removeCategories', expect.anything() ) }) it('sets category on torrents', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await setCategory(instanceId, ['hash1', 'hash2'], 'movies') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/setCategory', expect.anything() ) }) }) describe('tags', () => { it('gets tags', async () => { mockFetch.mockResolvedValueOnce(jsonResponse(['tag1', 'tag2'])) const result = await getTags(instanceId) expect(result).toEqual(['tag1', 'tag2']) }) it('creates tags', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await createTags(instanceId, 'newtag') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/createTags', expect.anything() ) }) it('deletes tags', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await deleteTags(instanceId, 'oldtag') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/deleteTags', expect.anything() ) }) it('adds tags to torrents', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await addTags(instanceId, ['hash1'], 'tag1,tag2') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/addTags', expect.anything() ) }) it('removes tags from torrents', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await removeTags(instanceId, ['hash1'], 'tag1') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/removeTags', expect.anything() ) }) }) describe('torrent details', () => { it('gets torrent properties', async () => { const mockProps = { save_path: '/downloads', total_size: 1000000 } mockFetch.mockResolvedValueOnce(jsonResponse(mockProps)) const result = await getTorrentProperties(instanceId, 'abc123') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/properties?hash=abc123', expect.anything() ) expect(result).toEqual(mockProps) }) it('gets torrent trackers', async () => { const 
mockTrackers = [{ url: 'http://tracker.example.com', status: 2 }] mockFetch.mockResolvedValueOnce(jsonResponse(mockTrackers)) const result = await getTorrentTrackers(instanceId, 'abc123') expect(result).toEqual(mockTrackers) }) it('gets torrent files', async () => { const mockFiles = [{ name: 'file.mkv', size: 1000000 }] mockFetch.mockResolvedValueOnce(jsonResponse(mockFiles)) const result = await getTorrentFiles(instanceId, 'abc123') expect(result).toEqual(mockFiles) }) }) describe('torrent management', () => { it('renames torrent', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await renameTorrent(instanceId, 'abc123', 'New Name') expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/rename', expect.anything() ) }) it('adds trackers', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await addTrackers(instanceId, 'abc123', ['http://tracker1.com', 'http://tracker2.com']) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/addTrackers', expect.anything() ) }) it('removes trackers', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await removeTrackers(instanceId, 'abc123', ['http://tracker1.com']) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/removeTrackers', expect.anything() ) }) it('sets file priority', async () => { mockFetch.mockResolvedValueOnce(jsonResponse({})) await setFilePriority(instanceId, 'abc123', [0, 1, 2], 7) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/torrents/filePrio', expect.anything() ) }) }) describe('preferences', () => { it('gets preferences', async () => { const mockPrefs = { save_path: '/downloads', listen_port: 6881 } mockFetch.mockResolvedValueOnce(jsonResponse(mockPrefs)) const result = await getPreferences(instanceId) expect(result).toEqual(mockPrefs) }) }) describe('speed limits', () => { it('gets speed limits mode', async () => { mockFetch.mockResolvedValueOnce({ ok: true, text: () => 
Promise.resolve('1') }) const result = await getSpeedLimitsMode(instanceId) expect(result).toBe(1) }) it('toggles speed limits mode', async () => { mockFetch.mockResolvedValueOnce({ ok: true }) await toggleSpeedLimitsMode(instanceId) expect(mockFetch).toHaveBeenCalledWith( '/api/instances/1/qbt/v2/transfer/toggleSpeedLimitsMode', expect.objectContaining({ method: 'POST' }) ) }) }) describe('logs', () => { it('gets log entries', async () => { const mockLogs = [{ id: 1, message: 'Test log', timestamp: 123456, type: 1 }] mockFetch.mockResolvedValueOnce(jsonResponse(mockLogs)) const result = await getLog(instanceId) expect(result).toEqual(mockLogs) }) it('gets log with filter options', async () => { mockFetch.mockResolvedValueOnce(jsonResponse([])) await getLog(instanceId, { normal: true, warning: true, critical: true, lastKnownId: 10 }) expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('normal=true'), expect.anything() ) expect(mockFetch).toHaveBeenCalledWith( expect.stringContaining('last_known_id=10'), expect.anything() ) }) it('gets peer log', async () => { const mockPeerLogs = [{ id: 1, ip: '192.168.1.1', timestamp: 123456, blocked: false, reason: '' }] mockFetch.mockResolvedValueOnce(jsonResponse(mockPeerLogs)) const result = await getPeerLog(instanceId) expect(result).toEqual(mockPeerLogs) }) }) describe('RSS', () => { it('gets RSS items', async () => { const mockItems = { 'Feed 1': { url: 'http://feed.com/rss' } } mockFetch.mockResolvedValueOnce(jsonResponse(mockItems)) const result = await getRSSItems(instanceId) expect(result).toEqual(mockItems) }) it('gets RSS rules', async () => { const mockRules = { 'Rule 1': { enabled: true, mustContain: 'test' } } mockFetch.mockResolvedValueOnce(jsonResponse(mockRules)) const result = await getRSSRules(instanceId) expect(result).toEqual(mockRules) }) }) describe('error handling', () => { it('throws on non-ok response', async () => { mockFetch.mockResolvedValueOnce({ ok: false, status: 500, text: () => 
Promise.resolve('Error') }) await expect(getTorrents(instanceId)).rejects.toThrow('API error: 500') }) it('throws on empty response', async () => { mockFetch.mockResolvedValueOnce({ ok: true, text: () => Promise.resolve('') }) await expect(getTorrents(instanceId)).rejects.toThrow('Empty response from API') }) it('throws on invalid JSON', async () => { mockFetch.mockResolvedValueOnce({ ok: true, text: () => Promise.resolve('not json') }) await expect(getTorrents(instanceId)).rejects.toThrow('Invalid JSON response') }) }) }) ================================================ FILE: __tests__/hooks/useInstance.test.tsx ================================================ import { describe, it, expect, vi } from 'vitest' import { renderHook } from '@testing-library/react' import React from 'react' import { useInstance } from '../../src/hooks/useInstance' import { InstanceContext } from '../../src/contexts/instanceContext' import type { Instance } from '../../src/api/instances' describe('useInstance', () => { const mockInstance: Instance = { id: 1, label: 'Test Instance', url: 'http://localhost:8080', qbt_username: 'admin', skip_auth: false, created_at: 1234567890, } it('returns instance from context', () => { const wrapper = ({ children }: { children: React.ReactNode }) => React.createElement( InstanceContext.Provider, { value: { instance: mockInstance, setInstance: vi.fn() } }, children ) const { result } = renderHook(() => useInstance(), { wrapper }) expect(result.current).toEqual(mockInstance) expect(result.current.id).toBe(1) expect(result.current.label).toBe('Test Instance') }) it('throws error when used outside provider', () => { // Suppress console.error for this test const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => { }) expect(() => { renderHook(() => useInstance()) }).toThrow('useInstance must be used within InstanceProvider') consoleSpy.mockRestore() }) }) ================================================ FILE: 
__tests__/hooks/usePagination.test.tsx ================================================ import { describe, it, expect, vi } from 'vitest' import { renderHook } from '@testing-library/react' import React from 'react' import { usePagination } from '../../src/hooks/usePagination' import { PaginationContext } from '../../src/contexts/paginationContext' describe('usePagination', () => { const mockPaginationContext = { page: 1, perPage: 50, setPage: vi.fn(), setPerPage: vi.fn(), } it('returns pagination context values', () => { const wrapper = ({ children }: { children: React.ReactNode }) => React.createElement( PaginationContext.Provider, { value: mockPaginationContext }, children ) const { result } = renderHook(() => usePagination(), { wrapper }) expect(result.current.page).toBe(1) expect(result.current.perPage).toBe(50) expect(typeof result.current.setPage).toBe('function') expect(typeof result.current.setPerPage).toBe('function') }) it('throws error when used outside provider', () => { const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => { }) expect(() => { renderHook(() => usePagination()) }).toThrow('usePagination must be used within PaginationProvider') consoleSpy.mockRestore() }) }) ================================================ FILE: __tests__/reporter.ts ================================================ import type { Reporter, Vitest } from "vitest/node"; import pc from "picocolors"; import path from "node:path"; type TaskState = "pass" | "fail" | "skip" | "todo" | "pending" | "unknown"; type AnyTask = { id?: string; type?: "suite" | "test"; name?: string; mode?: "run" | "skip" | "only" | "todo"; tasks?: AnyTask[]; result?: { state?: unknown; duration?: number; errors?: unknown[]; }; }; type AnyFile = { filepath?: string; name?: string; file?: string; tasks?: AnyTask[]; }; type TaskResultPack = { id: string; result?: { state?: unknown; duration?: number; errors?: unknown[]; }; }; function normalisePath(p: string) { return p.replaceAll("\\", 
"/"); } function toRelative(p: string) { try { return path.relative(process.cwd(), p); } catch { return p; } } function safeBasename(p: string) { try { return path.basename(p); } catch { return p; } } function safeDirname(p: string) { try { return path.dirname(p); } catch { return ""; } } function normaliseState(rawState: unknown, mode: unknown): TaskState { if (mode === "skip") return "skip"; if (mode === "todo") return "todo"; const s = String(rawState ?? "").toLowerCase(); if (s === "pass" || s === "passed" || s === "success") return "pass"; if (s === "fail" || s === "failed") return "fail"; if (s === "skip" || s === "skipped") return "skip"; if (s === "todo") return "todo"; if (!s) return "pending"; return "unknown"; } function iconFor(state: TaskState) { switch (state) { case "pass": return pc.green("✔"); case "fail": return pc.red("✖"); case "skip": return pc.yellow("↷"); case "todo": return pc.yellow("…"); case "pending": return pc.gray("·"); default: return pc.gray("?"); } } function colourName(state: TaskState, text: string) { switch (state) { case "pass": return pc.white(text); case "fail": return pc.red(text); case "skip": case "todo": return pc.yellow(text); default: return pc.gray(text); } } function formatDuration(ms?: number) { if (!ms || ms <= 0) return ""; if (ms < 1000) return pc.dim(` ${Math.round(ms)}ms`); return pc.dim(` ${(ms / 1000).toFixed(2)}s`); } export default class PrettyReporter implements Reporter { private ctx: Vitest | undefined; private startMs = 0; private indexed = false; private totalTests = 0; private pass = 0; private fail = 0; private skip = 0; private todo = 0; private completed = new Set(); private lastProgressRender = 0; onInit(ctx: Vitest) { try { this.ctx = ctx; this.startMs = Date.now(); process.stdout.write(pc.cyan(pc.bold("\n QBITWEBUI TEST SUITE \n"))); process.stdout.write(pc.gray(" Running tests…\n\n")); } catch (e) { console.error("Reporter init error:", e); } } onTaskUpdate(packs: TaskResultPack[]) { try { 
this.ensureIndexedFromState(); for (const pack of packs ?? []) { if (!pack?.id) continue; if (!pack.result) continue; const state = normaliseState(pack.result.state, undefined); const terminal = state === "pass" || state === "fail" || state === "skip" || state === "todo"; if (!terminal) continue; if (this.completed.has(pack.id)) continue; this.completed.add(pack.id); if (state === "pass") this.pass += 1; if (state === "fail") this.fail += 1; if (state === "skip") this.skip += 1; if (state === "todo") this.todo += 1; } // Progress bar (throttled) const now = Date.now(); if (now - this.lastProgressRender < 1) return; this.lastProgressRender = now; this.renderProgressLine(); } catch (e) { console.error("Reporter update error:", e); } } onTestRunEnd() { try { // Clear the progress line process.stdout.write("\r\x1b[2K\n"); this.printReportFromState(); } catch (e) { console.error("Reporter error:", e); } } private getStateFiles(): AnyFile[] { const ctx = this.ctx as { state?: { getFiles?: () => unknown; files?: unknown } } | undefined; const state = ctx?.state; const filesFromGetter = state?.getFiles?.(); if (Array.isArray(filesFromGetter)) return filesFromGetter; const filesFromProp = state?.files; if (Array.isArray(filesFromProp)) return filesFromProp; return []; } private ensureIndexedFromState() { if (this.indexed) return; const files = this.getStateFiles(); if (!files.length) return; let total = 0; const walk = (t: AnyTask) => { if (!t) return; if (t.type === "test") total += 1; if (Array.isArray(t.tasks)) t.tasks.forEach(walk); }; files.forEach((f) => { if (Array.isArray(f.tasks)) f.tasks.forEach(walk); }); this.totalTests = total; this.indexed = true; } private renderProgressLine() { const total = Math.max(this.totalTests, 1); const done = Math.min(this.completed.size, total); const pct = this.totalTests ? 
Math.round((done / total) * 100) : 0; const width = 28; const filled = Math.round((pct / 100) * width); const bar = pc.green("█".repeat(filled)) + pc.gray("░".repeat(width - filled)); const elapsed = (Date.now() - this.startMs) / 1000; const line = [ pc.dim(" Progress "), "[", bar, "] ", pc.white(`${pct}%`), pc.dim(` (${done}/${this.totalTests})`), pc.dim(" | "), pc.green(`✔ ${this.pass}`), pc.dim(" "), pc.red(`✖ ${this.fail}`), pc.dim(" "), pc.yellow(`↷ ${this.skip}`), this.todo ? pc.dim(" ") : "", this.todo ? pc.yellow(`… ${this.todo}`) : "", pc.dim(` | ${elapsed.toFixed(1)}s`), ].join(""); process.stdout.write("\r\x1b[2K" + line); } private printReportFromState() { const files = this.getStateFiles(); const endMs = Date.now(); const duration = ((endMs - this.startMs) / 1000).toFixed(2); // Recompute final totals from the actual state (authoritative) const totals = this.computeTotals(files); this.pass = totals.pass; this.fail = totals.fail; this.skip = totals.skip; this.todo = totals.todo; this.totalTests = totals.total; process.stdout.write(pc.cyan(pc.bold("\n RESULTS \n"))); // Group by directory const grouped = new Map(); for (const file of files) { const raw = file.filepath ?? file.file ?? file.name ?? ""; const rel = normalisePath(toRelative(raw)); const dir = normalisePath(safeDirname(rel)) || "."; const arr = grouped.get(dir) ?? []; arr.push(file); grouped.set(dir, arr); } const dirs = [...grouped.keys()].sort((a, b) => a.localeCompare(b)); for (const dir of dirs) { const niceDir = dir === "." ? "__tests__" : dir; process.stdout.write(pc.magenta(pc.bold(`\n 📁 ${niceDir}\n`))); const dirFiles = grouped.get(dir) ?? []; dirFiles.sort((a, b) => { const ap = a.filepath ?? a.file ?? a.name ?? ""; const bp = b.filepath ?? b.file ?? b.name ?? ""; return ap.localeCompare(bp); }); for (const file of dirFiles) { const raw = file.filepath ?? file.file ?? file.name ?? 
""; const rel = normalisePath(toRelative(raw)); const fname = safeBasename(rel); const stats = this.computeFileTotals(file); const badge = stats.fail > 0 ? pc.red(` ${stats.fail} failed`) : pc.green(` ${stats.pass} passed`); const extrasParts: string[] = []; if (stats.skip > 0) extrasParts.push(pc.yellow(`${stats.skip} skipped`)); if (stats.todo > 0) extrasParts.push(pc.yellow(`${stats.todo} todo`)); const extras = extrasParts.length ? pc.dim(` (${extrasParts.join(", ")})`) : ""; process.stdout.write( ` ${pc.dim(fname)}${pc.dim(" ")}${badge}${extras}${formatDuration( stats.durationMs, )}\n`, ); if (Array.isArray(file.tasks) && file.tasks.length) { for (const t of file.tasks) this.printTaskTree(t, 4); } else { process.stdout.write(pc.dim(" (no tasks collected)\n")); } } } const done = this.pass + this.fail + this.skip + this.todo; const pct = this.totalTests ? Math.round((done / this.totalTests) * 100) : 0; process.stdout.write(pc.gray("\n ─────────────────────────────────────────────\n")); process.stdout.write( ` Summary: ${pc.green(`✔ ${this.pass}`)} ${pc.red( `✖ ${this.fail}`, )} ${pc.yellow(`↷ ${this.skip}`)}${this.todo ? ` ${pc.yellow(`… ${this.todo}`)}` : "" }\n`, ); process.stdout.write( ` Progress: ${pct}% (${done}/${this.totalTests})\n`, ); process.stdout.write(` Time: ${duration}s\n`); if (this.fail > 0) { process.stdout.write( pc.red( `\n ${this.fail} failing test(s). Check your code.\n\n`, ), ); process.exitCode = 1; } else { process.stdout.write( pc.green(`\n All tests passed. 
\n\n`), ); process.exitCode = 0; } } private computeTotals(files: AnyFile[]) { let pass = 0; let fail = 0; let skip = 0; let todo = 0; let total = 0; const walk = (t: AnyTask) => { if (!t) return; if (t.type === "test") { total += 1; const st = normaliseState(t.result?.state, t.mode); if (st === "pass") pass += 1; else if (st === "fail") fail += 1; else if (st === "skip") skip += 1; else if (st === "todo") todo += 1; } if (Array.isArray(t.tasks)) t.tasks.forEach(walk); }; files.forEach((f) => { if (Array.isArray(f.tasks)) f.tasks.forEach(walk); }); return { pass, fail, skip, todo, total }; } private computeFileTotals(file: AnyFile) { let pass = 0; let fail = 0; let skip = 0; let todo = 0; let durationMs = 0; const walk = (t: AnyTask) => { if (!t) return; if (t.type === "test") { const st = normaliseState(t.result?.state, t.mode); if (st === "pass") pass += 1; else if (st === "fail") fail += 1; else if (st === "skip") skip += 1; else if (st === "todo") todo += 1; if (typeof t.result?.duration === "number") { durationMs += t.result.duration; } } if (Array.isArray(t.tasks)) t.tasks.forEach(walk); }; if (Array.isArray(file.tasks)) file.tasks.forEach(walk); return { pass, fail, skip, todo, durationMs }; } private printTaskTree(task: AnyTask, indent: number) { const pad = " ".repeat(indent); if (task.type === "suite") { if (task.name && task.name.trim()) { process.stdout.write(`${pad}${pc.blue(pc.bold(task.name))}\n`); } if (Array.isArray(task.tasks)) { for (const child of task.tasks) this.printTaskTree(child, indent + 2); } return; } if (task.type === "test") { const state = normaliseState(task.result?.state, task.mode); const icon = iconFor(state); const name = colourName(state, task.name ?? 
"(unnamed test)"); const time = formatDuration(task.result?.duration); process.stdout.write(`${pad}${icon} ${name}${time}\n`); if (state === "fail" && Array.isArray(task.result?.errors)) { for (const err of task.result.errors) { let msg: string; if (err instanceof Error) { msg = err.message.split("\n")[0]; } else if (typeof err === "string") { msg = err.split("\n")[0]; } else if (err && typeof err === "object") { try { msg = JSON.stringify(err).slice(0, 100); } catch { msg = "[object]"; } } else { msg = String(err ?? "Unknown error").split("\n")[0]; } process.stdout.write(`${pad} ${pc.red("└─ ")}${pc.dim(msg)}\n`); } } } } } ================================================ FILE: __tests__/server/crossSeedCache.test.ts ================================================ import { describe, it, expect, beforeEach, afterEach, beforeAll, afterAll } from 'vitest' import { existsSync, rmSync } from 'fs' import { tmpdir } from 'os' import { join } from 'path' const TEST_DATA_PATH = join(tmpdir(), `crossseed-test-${process.pid}`) process.env.DATA_PATH = TEST_DATA_PATH import { cacheTorrent, getCachedTorrent, hasCachedTorrent, clearCacheForInstance, clearOutputForInstance, saveTorrentToOutput, getCacheStats, getOutputStats, _resetCachePaths, } from '../../src/server/utils/crossSeedCache' describe('crossSeedCache', () => { const TEST_INSTANCE = 99999 const HASH_1 = 'abc123def456abc123def456abc123def456abc1' const HASH_2 = 'def456abc123def456abc123def456abc123def4' beforeAll(() => { _resetCachePaths() }) afterAll(() => { rmSync(TEST_DATA_PATH, { recursive: true, force: true }) }) beforeEach(() => { clearCacheForInstance(TEST_INSTANCE) clearOutputForInstance(TEST_INSTANCE) }) afterEach(() => { clearCacheForInstance(TEST_INSTANCE) clearOutputForInstance(TEST_INSTANCE) }) describe('cacheTorrent and getCachedTorrent', () => { it('caches and retrieves torrent data', () => { const torrentData = Buffer.from('test torrent data') cacheTorrent(TEST_INSTANCE, HASH_1, torrentData) const 
cached = getCachedTorrent(TEST_INSTANCE, HASH_1) expect(cached).not.toBeNull() expect(cached?.toString()).toBe('test torrent data') }) it('overwrites existing cache', () => { const data1 = Buffer.from('first') const data2 = Buffer.from('second') cacheTorrent(TEST_INSTANCE, HASH_1, data1) cacheTorrent(TEST_INSTANCE, HASH_1, data2) const cached = getCachedTorrent(TEST_INSTANCE, HASH_1) expect(cached?.toString()).toBe('second') }) it('returns null for non-existent cache', () => { expect(getCachedTorrent(TEST_INSTANCE, HASH_2)).toBeNull() }) }) describe('hasCachedTorrent', () => { it('returns true when torrent is cached', () => { cacheTorrent(TEST_INSTANCE, HASH_1, Buffer.from('data')) expect(hasCachedTorrent(TEST_INSTANCE, HASH_1)).toBe(true) }) it('returns false when torrent is not cached', () => { expect(hasCachedTorrent(TEST_INSTANCE, HASH_2)).toBe(false) }) }) describe('clearCacheForInstance', () => { it('clears all cache for instance', () => { cacheTorrent(TEST_INSTANCE, HASH_1, Buffer.from('data1')) cacheTorrent(TEST_INSTANCE, HASH_2, Buffer.from('data2')) const cleared = clearCacheForInstance(TEST_INSTANCE) expect(cleared).toBe(2) expect(hasCachedTorrent(TEST_INSTANCE, HASH_1)).toBe(false) expect(hasCachedTorrent(TEST_INSTANCE, HASH_2)).toBe(false) }) it('returns 0 when no cache exists', () => { expect(clearCacheForInstance(88888)).toBe(0) }) }) describe('saveTorrentToOutput', () => { it('saves torrent to output directory', () => { const name = 'test-torrent' const data = Buffer.from('torrent data') const path = saveTorrentToOutput(TEST_INSTANCE, name, HASH_1, data) expect(path).toContain('test-torrent') expect(path).toContain('.torrent') expect(existsSync(path)).toBe(true) }) it('sanitizes filename', () => { const name = 'test/torrent:with*bad?chars' const data = Buffer.from('data') const path = saveTorrentToOutput(TEST_INSTANCE, name, HASH_2, data) const filename = path.split('/').pop()! 
expect(filename).not.toContain(':') expect(filename).not.toContain('*') expect(filename).not.toContain('?') expect(filename).toContain('test_torrent_with_bad_chars') }) }) describe('clearOutputForInstance', () => { it('clears output directory for instance', () => { saveTorrentToOutput(TEST_INSTANCE, 'torrent1', HASH_1, Buffer.from('data1')) saveTorrentToOutput(TEST_INSTANCE, 'torrent2', HASH_2, Buffer.from('data2')) const cleared = clearOutputForInstance(TEST_INSTANCE) expect(cleared).toBe(2) }) }) describe('getCacheStats', () => { it('returns correct stats for cached torrents', () => { cacheTorrent(TEST_INSTANCE, HASH_1, Buffer.from('12345')) cacheTorrent(TEST_INSTANCE, HASH_2, Buffer.from('1234567890')) const stats = getCacheStats(TEST_INSTANCE) expect(stats.count).toBe(2) expect(stats.totalSize).toBe(15) }) it('returns zero stats for empty cache', () => { const stats = getCacheStats(77777) expect(stats.count).toBe(0) expect(stats.totalSize).toBe(0) }) }) describe('getOutputStats', () => { it('returns correct stats for output files', () => { saveTorrentToOutput(TEST_INSTANCE, 'torrent1', HASH_1, Buffer.from('data')) saveTorrentToOutput(TEST_INSTANCE, 'torrent2', HASH_2, Buffer.from('moredata')) const stats = getOutputStats(TEST_INSTANCE) expect(stats.count).toBe(2) expect(stats.files.length).toBe(2) }) it('returns empty stats for no output', () => { const stats = getOutputStats(66666) expect(stats.count).toBe(0) expect(stats.files).toEqual([]) }) }) }) ================================================ FILE: __tests__/server/crossSeedMatcher.test.ts ================================================ import { describe, it, expect, vi } from 'vitest' vi.mock('../../src/server/db', () => ({ db: { exec: vi.fn(), run: vi.fn(), query: vi.fn(() => ({ get: vi.fn(), all: vi.fn(() => []) })), }, CrossSeedDecisionType: { MATCH: 'MATCH', MATCH_SIZE_ONLY: 'MATCH_SIZE_ONLY', SIZE_MISMATCH: 'SIZE_MISMATCH', FILE_TREE_MISMATCH: 'FILE_TREE_MISMATCH', ALREADY_EXISTS: 'ALREADY_EXISTS', 
// Tail of the vi.mock('../../src/server/db') factory: the remaining
// CrossSeedDecisionType constants (the first entries appear earlier in this
// same factory, before this chunk).
DOWNLOAD_FAILED: 'DOWNLOAD_FAILED',
NO_DOWNLOAD_LINK: 'NO_DOWNLOAD_LINK',
BLOCKED_RELEASE: 'BLOCKED_RELEASE',
},
// Mirror of the BlocklistType constants exported by src/server/db so the
// matcher module resolves them against this mock instead of the real DB layer.
BlocklistType: {
  NAME: 'name',
  NAME_REGEX: 'nameRegex',
  FOLDER: 'folder',
  FOLDER_REGEX: 'folderRegex',
  CATEGORY: 'category',
  TAG: 'tag',
  TRACKER: 'tracker',
  INFOHASH: 'infoHash',
  SIZE_BELOW: 'sizeBelow',
  SIZE_ABOVE: 'sizeAbove',
  LEGACY: 'legacy',
},
}))

// Imported after the vi.mock call above so the module under test binds to the
// mocked db module.
import {
  matchTorrentsBySizes,
  preFilterCandidate,
  type FileInfo,
} from '../../src/server/utils/crossSeedMatcher'
import { CrossSeedDecisionType } from '../../src/server/db'

describe('crossSeedMatcher', () => {
  describe('matchTorrentsBySizes', () => {
    describe('exact matches', () => {
      it('matches identical single file torrents', () => {
        // Identical name and size: strongest signal, full MATCH decision.
        const source: FileInfo[] = [{ name: 'movie.mkv', size: 1000000 }]
        const candidate: FileInfo[] = [{ name: 'movie.mkv', size: 1000000 }]
        const result = matchTorrentsBySizes(source, candidate)
        expect(result.matched).toBe(true)
        expect(result.decision).toBe(CrossSeedDecisionType.MATCH)
      })

      it('matches multi-file torrents with same files', () => {
        const source: FileInfo[] = [
          { name: 'video.mkv', size: 5000000 },
          { name: 'subs.srt', size: 50000 },
          { name: 'info.nfo', size: 1000 },
        ]
        const candidate: FileInfo[] = [
          { name: 'video.mkv', size: 5000000 },
          { name: 'subs.srt', size: 50000 },
          { name: 'info.nfo', size: 1000 },
        ]
        const result = matchTorrentsBySizes(source, candidate)
        expect(result.matched).toBe(true)
        expect(result.decision).toBe(CrossSeedDecisionType.MATCH)
      })
    })

    describe('flexible matching - different names, same sizes', () => {
      it('matches when file names differ but sizes match', () => {
        // Size-only agreement downgrades the decision to MATCH_SIZE_ONLY.
        const source: FileInfo[] = [{ name: 'Movie.2024.1080p.mkv', size: 5000000 }]
        const candidate: FileInfo[] = [{ name: 'different-name.mkv', size: 5000000 }]
        const result = matchTorrentsBySizes(source, candidate)
        expect(result.matched).toBe(true)
        expect(result.decision).toBe(CrossSeedDecisionType.MATCH_SIZE_ONLY)
      })

      it('matches multi-file with different names but same sizes', () => {
const source: FileInfo[] = [ { name: 'ep01.mkv', size: 500000 }, { name: 'ep02.mkv', size: 500000 }, { name: 'ep03.mkv', size: 500000 }, ] const candidate: FileInfo[] = [ { name: 's01e01.mkv', size: 500000 }, { name: 's01e02.mkv', size: 500000 }, { name: 's01e03.mkv', size: 500000 }, ] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(true) }) }) describe('mismatches', () => { it('rejects when candidate file size not found in searchee', () => { const source: FileInfo[] = [ { name: 'file1.mkv', size: 1000 }, { name: 'file2.mkv', size: 1000 }, ] const candidate: FileInfo[] = [{ name: 'file1.mkv', size: 2000 }] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(false) expect(result.decision).toBe(CrossSeedDecisionType.SIZE_MISMATCH) }) it('allows searchee to have extra files (candidate subset of searchee)', () => { const source: FileInfo[] = [ { name: 'file1.mkv', size: 1000 }, { name: 'file2.mkv', size: 2000 }, { name: 'extra.nfo', size: 500 }, ] const candidate: FileInfo[] = [ { name: 'file1.mkv', size: 1000 }, { name: 'file2.mkv', size: 2000 }, ] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(true) expect(result.decision).toBe(CrossSeedDecisionType.MATCH) }) it('rejects when sizes do not match', () => { const source: FileInfo[] = [{ name: 'movie.mkv', size: 1000000 }] const candidate: FileInfo[] = [{ name: 'movie.mkv', size: 999999 }] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(false) expect(result.decision).toBe(CrossSeedDecisionType.SIZE_MISMATCH) }) it('rejects when multi-file sizes partially match', () => { const source: FileInfo[] = [ { name: 'ep01.mkv', size: 500000 }, { name: 'ep02.mkv', size: 500000 }, ] const candidate: FileInfo[] = [ { name: 'ep01.mkv', size: 500000 }, { name: 'ep02.mkv', size: 499999 }, ] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(false) 
expect(result.decision).toBe(CrossSeedDecisionType.SIZE_MISMATCH) }) }) describe('edge cases', () => { it('rejects empty file arrays', () => { const source: FileInfo[] = [] const candidate: FileInfo[] = [] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(false) }) it('handles files with duplicate sizes correctly', () => { const source: FileInfo[] = [ { name: 'ep01.mkv', size: 500000 }, { name: 'ep02.mkv', size: 500000 }, { name: 'ep03.mkv', size: 500000 }, ] const candidate: FileInfo[] = [ { name: 'different1.mkv', size: 500000 }, { name: 'different2.mkv', size: 500000 }, { name: 'different3.mkv', size: 500000 }, ] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(true) }) it('handles very large file sizes', () => { const largeSize = 50 * 1024 * 1024 * 1024 const source: FileInfo[] = [{ name: 'large.mkv', size: largeSize }] const candidate: FileInfo[] = [{ name: 'large.mkv', size: largeSize }] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(true) }) it('handles zero-size files', () => { const source: FileInfo[] = [ { name: 'empty.txt', size: 0 }, { name: 'video.mkv', size: 1000000 }, ] const candidate: FileInfo[] = [ { name: 'empty.txt', size: 0 }, { name: 'video.mkv', size: 1000000 }, ] const result = matchTorrentsBySizes(source, candidate) expect(result.matched).toBe(true) }) }) }) describe('preFilterCandidate', () => { describe('passing filters', () => { it('passes when sizes are within threshold', () => { const result = preFilterCandidate('Movie 2024', 1000000, 'Movie 2024', 1000000) expect(result.pass).toBe(true) }) it('passes when sizes are close (within 2%)', () => { const result = preFilterCandidate('Movie', 1000000, 'Movie', 1019000) expect(result.pass).toBe(true) }) it('passes with different name formatting', () => { const result = preFilterCandidate('Movie.2024.1080p', 1000000, 'Movie 2024 1080p', 1000000) expect(result.pass).toBe(true) }) it('passes 
when release group is missing on one side', () => { const result = preFilterCandidate('Movie.2024.1080p', 1000000, 'Movie.2024.1080p-GROUP', 1000000) expect(result.pass).toBe(true) }) it('passes when source tag is missing on one side', () => { const result = preFilterCandidate('Movie.2024.1080p', 1000000, 'Movie.2024.1080p.WEB-DL.NF', 1000000) expect(result.pass).toBe(true) }) }) describe('failing filters', () => { it('fails when resolution differs', () => { const result = preFilterCandidate('Movie.2024.1080p', 1000000, 'Movie.2024.720p', 1000000) expect(result.pass).toBe(false) expect(result.reason?.toLowerCase()).toContain('resolution') }) it('fails when release group differs', () => { const result = preFilterCandidate('Movie.2024.1080p-GROUPA', 1000000, 'Movie.2024.1080p-GROUPB', 1000000) expect(result.pass).toBe(false) expect(result.reason?.toLowerCase()).toContain('group') }) it('fails when source tag differs', () => { const result = preFilterCandidate( 'Movie.2024.1080p.AMZN.WEB-DL.x264-GROUP', 1000000, 'Movie.2024.1080p.NF.WEB-DL.x264-GROUP', 1000000 ) expect(result.pass).toBe(false) expect(result.reason?.toLowerCase()).toContain('source') }) it('fails when proper/repack mismatch', () => { const result = preFilterCandidate('Movie.2024.1080p.PROPER-GROUP', 1000000, 'Movie.2024.1080p-GROUP', 1000000) expect(result.pass).toBe(false) expect(result.reason?.toLowerCase()).toContain('proper') }) it('fails when sizes differ too much', () => { const result = preFilterCandidate('Movie', 1000000, 'Movie', 2000000) expect(result.pass).toBe(false) expect(result.reason?.toLowerCase()).toContain('size') }) it('fails when candidate is much smaller', () => { const result = preFilterCandidate('Movie', 1000000, 'Movie', 100000) expect(result.pass).toBe(false) }) }) describe('edge cases', () => { it('handles zero source size with zero candidate', () => { const result = preFilterCandidate('Movie', 0, 'Movie', 0) expect(result.pass).toBe(true) }) it('handles zero source size with 
non-zero candidate without dividing by zero', () => { const result = preFilterCandidate('Movie', 0, 'Movie', 1000) expect(result.pass).toBe(false) expect(result.reason).toBeDefined() expect(result.reason).not.toContain('Infinity') expect(result.reason).toContain('100.0%') }) it('handles missing candidate size', () => { const result = preFilterCandidate('Movie', 1000000, 'Movie', undefined as unknown as number) expect(result.pass).toBe(true) }) }) }) }) ================================================ FILE: __tests__/server/crossSeedScheduler.test.ts ================================================ import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest' vi.mock('../../src/server/db', () => ({ db: { query: vi.fn(() => ({ get: vi.fn(), all: vi.fn(() => []), })), run: vi.fn(), }, })) vi.mock('../../src/server/utils/crossSeedWorker', () => ({ runCrossSeedScan: vi.fn(), })) vi.mock('../../src/server/utils/logger', () => ({ log: { info: vi.fn(), error: vi.fn(), warn: vi.fn(), }, })) import { isInstanceRunning, triggerManualScan, stopScheduler, } from '../../src/server/utils/crossSeedScheduler' import { runCrossSeedScan } from '../../src/server/utils/crossSeedWorker' const mockRunCrossSeedScan = runCrossSeedScan as Mock describe('crossSeedScheduler', () => { beforeEach(() => { vi.clearAllMocks() stopScheduler() }) afterEach(() => { stopScheduler() }) describe('isInstanceRunning', () => { it('returns false when no scan is running', () => { expect(isInstanceRunning(1)).toBe(false) }) it('returns true when a scan is in progress', async () => { let resolvePromise: () => void const scanPromise = new Promise((resolve) => { resolvePromise = resolve }) mockRunCrossSeedScan.mockImplementation(() => scanPromise) const triggerPromise = triggerManualScan(1, 1, false).catch(() => {}) await new Promise((r) => setTimeout(r, 10)) expect(isInstanceRunning(1)).toBe(true) resolvePromise!() await triggerPromise expect(isInstanceRunning(1)).toBe(false) }) }) 
describe('triggerManualScan', () => { it('prevents concurrent scans on the same instance', async () => { let resolveFirst: (value: unknown) => void const firstScanPromise = new Promise((resolve) => { resolveFirst = resolve }) mockRunCrossSeedScan.mockImplementationOnce(() => firstScanPromise) const firstTrigger = triggerManualScan(1, 1, false) await new Promise((r) => setTimeout(r, 10)) await expect(triggerManualScan(1, 1, false)).rejects.toThrow('Scan already in progress') resolveFirst!({ instanceId: 1 }) await firstTrigger }) it('allows concurrent scans on different instances', async () => { let resolveFirst: (value: unknown) => void let resolveSecond: (value: unknown) => void mockRunCrossSeedScan .mockImplementationOnce(() => new Promise((resolve) => { resolveFirst = resolve })) .mockImplementationOnce(() => new Promise((resolve) => { resolveSecond = resolve })) const firstTrigger = triggerManualScan(1, 1, false) await new Promise((r) => setTimeout(r, 10)) const secondTrigger = triggerManualScan(2, 1, false) await new Promise((r) => setTimeout(r, 10)) expect(isInstanceRunning(1)).toBe(true) expect(isInstanceRunning(2)).toBe(true) resolveFirst!({ instanceId: 1 }) resolveSecond!({ instanceId: 2 }) await firstTrigger await secondTrigger }) it('clears running state on error', async () => { mockRunCrossSeedScan.mockRejectedValueOnce(new Error('Scan failed')) await expect(triggerManualScan(1, 1, false)).rejects.toThrow('Scan failed') expect(isInstanceRunning(1)).toBe(false) }) it('clears running state on success', async () => { mockRunCrossSeedScan.mockResolvedValueOnce({ instanceId: 1, torrentsTotal: 10, torrentsScanned: 5, torrentsSkipped: 5, matchesFound: 1, torrentsAdded: 1, errors: [], dryRun: false, startedAt: Date.now(), completedAt: Date.now(), }) await triggerManualScan(1, 1, false) expect(isInstanceRunning(1)).toBe(false) }) }) }) ================================================ FILE: __tests__/server/crossSeedWorker.test.ts 
================================================ import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest' const { state, db, fsMocks } = vi.hoisted(() => { const state = { config: null as null | { instance_id: number enabled: number interval_hours: number delay_seconds: number dry_run: number category_suffix: string tag: string skip_recheck: number integration_id: number | null indexer_ids: string | null match_mode: 'strict' | 'flexible' link_dir: string | null blocklist: string | null include_single_episodes: number last_run: number | null next_run: number | null updated_at: number }, integration: null as null | { id: number user_id: number type: string label: string url: string api_key_encrypted: string created_at: number }, instance: null as null | { id: number user_id: number label: string url: string qbt_username: string | null qbt_password_encrypted: string | null skip_auth: number created_at: number }, searchees: new Map(), decisions: new Map(), nextSearcheeId: 1, } const db = { query: vi.fn((sql: string) => ({ get: (...params: unknown[]) => { if (sql.includes('FROM cross_seed_config')) { return state.config && state.config.instance_id === params[0] ? state.config : undefined } if (sql.includes('FROM integrations')) { return state.integration && state.integration.id === params[0] && state.integration.user_id === params[1] ? state.integration : undefined } if (sql.includes('FROM instances')) { return state.instance && state.instance.id === params[0] && state.instance.user_id === params[1] ? state.instance : undefined } if (sql.includes('FROM cross_seed_searchee') && sql.includes('torrent_hash = ?')) { const key = `${params[0]}:${params[1]}` const row = state.searchees.get(key) return row ? 
{ id: row.id } : undefined } if (sql.includes('FROM cross_seed_decision') && sql.includes('guid = ?')) { return state.decisions.get(`${params[0]}:${params[1]}`) } return undefined }, all: (...params: unknown[]) => { if (sql.includes('FROM cross_seed_searchee')) { return Array.from(state.searchees.values()).filter((row) => row.instance_id === params[0]) } return [] }, })), run: vi.fn((sql: string, params: unknown[]) => { if (sql.startsWith('INSERT INTO cross_seed_searchee')) { const [instanceId, hash, name, size, fileCount, fileSizesJson] = params const key = `${instanceId}:${hash}` let row = state.searchees.get(key) const now = Math.floor(Date.now() / 1000) if (!row) { row = { id: state.nextSearcheeId++, instance_id: instanceId as number, torrent_hash: hash as string, torrent_name: name as string, total_size: size as number, file_count: fileCount as number, file_sizes: fileSizesJson as string, first_searched: now, last_searched: now, } } else { row.last_searched = now } state.searchees.set(key, row) return { changes: 1, lastInsertRowid: row.id } } if (sql.startsWith('INSERT INTO cross_seed_decision')) { const [searcheeId, guid, info_hash, candidate_name, candidate_size, decision] = params const key = `${searcheeId}:${guid}` const now = Math.floor(Date.now() / 1000) const existing = state.decisions.get(key) if (existing) { existing.info_hash = info_hash as string | null existing.decision = decision as string existing.last_seen = now } else { state.decisions.set(key, { searchee_id: searcheeId as number, guid: guid as string, info_hash: info_hash as string | null, candidate_name: candidate_name as string, candidate_size: candidate_size as number, decision: decision as string, first_seen: now, last_seen: now, }) } return { changes: 1 } } if (sql.startsWith('UPDATE cross_seed_decision SET last_seen')) { const [lastSeen, searcheeId, guid] = params const key = `${searcheeId}:${guid}` const entry = state.decisions.get(key) if (entry) entry.last_seen = lastSeen as number 
return { changes: entry ? 1 : 0 } } if (sql.startsWith('UPDATE cross_seed_config SET last_run')) { const [lastRun, instanceId] = params if (state.config && state.config.instance_id === instanceId) { state.config.last_run = lastRun as number } return { changes: 1 } } return { changes: 0 } }), } const fsMocks = { link: vi.fn().mockResolvedValue(undefined), mkdir: vi.fn().mockResolvedValue(undefined), stat: vi.fn().mockResolvedValue({ dev: 1 }), access: vi.fn().mockResolvedValue(undefined), } return { state, db, fsMocks } }) vi.mock('../../src/server/db', () => ({ db, CrossSeedDecisionType: { MATCH: 'MATCH', MATCH_SIZE_ONLY: 'MATCH_SIZE_ONLY', SIZE_MISMATCH: 'SIZE_MISMATCH', FILE_COUNT_MISMATCH: 'FILE_COUNT_MISMATCH', ALREADY_EXISTS: 'ALREADY_EXISTS', DOWNLOAD_FAILED: 'DOWNLOAD_FAILED', NO_DOWNLOAD_LINK: 'NO_DOWNLOAD_LINK', }, MatchMode: { STRICT: 'strict', FLEXIBLE: 'flexible', }, })) vi.mock('../../src/server/utils/qbt', () => ({ loginToQbt: vi.fn(), })) vi.mock('../../src/server/utils/crypto', () => ({ decrypt: vi.fn(() => 'apikey'), })) vi.mock('../../src/server/utils/torznab', () => ({ searchAllIndexers: vi.fn(), downloadTorrentDirect: vi.fn(), })) vi.mock('../../src/server/utils/crossSeedCache', () => ({ cacheTorrent: vi.fn(), saveTorrentToOutput: vi.fn(() => '/tmp/output.torrent'), })) vi.mock('fs/promises', () => ({ ...fsMocks, default: fsMocks, })) vi.mock('../../src/server/utils/fetch', () => ({ fetchWithTls: vi.fn(), })) vi.mock('../../src/server/utils/logger', () => ({ log: { info: vi.fn(), warn: vi.fn(), error: vi.fn(), }, })) import { runCrossSeedScan } from '../../src/server/utils/crossSeedWorker' import { loginToQbt } from '../../src/server/utils/qbt' import { searchAllIndexers, downloadTorrentDirect } from '../../src/server/utils/torznab' import { cacheTorrent, saveTorrentToOutput } from '../../src/server/utils/crossSeedCache' import { fetchWithTls } from '../../src/server/utils/fetch' const mockLoginToQbt = loginToQbt as Mock const 
mockSearchAllIndexers = searchAllIndexers as Mock const mockDownloadTorrentDirect = downloadTorrentDirect as Mock const mockCacheTorrent = cacheTorrent as Mock const mockSaveTorrentToOutput = saveTorrentToOutput as Mock const mockFetchWithTls = fetchWithTls as Mock function makeTorrentData(name: string, length: number): Buffer { return Buffer.from(`d4:infod4:name${name.length}:${name}6:lengthi${length}eee`) } type BencodeValue = number | string | Buffer | BencodeValue[] | { [key: string]: BencodeValue } function encodeBencode(data: BencodeValue): Buffer { if (typeof data === 'number') { return Buffer.from(`i${data}e`) } if (Buffer.isBuffer(data)) { return Buffer.concat([Buffer.from(`${data.length}:`), data]) } if (typeof data === 'string') { const buf = Buffer.from(data) return Buffer.concat([Buffer.from(`${buf.length}:`), buf]) } if (Array.isArray(data)) { const parts: Buffer[] = [Buffer.from('l')] for (const item of data) { parts.push(encodeBencode(item)) } parts.push(Buffer.from('e')) return Buffer.concat(parts) } const parts: Buffer[] = [Buffer.from('d')] const keys = Object.keys(data).sort() for (const key of keys) { parts.push(encodeBencode(key)) parts.push(encodeBencode(data[key])) } parts.push(Buffer.from('e')) return Buffer.concat(parts) } function makeMultiFileTorrentData( name: string, files: Array<{ path: string[]; length: number }> ): Buffer { return encodeBencode({ info: { name, files: files.map((file) => ({ length: file.length, path: file.path })), }, }) } function resetState() { state.config = { instance_id: 1, enabled: 1, interval_hours: 24, delay_seconds: 0, dry_run: 0, category_suffix: '_cross-seed', tag: 'cross-seed', skip_recheck: 0, integration_id: 10, indexer_ids: null, match_mode: 'strict', link_dir: null, blocklist: null, include_single_episodes: 0, last_run: null, next_run: null, updated_at: Math.floor(Date.now() / 1000), } state.integration = { id: 10, user_id: 1, type: 'prowlarr', label: 'Prowlarr', url: 'http://prowlarr', 
// Tail of resetState(): remaining fields of the fake Prowlarr integration row,
// a fresh qBittorrent instance row, and cleared searchee/decision state.
api_key_encrypted: 'encrypted',
created_at: Math.floor(Date.now() / 1000),
}
state.instance = {
  id: 1,
  user_id: 1,
  label: 'QBT',
  url: 'http://qbt',
  qbt_username: 'user',
  qbt_password_encrypted: 'pass',
  skip_auth: 0,
  created_at: Math.floor(Date.now() / 1000),
}
state.searchees.clear()
state.decisions.clear()
state.nextSearcheeId = 1
}

// Stubs fetchWithTls to emulate the subset of the qBittorrent WebUI API the
// worker touches: version probe, torrent info/files lookups, add, and the
// stop/start/recheck endpoints. `addedTorrent` flips once /torrents/add is
// hit, so later hash lookups report the newly added torrent as completed.
function mockQbtResponses(torrents: unknown[], files: unknown[]) {
  let addedTorrent = false
  mockFetchWithTls.mockImplementation((url: string) => {
    if (url.endsWith('/api/v2/app/version')) {
      return Promise.resolve(new Response('v5.0.0', { status: 200 }))
    }
    if (url.includes('/api/v2/torrents/info')) {
      // Hash-filtered query: answer with a known torrent, with the torrent the
      // test just "added", or with an empty list.
      const hashMatch = url.match(/hashes=([a-fA-F0-9]+)/)
      if (hashMatch) {
        const queriedHash = hashMatch[1].toUpperCase()
        const found = (torrents as { hash: string }[]).find((t) => t.hash.toUpperCase() === queriedHash)
        if (found) {
          return Promise.resolve(new Response(JSON.stringify([found]), { status: 200 }))
        }
        if (addedTorrent) {
          return Promise.resolve(
            new Response(
              JSON.stringify([
                {
                  hash: queriedHash,
                  name: 'Added',
                  state: 'pausedUP',
                  amount_left: 0,
                },
              ]),
              { status: 200 }
            )
          )
        }
        return Promise.resolve(new Response('[]', { status: 200 }))
      }
      // Unfiltered query: the full torrent list supplied by the test.
      return Promise.resolve(new Response(JSON.stringify(torrents), { status: 200 }))
    }
    if (url.includes('/api/v2/torrents/files')) {
      return Promise.resolve(new Response(JSON.stringify(files), { status: 200 }))
    }
    if (url.endsWith('/api/v2/torrents/add')) {
      addedTorrent = true
      return Promise.resolve(new Response('Ok.', { status: 200 }))
    }
    if (url.includes('/api/v2/torrents/stop') || url.includes('/api/v2/torrents/start') || url.includes('/api/v2/torrents/recheck')) {
      return Promise.resolve(new Response('', { status: 200 }))
    }
    return Promise.resolve(new Response('Not Found', { status: 404 }))
  })
}

describe('crossSeedWorker', () => {
  // Fresh mocks and in-memory DB state before every test; qBittorrent login
  // always succeeds with a fixed session cookie.
  beforeEach(() => {
    vi.clearAllMocks()
    resetState()
    mockLoginToQbt.mockResolvedValue({ success: true, cookie: 'SID=abc' })
  })

  it('adds a matched torrent when not dry-run', async () => {
const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-1', title: 'Movie 2024 1080p', link: 'http://indexer/download/1', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue(makeTorrentData('Movie.2024.1080p.mkv', 1000)) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(1) expect(result.torrentsScanned).toBe(1) expect(result.torrentsSkipped).toBe(0) expect(mockCacheTorrent).toHaveBeenCalledTimes(1) expect(mockSaveTorrentToOutput).not.toHaveBeenCalled() expect(mockFetchWithTls.mock.calls.some((call) => String(call[0]).endsWith('/api/v2/torrents/add'))).toBe(true) }) it('matches multi-file torrents in strict mode using basenames', async () => { state.config!.match_mode = 'strict' const torrents = [ { hash: 'HASH2', name: 'Show.S01', size: 3000, state: 'uploading', category: 'shows', tags: '', save_path: '/downloads', content_path: '/downloads/Show.S01', progress: 1, }, ] const files = [ { name: 'Show.S01/E01.mkv', size: 1000 }, { name: 'Show.S01/E02.mkv', size: 2000 }, ] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-2', title: 'Show S01', link: 'http://indexer/download/2', size: 3000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue( makeMultiFileTorrentData('Show.S01', [ { path: ['E01.mkv'], length: 1000 }, { path: ['E02.mkv'], length: 2000 }, ]) ) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(1) 
expect(mockFetchWithTls.mock.calls.some((call) => String(call[0]).endsWith('/api/v2/torrents/add'))).toBe(true) }) it('adds size-only matches in flexible mode using hardlinks', async () => { state.config!.match_mode = 'flexible' state.config!.link_dir = '/links' const torrents = [ { hash: 'HASH3', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-3', title: 'Movie 2024 1080p', link: 'http://indexer/download/3', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue(makeTorrentData('Movie.2024.1080p.REPACK.mkv', 1000)) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(1) expect(fsMocks.link.mock.calls.length).toBeGreaterThan(0) expect(fsMocks.link.mock.calls[0][0]).toBe('/downloads/Movie.2024.1080p.mkv') expect(fsMocks.link.mock.calls[0][1]).toBe('/links/Movie.2024.1080p.REPACK.mkv') }) it('detects structure mismatch when single-file source matches multi-file candidate with folder', async () => { state.config!.match_mode = 'flexible' state.config!.link_dir = '/links' const torrents = [ { hash: 'HASH4', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-4', title: 'Movie (2024)', link: 'http://indexer/download/4', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue( makeMultiFileTorrentData('Movie (2024)', 
[{ path: ['Movie.2024.1080p.mkv'], length: 1000 }]) ) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(1) expect(fsMocks.link.mock.calls.length).toBeGreaterThan(0) expect(fsMocks.link.mock.calls[0][1]).toBe('/links/Movie (2024)/Movie.2024.1080p.mkv') }) it('constructs correct source paths for multi-file hardlinks', async () => { state.config!.match_mode = 'flexible' state.config!.link_dir = '/links' const torrents = [ { hash: 'HASH5', name: 'Show.S01', size: 3000, state: 'uploading', category: 'shows', tags: '', save_path: '/downloads', content_path: '/downloads/Show.S01', progress: 1, }, ] const files = [ { name: 'Show.S01/E01.mkv', size: 1000 }, { name: 'Show.S01/E02.mkv', size: 2000 }, ] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-5', title: 'Show Season 1', link: 'http://indexer/download/5', size: 3000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue( makeMultiFileTorrentData('Show Season 1', [ { path: ['Episode01.mkv'], length: 1000 }, { path: ['Episode02.mkv'], length: 2000 }, ]) ) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(1) expect(fsMocks.link.mock.calls[0][0]).toBe('/downloads/Show.S01/E01.mkv') expect(fsMocks.link.mock.calls[0][1]).toBe('/links/Show Season 1/Episode01.mkv') expect(fsMocks.link.mock.calls[1][0]).toBe('/downloads/Show.S01/E02.mkv') expect(fsMocks.link.mock.calls[1][1]).toBe('/links/Show Season 1/Episode02.mkv') }) it('saves to output in dry-run mode', async () => { state.config!.dry_run = 1 const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 
'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-1', title: 'Movie 2024 1080p', link: 'http://indexer/download/1', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue(makeTorrentData('Movie.2024.1080p.mkv', 1000)) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(0) expect(mockSaveTorrentToOutput).toHaveBeenCalledTimes(1) expect(mockFetchWithTls.mock.calls.some((call) => String(call[0]).endsWith('/api/v2/torrents/add'))).toBe(false) }) it('skips torrents that were already searched when not forced', async () => { state.searchees.set('1:HASH1', { id: 1, instance_id: 1, torrent_hash: 'HASH1', torrent_name: 'Movie', total_size: 1000, file_count: 1, file_sizes: '[1000]', first_searched: 0, last_searched: 0, }) const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] mockQbtResponses(torrents, []) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.torrentsSkipped).toBe(1) expect(result.torrentsScanned).toBe(0) expect(mockSearchAllIndexers).not.toHaveBeenCalled() expect(mockDownloadTorrentDirect).not.toHaveBeenCalled() }) it('skips candidates already in the client by infohash', async () => { const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, { hash: 'EXISTING', name: 'Already added', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Already added', progress: 0.5, }, ] mockQbtResponses(torrents, [{ name: 
'Movie.2024.1080p.mkv', size: 1000 }]) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-1', title: 'Movie 2024 1080p', link: 'http://indexer/download/1', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, infoHash: 'EXISTING', }, ]) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: true }) expect(result.matchesFound).toBe(0) expect(mockDownloadTorrentDirect).not.toHaveBeenCalled() }) it('passes configured indexer ids to search', async () => { state.config!.indexer_ids = JSON.stringify([5, 9]) const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([]) await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(mockSearchAllIndexers).toHaveBeenCalledWith('http://prowlarr', 'apikey', 'Movie 2024 1080p', 10, [5, 9]) }) it('returns error when qBittorrent login fails', async () => { mockLoginToQbt.mockResolvedValueOnce({ success: false, error: 'bad credentials' }) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.errors[0]).toContain('qBittorrent login failed') expect(mockFetchWithTls).not.toHaveBeenCalled() }) it('returns error when qBittorrent torrent list fetch fails', async () => { mockFetchWithTls.mockImplementation((url: string) => { if (url.endsWith('/api/v2/app/version')) { return Promise.resolve(new Response('v5.0.0', { status: 200 })) } if (url.includes('/api/v2/torrents/info')) { return Promise.resolve(new Response('fail', { status: 500 })) } return Promise.resolve(new Response('Not Found', { status: 404 })) }) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.errors[0]).toContain('Failed to fetch torrents from qBittorrent') 
expect(mockSearchAllIndexers).not.toHaveBeenCalled() }) it('records search errors when torznab search throws', async () => { const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockRejectedValueOnce(new Error('torznab down')) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.errors[0]).toContain('Search failed for Movie.2024.1080p.mkv') expect(result.matchesFound).toBe(0) }) it('records add failure when qBittorrent rejects the torrent', async () => { const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockFetchWithTls.mockImplementation((url: string) => { if (url.endsWith('/api/v2/torrents/info')) { return Promise.resolve(new Response(JSON.stringify(torrents), { status: 200 })) } if (url.includes('/api/v2/torrents/files')) { return Promise.resolve(new Response(JSON.stringify(files), { status: 200 })) } if (url.endsWith('/api/v2/torrents/add')) { return Promise.resolve(new Response('Nope', { status: 200 })) } return Promise.resolve(new Response('Not Found', { status: 404 })) }) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-1', title: 'Movie 2024 1080p', link: 'http://indexer/download/1', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue(makeTorrentData('Movie.2024.1080p.mkv', 1000)) const result = await runCrossSeedScan({ instanceId: 1, userId: 1, force: false }) expect(result.matchesFound).toBe(1) expect(result.torrentsAdded).toBe(0) 
expect(result.errors[0]).toContain('Failed to add torrent: Movie 2024 1080p') }) it('updates last_seen for existing decisions', async () => { state.searchees.set('1:HASH1', { id: 1, instance_id: 1, torrent_hash: 'HASH1', torrent_name: 'Movie', total_size: 1000, file_count: 1, file_sizes: '[1000]', first_searched: 0, last_searched: 0, }) state.decisions.set('1:guid-1', { decision: 'SIZE_MISMATCH', info_hash: null, last_seen: 100, }) const torrents = [ { hash: 'HASH1', name: 'Movie.2024.1080p.mkv', size: 1000, state: 'uploading', category: 'movies', tags: '', save_path: '/downloads', content_path: '/downloads/Movie.2024.1080p.mkv', progress: 1, }, ] const files = [{ name: 'Movie.2024.1080p.mkv', size: 1000 }] mockQbtResponses(torrents, files) mockSearchAllIndexers.mockResolvedValue([ { guid: 'guid-1', title: 'Movie 2024 1080p', link: 'http://indexer/download/1', size: 1000, pubDate: '', indexer: 'Test', indexerId: 1, }, ]) mockDownloadTorrentDirect.mockResolvedValue(makeTorrentData('Movie.2024.1080p.mkv', 2000)) await runCrossSeedScan({ instanceId: 1, userId: 1, force: true }) const updated = state.decisions.get('1:guid-1') expect(updated?.last_seen).toBeGreaterThan(100) }) }) ================================================ FILE: __tests__/server/fetch.test.ts ================================================ import { describe, it, expect, vi, beforeEach } from 'vitest' import { fetchWithTls } from '../../src/server/utils/fetch' // Mock global fetch const mockFetch = vi.fn() vi.stubGlobal('fetch', mockFetch) describe('fetchWithTls', () => { beforeEach(() => { mockFetch.mockReset() }) describe('successful requests', () => { it('makes basic fetch request', async () => { const mockResponse = new Response('OK', { status: 200 }) mockFetch.mockResolvedValueOnce(mockResponse) const result = await fetchWithTls('http://localhost:8080/api') expect(mockFetch).toHaveBeenCalledWith( 'http://localhost:8080/api', expect.anything() ) expect(result).toBe(mockResponse) }) it('passes 
through request options', async () => { mockFetch.mockResolvedValueOnce(new Response('OK')) await fetchWithTls('http://localhost:8080/api', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ test: true }), }) expect(mockFetch).toHaveBeenCalledWith( 'http://localhost:8080/api', expect.objectContaining({ method: 'POST', headers: { 'Content-Type': 'application/json' }, }) ) }) }) describe('error handling', () => { it('rethrows non-certificate errors', async () => { mockFetch.mockRejectedValueOnce(new Error('Network error')) await expect(fetchWithTls('http://localhost:8080')) .rejects.toThrow('Network error') }) it('provides helpful message for self-signed cert errors', async () => { mockFetch.mockRejectedValueOnce(new Error('self-signed certificate')) await expect(fetchWithTls('http://localhost:8080')) .rejects.toThrow('TLS certificate validation failed') }) it('handles SELF_SIGNED_CERT_IN_CHAIN error', async () => { mockFetch.mockRejectedValueOnce(new Error('SELF_SIGNED_CERT_IN_CHAIN')) await expect(fetchWithTls('http://localhost:8080')) .rejects.toThrow('TLS certificate validation failed') }) it('handles certificate expired errors', async () => { mockFetch.mockRejectedValueOnce(new Error('certificate has expired')) await expect(fetchWithTls('http://localhost:8080')) .rejects.toThrow('TLS certificate validation failed') }) it('handles CERT_HAS_EXPIRED error', async () => { mockFetch.mockRejectedValueOnce(new Error('CERT_HAS_EXPIRED')) await expect(fetchWithTls('http://localhost:8080')) .rejects.toThrow('TLS certificate validation failed') }) }) describe('request types', () => { it('handles GET requests', async () => { mockFetch.mockResolvedValueOnce(new Response('{}')) await fetchWithTls('http://localhost/api', { method: 'GET' }) expect(mockFetch).toHaveBeenCalledWith( expect.any(String), expect.objectContaining({ method: 'GET' }) ) }) it('handles POST requests with body', async () => { mockFetch.mockResolvedValueOnce(new 
Response('{}')) await fetchWithTls('http://localhost/api', { method: 'POST', body: 'test=value', }) expect(mockFetch).toHaveBeenCalledWith( expect.any(String), expect.objectContaining({ method: 'POST', body: 'test=value' }) ) }) it('handles DELETE requests', async () => { mockFetch.mockResolvedValueOnce(new Response('')) await fetchWithTls('http://localhost/api/1', { method: 'DELETE' }) expect(mockFetch).toHaveBeenCalledWith( expect.any(String), expect.objectContaining({ method: 'DELETE' }) ) }) }) }) ================================================ FILE: __tests__/server/logger.test.ts ================================================ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' import { log } from '../../src/server/utils/logger' describe('logger utilities', () => { let consoleSpy: { log: ReturnType; warn: ReturnType; error: ReturnType } beforeEach(() => { consoleSpy = { log: vi.spyOn(console, 'log').mockImplementation(() => { }), warn: vi.spyOn(console, 'warn').mockImplementation(() => { }), error: vi.spyOn(console, 'error').mockImplementation(() => { }), } }) afterEach(() => { vi.restoreAllMocks() }) describe('log.info', () => { it('logs message with INFO level', () => { log.info('Test message') expect(consoleSpy.log).toHaveBeenCalledOnce() expect(consoleSpy.log.mock.calls[0][0]).toContain('[INFO]') expect(consoleSpy.log.mock.calls[0][0]).toContain('Test message') }) it('includes timestamp', () => { log.info('Test') const call = consoleSpy.log.mock.calls[0][0] // Timestamp format: [2024-01-18T12:00:00.000Z] expect(call).toMatch(/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/) }) }) describe('log.warn', () => { it('logs message with WARN level', () => { log.warn('Warning message') expect(consoleSpy.warn).toHaveBeenCalledOnce() expect(consoleSpy.warn.mock.calls[0][0]).toContain('[WARN]') expect(consoleSpy.warn.mock.calls[0][0]).toContain('Warning message') }) }) describe('log.error', () => { it('logs message with ERROR level', () => { 
log.error('Error message')
      expect(consoleSpy.error).toHaveBeenCalledOnce()
      expect(consoleSpy.error.mock.calls[0][0]).toContain('[ERROR]')
      expect(consoleSpy.error.mock.calls[0][0]).toContain('Error message')
    })
  })

  describe('log formatting', () => {
    it('handles empty messages', () => {
      // An empty message must still produce exactly one log line (level + timestamp).
      log.info('')
      expect(consoleSpy.log).toHaveBeenCalledOnce()
    })

    it('handles special characters', () => {
      // NOTE(review): the literal ends with a dangling `and ` — an
      // angle-bracketed token appears to have been stripped during extraction
      // (the same artifact removed `<typeof vi.spyOn>` above). Confirm the
      // intended string against the original repository.
      log.info('Message with "quotes" and ')
      expect(consoleSpy.log.mock.calls[0][0]).toContain('Message with "quotes" and ')
    })

    it('handles unicode characters', () => {
      // Emoji must survive formatting without mangling (no re-encoding).
      log.info('🚀 Unicode message ✅')
      expect(consoleSpy.log.mock.calls[0][0]).toContain('🚀 Unicode message ✅')
    })
  })
})


================================================
FILE: __tests__/server/rateLimit.test.ts
================================================
import { describe, it, expect, beforeEach, vi } from 'vitest'
import { checkRateLimit, resetRateLimit } from '../../src/server/utils/rateLimit'

describe('rateLimit utilities', () => {
  beforeEach(() => {
    // Reset all rate limits before each test
    // NOTE(review): only these two keys are reset here; tests below also use
    // 'key1'/'key2'/'rapid-test', which they reset (or use once) themselves —
    // the suite is therefore order-sensitive for those keys.
    resetRateLimit('test-key')
    resetRateLimit('another-key')
  })

  describe('checkRateLimit', () => {
    it('allows first request', () => {
      const result = checkRateLimit('test-key')
      expect(result.allowed).toBe(true)
      expect(result.retryAfter).toBeUndefined()
    })

    it('allows requests up to the limit', () => {
      // The limit is 5 attempts per window (see the blocking test below).
      for (let i = 0; i < 5; i++) {
        const result = checkRateLimit('test-key')
        expect(result.allowed).toBe(true)
      }
    })

    it('blocks requests after limit is exceeded', () => {
      // Use up all attempts
      for (let i = 0; i < 5; i++) {
        checkRateLimit('test-key')
      }
      // This should be blocked
      const result = checkRateLimit('test-key')
      expect(result.allowed).toBe(false)
      expect(result.retryAfter).toBeDefined()
      expect(result.retryAfter).toBeGreaterThan(0)
    })

    it('tracks limits per key independently', () => {
      // Use up key1 limit
      for (let i = 0; i < 5; i++) {
        checkRateLimit('key1')
      }
      // key2 should still be allowed
      const result = checkRateLimit('key2')
      expect(result.allowed).toBe(true)
    })

    it('returns retryAfter in seconds', () => {
      // Use up all attempts
      for (let i = 0; i < 5; i++) {
        checkRateLimit('test-key')
      }
      const result = checkRateLimit('test-key')
      expect(result.allowed).toBe(false)
      // retryAfter should be in seconds (less than 60 since window is 60s)
      expect(result.retryAfter).toBeLessThanOrEqual(60)
      expect(result.retryAfter).toBeGreaterThan(0)
    })
  })

  describe('resetRateLimit', () => {
    it('resets rate limit counter', () => {
      // Use up all attempts
      for (let i = 0; i < 5; i++) {
        checkRateLimit('test-key')
      }
      // Should be blocked
      expect(checkRateLimit('test-key').allowed).toBe(false)
      // Reset
      resetRateLimit('test-key')
      // Should be allowed again
      expect(checkRateLimit('test-key').allowed).toBe(true)
    })

    it('does not affect other keys', () => {
      // Use up both keys
      for (let i = 0; i < 5; i++) {
        checkRateLimit('key1')
        checkRateLimit('key2')
      }
      // Reset only key1
      resetRateLimit('key1')
      // key1 should be allowed, key2 should be blocked
      expect(checkRateLimit('key1').allowed).toBe(true)
      expect(checkRateLimit('key2').allowed).toBe(false)
    })

    it('handles resetting non-existent key', () => {
      // This should not throw
      expect(() => resetRateLimit('nonexistent')).not.toThrow()
    })
  })

  describe('rate limit timing', () => {
    it('resets after window expires', async () => {
      // Fake timers let us jump past the 60 s window deterministically.
      vi.useFakeTimers()
      // Use up all attempts
      for (let i = 0; i < 5; i++) {
        checkRateLimit('test-key')
      }
      expect(checkRateLimit('test-key').allowed).toBe(false)
      // Advance time past the window (60 seconds)
      vi.advanceTimersByTime(61 * 1000)
      // Should be allowed again
      expect(checkRateLimit('test-key').allowed).toBe(true)
      vi.useRealTimers()
    })

    it('does not reset before window expires', async () => {
      vi.useFakeTimers()
      // Use up all attempts
      for (let i = 0; i < 5; i++) {
        checkRateLimit('test-key')
      }
      // Advance time but not past the window
      vi.advanceTimersByTime(30 * 1000)
      // Should still be blocked
      expect(checkRateLimit('test-key').allowed).toBe(false)
      vi.useRealTimers()
    })
  })
describe('concurrent usage patterns', () => {
    it('handles rapid sequential requests', () => {
      // 10 back-to-back calls against a fresh key: first 5 pass, rest blocked.
      let blockedCount = 0
      for (let i = 0; i < 10; i++) {
        const result = checkRateLimit('rapid-test')
        if (!result.allowed) blockedCount++
      }
      // 5 allowed, 5 blocked
      expect(blockedCount).toBe(5)
    })
  })
})


================================================
FILE: __tests__/server/url.test.ts
================================================
import { describe, it, expect } from 'vitest'
import { isUrlAllowed, validateUrl } from '../../src/server/utils/url'

describe('url utilities', () => {
  describe('isUrlAllowed', () => {
    describe('valid URLs', () => {
      it('allows http URLs', () => {
        const result = isUrlAllowed('http://localhost:8080')
        expect(result.allowed).toBe(true)
      })

      it('allows https URLs', () => {
        const result = isUrlAllowed('https://example.com')
        expect(result.allowed).toBe(true)
      })

      it('allows IP addresses', () => {
        const result = isUrlAllowed('http://192.168.1.100:8080')
        expect(result.allowed).toBe(true)
      })

      it('allows URLs with paths', () => {
        const result = isUrlAllowed('http://localhost:8080/api/v1')
        expect(result.allowed).toBe(true)
      })

      it('allows URLs with query strings', () => {
        const result = isUrlAllowed('https://example.com/search?q=test')
        expect(result.allowed).toBe(true)
      })
    })

    describe('invalid URLs', () => {
      // Rejections carry a machine-readable `reason`; these pin the exact text
      // so UI/error handling elsewhere can rely on it.
      it('rejects invalid URL format', () => {
        const result = isUrlAllowed('not-a-url')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Invalid URL format')
      })

      it('rejects empty string', () => {
        const result = isUrlAllowed('')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Invalid URL format')
      })

      it('rejects ftp protocol', () => {
        const result = isUrlAllowed('ftp://example.com')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Only HTTP/HTTPS protocols allowed')
      })

      it('rejects file protocol', () => {
        const result = isUrlAllowed('file:///etc/passwd')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Only HTTP/HTTPS protocols allowed')
      })

      it('rejects javascript protocol', () => {
        const result = isUrlAllowed('javascript:alert(1)')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Only HTTP/HTTPS protocols allowed')
      })
    })

    describe('cloud metadata protection', () => {
      // SSRF hardening: link-local metadata services must never be reachable.
      it('blocks AWS metadata endpoint', () => {
        const result = isUrlAllowed('http://169.254.169.254/latest/meta-data/')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Cloud metadata endpoints not allowed')
      })

      it('blocks Google Cloud metadata endpoint', () => {
        const result = isUrlAllowed('http://metadata.google.internal/computeMetadata/')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Cloud metadata endpoints not allowed')
      })

      it('blocks AWS metadata internal', () => {
        const result = isUrlAllowed('http://metadata.aws.internal/')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Cloud metadata endpoints not allowed')
      })

      it('blocks ECS metadata endpoint', () => {
        const result = isUrlAllowed('http://169.254.170.2/v2/credentials')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Cloud metadata endpoints not allowed')
      })

      it('blocks any 169.254.x.x link-local address', () => {
        // The whole 169.254/16 range is blocked, not just known endpoints.
        const result = isUrlAllowed('http://169.254.1.1/')
        expect(result.allowed).toBe(false)
        expect(result.reason).toBe('Cloud metadata endpoints not allowed')
      })

      it('handles case-insensitive hostnames', () => {
        const result = isUrlAllowed('http://METADATA.GOOGLE.INTERNAL/')
        expect(result.allowed).toBe(false)
      })
    })
  })

  describe('validateUrl', () => {
    // validateUrl is the throwing wrapper around isUrlAllowed.
    it('does not throw for valid URLs', () => {
      expect(() => validateUrl('http://localhost:8080')).not.toThrow()
      expect(() => validateUrl('https://example.com')).not.toThrow()
    })

    it('throws for invalid URL format', () => {
      expect(() => validateUrl('not-a-url')).toThrow('Invalid URL format')
    })

    it('throws for invalid protocol', () => {
      expect(() => validateUrl('ftp://example.com')).toThrow('Only HTTP/HTTPS protocols allowed')
    })

    it('throws for cloud 
metadata endpoints', () => {
      expect(() => validateUrl('http://169.254.169.254/')).toThrow('Cloud metadata endpoints not allowed')
    })
  })
})


================================================
FILE: __tests__/themes/themes.test.ts
================================================
import { describe, it, expect } from 'vitest'
import { themes, getThemeById } from '../../src/themes/index'

describe('themes', () => {
  describe('themes array', () => {
    it('contains expected theme count', () => {
      expect(themes.length).toBeGreaterThanOrEqual(5)
    })

    it('has default theme first', () => {
      // getThemeById falls back to themes[0]; it must be the default theme.
      expect(themes[0].id).toBe('default')
    })

    it('all themes have required properties', () => {
      for (const theme of themes) {
        expect(theme.id).toBeTruthy()
        expect(theme.name).toBeTruthy()
        expect(theme.colors).toBeDefined()
        expect(theme.colors.bgPrimary).toBeTruthy()
        expect(theme.colors.bgSecondary).toBeTruthy()
        expect(theme.colors.textPrimary).toBeTruthy()
        expect(theme.colors.accent).toBeTruthy()
        expect(theme.colors.error).toBeTruthy()
        expect(theme.colors.warning).toBeTruthy()
      }
    })

    it('all colors are valid hex codes', () => {
      // Six-digit #RRGGBB only — no shorthand, no alpha channel.
      const hexPattern = /^#[0-9A-Fa-f]{6}$/
      for (const theme of themes) {
        for (const [key, value] of Object.entries(theme.colors)) {
          // Second expect argument is the failure label for easier triage.
          expect(value, `${theme.id}.colors.${key}`).toMatch(hexPattern)
        }
      }
    })

    it('has unique theme IDs', () => {
      const ids = themes.map(t => t.id)
      const uniqueIds = new Set(ids)
      expect(uniqueIds.size).toBe(ids.length)
    })

    it('has unique theme names', () => {
      const names = themes.map(t => t.name)
      const uniqueNames = new Set(names)
      expect(uniqueNames.size).toBe(names.length)
    })
  })

  describe('individual themes', () => {
    it('default (Midnight) theme has correct structure', () => {
      const midnight = themes.find(t => t.id === 'default')
      expect(midnight).toBeDefined()
      expect(midnight?.name).toBe('Midnight')
      expect(midnight?.colors.accent).toBe('#00d4aa')
    })

    it('catppuccin theme exists', () => {
      const catppuccin = themes.find(t => t.id === 'catppuccin')
      expect(catppuccin).toBeDefined()
      expect(catppuccin?.name).toBe('Catppuccin')
    })

    it('dracula theme exists', () => {
      const dracula = themes.find(t => t.id === 'dracula')
      expect(dracula).toBeDefined()
      expect(dracula?.colors.accent).toBe('#bd93f9')
    })

    it('nord theme exists', () => {
      const nord = themes.find(t => t.id === 'nord')
      expect(nord).toBeDefined()
    })

    it('gruvbox theme exists', () => {
      const gruvbox = themes.find(t => t.id === 'gruvbox')
      expect(gruvbox).toBeDefined()
    })

    it('everforest theme exists', () => {
      const everforest = themes.find(t => t.id === 'everforest')
      expect(everforest).toBeDefined()
    })
  })

  describe('getThemeById', () => {
    it('returns correct theme for valid ID', () => {
      const theme = getThemeById('catppuccin')
      expect(theme.id).toBe('catppuccin')
      expect(theme.name).toBe('Catppuccin')
    })

    it('returns default theme for unknown ID', () => {
      // Unknown IDs fall back to the first (default) theme rather than throwing.
      const theme = getThemeById('nonexistent')
      expect(theme).toEqual(themes[0])
      expect(theme.id).toBe('default')
    })

    it('returns default theme for empty string', () => {
      const theme = getThemeById('')
      expect(theme.id).toBe('default')
    })

    it('returns theme with all color properties', () => {
      const theme = getThemeById('nord')
      expect(theme.colors.bgPrimary).toBeDefined()
      expect(theme.colors.bgSecondary).toBeDefined()
      expect(theme.colors.bgTertiary).toBeDefined()
      expect(theme.colors.textPrimary).toBeDefined()
      expect(theme.colors.textSecondary).toBeDefined()
      expect(theme.colors.textMuted).toBeDefined()
      expect(theme.colors.accent).toBeDefined()
      expect(theme.colors.accentContrast).toBeDefined()
      expect(theme.colors.warning).toBeDefined()
      expect(theme.colors.error).toBeDefined()
      expect(theme.colors.border).toBeDefined()
      expect(theme.colors.progress).toBeDefined()
    })
  })

  describe('theme color accessibility', () => {
    it('text colors are light on dark backgrounds', () => {
      for (const theme of themes) {
        // Primary text should be light (high value)
        // NOTE(review): only the red channel (hex chars 1-3) is sampled as a
        // brightness proxy — adequate for these palettes, not a true luminance check.
        const textPrimary = parseInt(theme.colors.textPrimary.slice(1, 3), 16)
        expect(textPrimary, 
`${theme.id} textPrimary should be light`).toBeGreaterThan(150) // Primary background should be dark (low value) const bgPrimary = parseInt(theme.colors.bgPrimary.slice(1, 3), 16) expect(bgPrimary, `${theme.id} bgPrimary should be dark`).toBeLessThan(80) } }) }) }) ================================================ FILE: __tests__/utils/fileTree.test.ts ================================================ import { describe, it, expect } from 'vitest' import { buildFileTree, flattenVisibleNodes, getInitialExpanded, type FileTreeNode } from '../../src/utils/fileTree' import type { TorrentFile } from '../../src/types/torrentDetails' // Helper to create mock TorrentFile function createFile(name: string, size = 1000, priority = 1, progress = 0, availability = 1): TorrentFile { return { name, size, priority, progress, availability, index: 0, piece_range: [0, 0], is_seed: false } } describe('buildFileTree', () => { it('creates flat file list for single-level files', () => { const files: TorrentFile[] = [ createFile('file1.txt'), createFile('file2.txt'), ] const tree = buildFileTree(files) expect(tree).toHaveLength(2) expect(tree[0].name).toBe('file1.txt') expect(tree[0].isFolder).toBe(false) expect(tree[1].name).toBe('file2.txt') }) it('creates folder structure from paths', () => { const files: TorrentFile[] = [ createFile('folder/file1.txt'), createFile('folder/file2.txt'), ] const tree = buildFileTree(files) expect(tree).toHaveLength(1) expect(tree[0].name).toBe('folder') expect(tree[0].isFolder).toBe(true) expect(tree[0].children).toHaveLength(2) }) it('creates nested folder structure', () => { const files: TorrentFile[] = [ createFile('a/b/c/file.txt'), ] const tree = buildFileTree(files) expect(tree[0].name).toBe('a') expect(tree[0].children[0].name).toBe('b') expect(tree[0].children[0].children[0].name).toBe('c') expect(tree[0].children[0].children[0].children[0].name).toBe('file.txt') }) it('calculates folder sizes correctly', () => { const files: TorrentFile[] = [ 
createFile('folder/file1.txt', 1000), createFile('folder/file2.txt', 2000), ] const tree = buildFileTree(files) expect(tree[0].size).toBe(3000) }) it('sorts folders before files', () => { const files: TorrentFile[] = [ createFile('zfile.txt'), createFile('afolder/file.txt'), ] const tree = buildFileTree(files) expect(tree[0].name).toBe('afolder') expect(tree[0].isFolder).toBe(true) expect(tree[1].name).toBe('zfile.txt') expect(tree[1].isFolder).toBe(false) }) it('sorts nodes alphabetically within their type', () => { const files: TorrentFile[] = [ createFile('zfile.txt'), createFile('afile.txt'), createFile('mfile.txt'), ] const tree = buildFileTree(files) expect(tree[0].name).toBe('afile.txt') expect(tree[1].name).toBe('mfile.txt') expect(tree[2].name).toBe('zfile.txt') }) it('maps priority values correctly', () => { const files: TorrentFile[] = [ createFile('skip.txt', 100, 0), createFile('normal.txt', 100, 1), createFile('high.txt', 100, 6), createFile('max.txt', 100, 7), ] const tree = buildFileTree(files) expect(tree.find(n => n.name === 'skip.txt')?.priority).toBe('skip') expect(tree.find(n => n.name === 'normal.txt')?.priority).toBe('normal') expect(tree.find(n => n.name === 'high.txt')?.priority).toBe('high') expect(tree.find(n => n.name === 'max.txt')?.priority).toBe('max') }) it('sets mixed priority for folders with different file priorities', () => { const files: TorrentFile[] = [ createFile('folder/file1.txt', 100, 1), createFile('folder/file2.txt', 100, 6), ] const tree = buildFileTree(files) expect(tree[0].priority).toBe('mixed') }) it('calculates folder progress correctly', () => { const files: TorrentFile[] = [ createFile('folder/file1.txt', 1000, 1, 0.5), createFile('folder/file2.txt', 1000, 1, 1.0), ] const tree = buildFileTree(files) expect(tree[0].progress).toBeCloseTo(0.75) }) }) describe('flattenVisibleNodes', () => { function createTestTree(): FileTreeNode[] { return [ { name: 'folder1', path: 'folder1', isFolder: true, size: 1000, progress: 
0, priority: 'normal', availability: 1, fileIndices: [0, 1], children: [
        {
          name: 'file1.txt', path: 'folder1/file1.txt', isFolder: false, size: 500,
          progress: 0, priority: 'normal', availability: 1, fileIndices: [0], children: [],
        },
        {
          name: 'file2.txt', path: 'folder1/file2.txt', isFolder: false, size: 500,
          progress: 0, priority: 'normal', availability: 1, fileIndices: [1], children: [],
        },
      ],
    },
    {
      name: 'file3.txt', path: 'file3.txt', isFolder: false, size: 1000, progress: 0,
      priority: 'normal', availability: 1, fileIndices: [2], children: [],
    },
  ]
}

it('returns only top-level nodes when nothing is expanded', () => {
  const tree = createTestTree()
  // Fix: a bare `new Set()` infers Set<unknown>, which does not satisfy the
  // Set<string> of expanded folder paths that flattenVisibleNodes takes
  // (the sibling tests pass string paths, e.g. new Set(['folder1'])).
  const expanded = new Set<string>()
  const flattened = flattenVisibleNodes(tree, expanded)
  expect(flattened).toHaveLength(2)
  expect(flattened[0].node.name).toBe('folder1')
  expect(flattened[0].depth).toBe(0)
  expect(flattened[1].node.name).toBe('file3.txt')
})

it('includes children when folder is expanded', () => {
  const tree = createTestTree()
  const expanded = new Set(['folder1'])
  const flattened = flattenVisibleNodes(tree, expanded)
  expect(flattened).toHaveLength(4)
  expect(flattened[0].node.name).toBe('folder1')
  expect(flattened[0].depth).toBe(0)
  expect(flattened[1].node.name).toBe('file1.txt')
  expect(flattened[1].depth).toBe(1)
  expect(flattened[2].node.name).toBe('file2.txt')
  expect(flattened[2].depth).toBe(1)
})

it('correctly tracks depth for nested expansions', () => {
  // Three-level tree a/b/file.txt with both folders expanded: depth must
  // increase by one per nesting level.
  const tree: FileTreeNode[] = [{
    name: 'a', path: 'a', isFolder: true, size: 0, progress: 0, priority: 'normal',
    availability: 0, fileIndices: [], children: [{
      name: 'b', path: 'a/b', isFolder: true, size: 0, progress: 0, priority: 'normal',
      availability: 0, fileIndices: [], children: [{
        name: 'file.txt', path: 'a/b/file.txt', isFolder: false, size: 100, progress: 0,
        priority: 'normal', availability: 0, fileIndices: [0], children: [],
      }],
    }],
  }]
  const expanded = new Set(['a', 'a/b'])
  const flattened = flattenVisibleNodes(tree, expanded)
expect(flattened).toHaveLength(3) expect(flattened[0].depth).toBe(0) expect(flattened[1].depth).toBe(1) expect(flattened[2].depth).toBe(2) }) }) describe('getInitialExpanded', () => { it('returns empty set for files only', () => { const tree: FileTreeNode[] = [ { name: 'file.txt', path: 'file.txt', isFolder: false, size: 100, progress: 0, priority: 'normal', availability: 0, fileIndices: [0], children: [], }, ] const expanded = getInitialExpanded(tree) expect(expanded.size).toBe(0) }) it('expands single-child folder paths', () => { const tree: FileTreeNode[] = [{ name: 'a', path: 'a', isFolder: true, size: 100, progress: 0, priority: 'normal', availability: 0, fileIndices: [], children: [{ name: 'b', path: 'a/b', isFolder: true, size: 100, progress: 0, priority: 'normal', availability: 0, fileIndices: [], children: [{ name: 'file.txt', path: 'a/b/file.txt', isFolder: false, size: 100, progress: 0, priority: 'normal', availability: 0, fileIndices: [0], children: [], }], }], }] const expanded = getInitialExpanded(tree) expect(expanded.has('a')).toBe(true) expect(expanded.has('a/b')).toBe(true) }) it('stops expanding when there are multiple folders', () => { const tree: FileTreeNode[] = [{ name: 'a', path: 'a', isFolder: true, size: 0, progress: 0, priority: 'normal', availability: 0, fileIndices: [], children: [ { name: 'b', path: 'a/b', isFolder: true, size: 0, progress: 0, priority: 'normal', availability: 0, fileIndices: [], children: [], }, { name: 'c', path: 'a/c', isFolder: true, size: 0, progress: 0, priority: 'normal', availability: 0, fileIndices: [], children: [], }, ], }] const expanded = getInitialExpanded(tree) expect(expanded.has('a')).toBe(true) expect(expanded.has('a/b')).toBe(false) expect(expanded.has('a/c')).toBe(false) }) }) ================================================ FILE: __tests__/utils/format.test.ts ================================================ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' import { 
formatSpeed, formatSize, formatCompactSpeed, formatCompactSize, formatEta, formatDate, formatDuration, formatRelativeTime, formatRelativeDate, normalizeSearch, } from '../../src/utils/format' describe('formatSpeed', () => { it('formats bytes per second', () => { expect(formatSpeed(0)).toBe('0 B/s') expect(formatSpeed(512)).toBe('512 B/s') expect(formatSpeed(1023)).toBe('1023 B/s') }) it('formats kibibytes per second', () => { expect(formatSpeed(1024)).toBe('1.0 KiB/s') expect(formatSpeed(1536)).toBe('1.5 KiB/s') expect(formatSpeed(1024 * 1024 - 1)).toBe('1024.0 KiB/s') }) it('formats mebibytes per second', () => { expect(formatSpeed(1024 * 1024)).toBe('1.00 MiB/s') expect(formatSpeed(1.5 * 1024 * 1024)).toBe('1.50 MiB/s') expect(formatSpeed(100 * 1024 * 1024)).toBe('100.00 MiB/s') }) it('returns dash when showZero is false and value is 0', () => { expect(formatSpeed(0, false)).toBe('—') }) }) describe('formatSize', () => { it('formats bytes', () => { expect(formatSize(0)).toBe('0 B') expect(formatSize(1)).toBe('1 B') expect(formatSize(1023)).toBe('1023 B') }) it('formats kibibytes', () => { expect(formatSize(1024)).toBe('1.0 KiB') expect(formatSize(1536)).toBe('1.5 KiB') }) it('formats mebibytes', () => { expect(formatSize(1024 * 1024)).toBe('1.0 MiB') expect(formatSize(500 * 1024 * 1024)).toBe('500.0 MiB') }) it('formats gibibytes', () => { expect(formatSize(1024 * 1024 * 1024)).toBe('1.00 GiB') expect(formatSize(4.7 * 1024 * 1024 * 1024)).toBe('4.70 GiB') }) it('formats tebibytes', () => { expect(formatSize(1024 * 1024 * 1024 * 1024)).toBe('1.00 TiB') expect(formatSize(2.5 * 1024 * 1024 * 1024 * 1024)).toBe('2.50 TiB') }) }) describe('formatCompactSpeed', () => { it('returns dash for zero', () => { expect(formatCompactSpeed(0)).toBe('-') }) it('formats compact bytes', () => { expect(formatCompactSpeed(512)).toBe('512B') }) it('formats compact kibibytes', () => { expect(formatCompactSpeed(1024)).toBe('1Ki') expect(formatCompactSpeed(2048)).toBe('2Ki') }) 
it('formats compact mebibytes', () => { expect(formatCompactSpeed(1024 * 1024)).toBe('1.0Mi') expect(formatCompactSpeed(10.5 * 1024 * 1024)).toBe('10.5Mi') }) }) describe('formatCompactSize', () => { it('formats compact bytes', () => { expect(formatCompactSize(512)).toBe('512B') }) it('formats compact kibibytes', () => { expect(formatCompactSize(1024)).toBe('1Ki') }) it('formats compact mebibytes', () => { expect(formatCompactSize(1024 * 1024)).toBe('1Mi') }) it('formats compact gibibytes', () => { expect(formatCompactSize(1024 * 1024 * 1024)).toBe('1.0Gi') }) }) describe('formatEta', () => { it('returns infinity for negative values', () => { expect(formatEta(-1)).toBe('∞') expect(formatEta(-1000)).toBe('∞') }) it('returns infinity for qBittorrent unknown value', () => { expect(formatEta(8640000)).toBe('∞') }) it('formats seconds', () => { expect(formatEta(0)).toBe('0s') expect(formatEta(30)).toBe('30s') expect(formatEta(59)).toBe('59s') }) it('formats minutes', () => { expect(formatEta(60)).toBe('1m') expect(formatEta(120)).toBe('2m') expect(formatEta(3599)).toBe('59m') }) it('formats hours and minutes', () => { expect(formatEta(3600)).toBe('1h 0m') expect(formatEta(3661)).toBe('1h 1m') expect(formatEta(7200)).toBe('2h 0m') }) it('formats days', () => { expect(formatEta(86400)).toBe('1d') expect(formatEta(172800)).toBe('2d') }) }) describe('formatDate', () => { it('returns dash for zero or negative timestamp', () => { expect(formatDate(0)).toBe('—') expect(formatDate(-1)).toBe('—') }) it('formats valid timestamps', () => { // Just verify it returns a non-empty string for valid timestamps const result = formatDate(1704067200) // Jan 1, 2024 00:00:00 UTC expect(result).toBeTruthy() expect(result).not.toBe('—') }) }) describe('formatDuration', () => { it('returns dash for negative values', () => { expect(formatDuration(-1)).toBe('—') }) it('formats seconds only', () => { expect(formatDuration(0)).toBe('0s') expect(formatDuration(30)).toBe('30s') 
expect(formatDuration(59)).toBe('59s') }) it('formats minutes and seconds', () => { expect(formatDuration(60)).toBe('1m 0s') expect(formatDuration(125)).toBe('2m 5s') }) it('formats hours, minutes, and seconds', () => { expect(formatDuration(3600)).toBe('1h 0m 0s') expect(formatDuration(3665)).toBe('1h 1m 5s') }) it('formats days, hours, and minutes', () => { expect(formatDuration(86400)).toBe('1d 0h 0m') expect(formatDuration(90061)).toBe('1d 1h 1m') }) }) describe('formatRelativeTime', () => { beforeEach(() => { vi.useFakeTimers() vi.setSystemTime(new Date('2024-01-15T12:00:00Z')) }) afterEach(() => { vi.useRealTimers() }) it('returns Never for zero or negative timestamp', () => { expect(formatRelativeTime(0)).toBe('Never') expect(formatRelativeTime(-1)).toBe('Never') }) it('returns Just now for recent timestamps', () => { const now = Math.floor(Date.now() / 1000) expect(formatRelativeTime(now)).toBe('Just now') expect(formatRelativeTime(now - 30)).toBe('Just now') }) it('formats minutes ago', () => { const now = Math.floor(Date.now() / 1000) expect(formatRelativeTime(now - 60)).toBe('1m ago') expect(formatRelativeTime(now - 300)).toBe('5m ago') }) it('formats hours ago', () => { const now = Math.floor(Date.now() / 1000) expect(formatRelativeTime(now - 3600)).toBe('1h ago') expect(formatRelativeTime(now - 7200)).toBe('2h ago') }) it('formats days ago', () => { const now = Math.floor(Date.now() / 1000) expect(formatRelativeTime(now - 86400)).toBe('1d ago') expect(formatRelativeTime(now - 259200)).toBe('3d ago') }) it('formats weeks ago', () => { const now = Math.floor(Date.now() / 1000) expect(formatRelativeTime(now - 604800)).toBe('1w ago') expect(formatRelativeTime(now - 1209600)).toBe('2w ago') }) }) describe('formatRelativeDate', () => { beforeEach(() => { vi.useFakeTimers() vi.setSystemTime(new Date('2024-01-15T12:00:00Z')) }) afterEach(() => { vi.useRealTimers() }) it('returns dash for zero or negative timestamp', () => { 
expect(formatRelativeDate(0)).toBe('-') expect(formatRelativeDate(-1)).toBe('-') }) it('returns Today for same day', () => { const todayTimestamp = Math.floor(Date.now() / 1000) expect(formatRelativeDate(todayTimestamp)).toBe('Today') }) it('returns Yesterday for previous day', () => { const yesterdayTimestamp = Math.floor(Date.now() / 1000) - 86400 expect(formatRelativeDate(yesterdayTimestamp)).toBe('Yesterday') }) it('formats days ago within a week', () => { const threeDaysAgo = Math.floor(Date.now() / 1000) - 86400 * 3 expect(formatRelativeDate(threeDaysAgo)).toBe('3d ago') }) }) describe('normalizeSearch', () => { it('converts to lowercase', () => { expect(normalizeSearch('HELLO')).toBe('hello') expect(normalizeSearch('Hello World')).toBe('hello world') }) it('replaces dots, underscores, and hyphens with spaces', () => { expect(normalizeSearch('hello.world')).toBe('hello world') expect(normalizeSearch('hello_world')).toBe('hello world') expect(normalizeSearch('hello-world')).toBe('hello world') }) it('normalizes multiple separators', () => { expect(normalizeSearch('hello...world')).toBe('hello world') expect(normalizeSearch('hello___world')).toBe('hello world') expect(normalizeSearch('hello---world')).toBe('hello world') expect(normalizeSearch('hello._-world')).toBe('hello world') }) it('handles torrent-style names', () => { expect(normalizeSearch('Movie.Name.2024.1080p.BluRay')).toBe('movie name 2024 1080p bluray') }) it('handles empty string', () => { expect(normalizeSearch('')).toBe('') }) it('handles strings with only separators', () => { expect(normalizeSearch('...')).toBe(' ') expect(normalizeSearch('___')).toBe(' ') }) it('preserves numbers', () => { expect(normalizeSearch('file123.txt')).toBe('file123 txt') }) it('handles mixed separators at start and end', () => { expect(normalizeSearch('.hello.')).toBe(' hello ') expect(normalizeSearch('-test-')).toBe(' test ') }) }) // Additional edge case tests describe('format edge cases', () => { 
describe('formatSpeed edge cases', () => { it('handles very large values', () => { expect(formatSpeed(1024 * 1024 * 1024)).toBe('1024.00 MiB/s') }) it('handles floating point precision', () => { expect(formatSpeed(1536)).toBe('1.5 KiB/s') }) }) describe('formatSize edge cases', () => { it('handles exact boundary values', () => { expect(formatSize(1024)).toBe('1.0 KiB') expect(formatSize(1024 * 1024)).toBe('1.0 MiB') expect(formatSize(1024 * 1024 * 1024)).toBe('1.00 GiB') expect(formatSize(1024 * 1024 * 1024 * 1024)).toBe('1.00 TiB') }) it('handles values just below boundaries', () => { expect(formatSize(1023)).toBe('1023 B') expect(formatSize(1024 * 1024 - 1)).toBe('1024.0 KiB') }) }) describe('formatEta edge cases', () => { it('handles exact boundary transitions', () => { expect(formatEta(59)).toBe('59s') expect(formatEta(60)).toBe('1m') expect(formatEta(3599)).toBe('59m') expect(formatEta(3600)).toBe('1h 0m') expect(formatEta(86399)).toBe('23h 59m') expect(formatEta(86400)).toBe('1d') }) }) describe('formatDuration edge cases', () => { it('handles exact day boundary', () => { expect(formatDuration(86400)).toBe('1d 0h 0m') }) it('handles complex durations', () => { expect(formatDuration(90061)).toBe('1d 1h 1m') }) }) }) ================================================ FILE: __tests__/utils/pagination.test.ts ================================================ import { describe, it, expect } from 'vitest' import { PER_PAGE_OPTIONS } from '../../src/utils/pagination' describe('pagination', () => { describe('PER_PAGE_OPTIONS', () => { it('contains expected values', () => { expect(PER_PAGE_OPTIONS).toEqual([25, 50, 100, 200]) }) it('is readonly', () => { // TypeScript should prevent mutation, but verify the values expect(PER_PAGE_OPTIONS[0]).toBe(25) expect(PER_PAGE_OPTIONS.length).toBe(4) }) it('values are in ascending order', () => { for (let i = 1; i < PER_PAGE_OPTIONS.length; i++) { expect(PER_PAGE_OPTIONS[i]).toBeGreaterThan(PER_PAGE_OPTIONS[i - 1]) } }) it('starts 
with a reasonable minimum', () => { expect(PER_PAGE_OPTIONS[0]).toBeGreaterThanOrEqual(10) }) it('has a reasonable maximum', () => { expect(PER_PAGE_OPTIONS[PER_PAGE_OPTIONS.length - 1]).toBeLessThanOrEqual(500) }) }) }) ================================================ FILE: __tests__/utils/ratioThresholds.test.ts ================================================ import { describe, it, expect, beforeEach, vi } from 'vitest' import { loadRatioThreshold, saveRatioThreshold } from '../../src/utils/ratioThresholds' describe('ratioThresholds', () => { // Mock localStorage const localStorageMock = (() => { let store: Record = {} return { getItem: vi.fn((key: string) => store[key] ?? null), setItem: vi.fn((key: string, value: string) => { store[key] = value }), clear: () => { store = {} }, } })() beforeEach(() => { localStorageMock.clear() vi.stubGlobal('localStorage', localStorageMock) }) describe('loadRatioThreshold', () => { it('returns default threshold (1.0) when no value stored', () => { expect(loadRatioThreshold()).toBe(1.0) }) it('returns stored threshold when valid', () => { localStorageMock.setItem('ratioThreshold', '2.5') expect(loadRatioThreshold()).toBe(2.5) }) it('returns default for invalid stored value', () => { localStorageMock.setItem('ratioThreshold', 'not-a-number') expect(loadRatioThreshold()).toBe(1.0) }) it('returns default for negative stored value', () => { localStorageMock.setItem('ratioThreshold', '-1') expect(loadRatioThreshold()).toBe(1.0) }) it('accepts zero as valid threshold', () => { localStorageMock.setItem('ratioThreshold', '0') expect(loadRatioThreshold()).toBe(0) }) it('handles decimal values correctly', () => { localStorageMock.setItem('ratioThreshold', '0.5') expect(loadRatioThreshold()).toBe(0.5) }) it('handles large values', () => { localStorageMock.setItem('ratioThreshold', '100') expect(loadRatioThreshold()).toBe(100) }) }) describe('saveRatioThreshold', () => { it('saves threshold to localStorage', () => { saveRatioThreshold(2.0) 
expect(localStorageMock.setItem).toHaveBeenCalledWith('ratioThreshold', '2') }) it('saves decimal values', () => { saveRatioThreshold(1.5) expect(localStorageMock.setItem).toHaveBeenCalledWith('ratioThreshold', '1.5') }) it('saves zero', () => { saveRatioThreshold(0) expect(localStorageMock.setItem).toHaveBeenCalledWith('ratioThreshold', '0') }) }) }) ================================================ FILE: __tests__/utils/search.test.ts ================================================ import { describe, it, expect } from 'vitest' import { extractTags, sortResults, filterResults } from '../../src/utils/search' describe('extractTags', () => { it('extracts resolution tags', () => { const titles = [ 'Movie.2024.1080p.BluRay', 'Show.S01E01.720p.WEB-DL', 'Film.2024.2160p.UHD', ] const tags = extractTags(titles) expect(tags.find(t => t.tag === '1080P')).toBeTruthy() expect(tags.find(t => t.tag === '720P')).toBeTruthy() expect(tags.find(t => t.tag === '2160P')).toBeTruthy() }) it('extracts codec tags', () => { const titles = ['Movie.x264.mkv', 'Film.x265.mp4', 'Show.HEVC.avi'] const tags = extractTags(titles) expect(tags.find(t => t.tag === 'X264')).toBeTruthy() expect(tags.find(t => t.tag === 'X265')).toBeTruthy() expect(tags.find(t => t.tag === 'HEVC')).toBeTruthy() }) it('extracts source tags', () => { const titles = [ 'Movie.BluRay.x264', 'Show.WEB-DL.1080p', 'Film.HDRip.720p', ] const tags = extractTags(titles) expect(tags.find(t => t.tag === 'BLURAY')).toBeTruthy() expect(tags.find(t => t.tag === 'WEB-DL')).toBeTruthy() expect(tags.find(t => t.tag === 'HDRIP')).toBeTruthy() }) it('counts tag occurrences', () => { const titles = [ 'Movie1.1080p.x264', 'Movie2.1080p.x265', 'Movie3.1080p.HEVC', ] const tags = extractTags(titles) const tag1080p = tags.find(t => t.tag === '1080P') expect(tag1080p?.count).toBe(3) }) it('sorts by count descending', () => { const titles = [ 'Movie1.1080p.x264', 'Movie2.1080p.x264', 'Movie3.720p.x264', ] const tags = extractTags(titles) // 
x264 appears 3 times, 1080p appears 2 times expect(tags[0].tag).toBe('X264') expect(tags[0].count).toBe(3) }) it('returns empty array for titles with no tags', () => { const titles = ['just a regular title', 'another title here'] const tags = extractTags(titles) expect(tags).toHaveLength(0) }) }) describe('sortResults', () => { const mockResults = [ { title: 'Movie A', seeders: 100, size: 1000, publishDate: '2024-01-15' }, { title: 'Movie B', seeders: 50, size: 2000, publishDate: '2024-01-10' }, { title: 'Movie C', seeders: 200, size: 500, publishDate: '2024-01-20' }, ] describe('sorting by seeders', () => { it('sorts by seeders descending (default)', () => { const sorted = sortResults(mockResults, 'seeders', false) expect(sorted[0].title).toBe('Movie C') expect(sorted[1].title).toBe('Movie A') expect(sorted[2].title).toBe('Movie B') }) it('sorts by seeders ascending', () => { const sorted = sortResults(mockResults, 'seeders', true) expect(sorted[0].title).toBe('Movie B') expect(sorted[1].title).toBe('Movie A') expect(sorted[2].title).toBe('Movie C') }) }) describe('sorting by size', () => { it('sorts by size descending (default)', () => { const sorted = sortResults(mockResults, 'size', false) expect(sorted[0].title).toBe('Movie B') expect(sorted[1].title).toBe('Movie A') expect(sorted[2].title).toBe('Movie C') }) it('sorts by size ascending', () => { const sorted = sortResults(mockResults, 'size', true) expect(sorted[0].title).toBe('Movie C') expect(sorted[1].title).toBe('Movie A') expect(sorted[2].title).toBe('Movie B') }) }) describe('sorting by age', () => { it('sorts by age descending (newest first)', () => { const sorted = sortResults(mockResults, 'age', false) expect(sorted[0].title).toBe('Movie C') expect(sorted[1].title).toBe('Movie A') expect(sorted[2].title).toBe('Movie B') }) it('sorts by age ascending (oldest first)', () => { const sorted = sortResults(mockResults, 'age', true) expect(sorted[0].title).toBe('Movie B') expect(sorted[1].title).toBe('Movie 
A') expect(sorted[2].title).toBe('Movie C') }) }) it('handles missing seeders', () => { const results = [ { title: 'A', size: 100, publishDate: '2024-01-01' }, { title: 'B', seeders: 10, size: 100, publishDate: '2024-01-01' }, ] const sorted = sortResults(results, 'seeders', false) expect(sorted[0].title).toBe('B') expect(sorted[1].title).toBe('A') }) it('does not mutate original array', () => { const original = [...mockResults] sortResults(mockResults, 'seeders', false) expect(mockResults).toEqual(original) }) }) describe('filterResults', () => { const mockResults = [ { title: 'The Matrix 1999', extra: 'data' }, { title: 'Matrix Reloaded 2003', extra: 'info' }, { title: 'Inception 2010', extra: 'value' }, ] it('returns all results when filter is empty', () => { expect(filterResults(mockResults, '')).toHaveLength(3) }) it('filters by title case-insensitively', () => { const filtered = filterResults(mockResults, 'matrix') expect(filtered).toHaveLength(2) expect(filtered[0].title).toBe('The Matrix 1999') expect(filtered[1].title).toBe('Matrix Reloaded 2003') }) it('handles uppercase filter', () => { const filtered = filterResults(mockResults, 'INCEPTION') expect(filtered).toHaveLength(1) expect(filtered[0].title).toBe('Inception 2010') }) it('returns empty array when no matches', () => { const filtered = filterResults(mockResults, 'nonexistent') expect(filtered).toHaveLength(0) }) it('matches partial strings', () => { const filtered = filterResults(mockResults, 'rix') expect(filtered).toHaveLength(2) }) }) ================================================ FILE: docs/.vitepress/config.ts ================================================ import { defineConfig } from 'vitepress' export default defineConfig({ title: 'qbitwebui', description: 'Modern web interface for qBittorrent', base: '/qbitwebui/', head: [ ['link', { rel: 'icon', href: '/qbitwebui/logo.svg' }] ], themeConfig: { logo: '/logo.svg', nav: [ { text: 'Guide', link: '/guide/getting-started' }, { text: 
'GitHub', link: 'https://github.com/Maciejonos/qbitwebui' }, ], sidebar: [ { text: 'Guide', items: [ { text: 'Getting Started', link: '/guide/getting-started' }, { text: 'Configuration', link: '/guide/configuration' }, { text: 'Features', link: '/guide/features' }, { text: 'Docker', link: '/guide/docker' }, ], }, { text: 'Add-ons', items: [{ text: 'Network Agent', link: '/guide/network-agent/' }], }, ], socialLinks: [{ icon: 'github', link: 'https://github.com/Maciejonos/qbitwebui' }], search: { provider: 'local' }, footer: { message: 'Released under the MIT License.' }, }, }) ================================================ FILE: docs/.vitepress/theme/custom.css ================================================ :root { --vp-c-brand-1: #0d7a6e; --vp-c-brand-2: #0f665c; --vp-c-brand-3: #15803d; --vp-c-brand-soft: rgba(13, 122, 110, 0.1); } .dark { --vp-c-bg: #07070a; --vp-c-bg-alt: #0a0a0f; --vp-sidebar-bg-color: #0a0a0f; --vp-c-bg-soft: #0e0e14; --vp-code-block-bg: #0e0e14; --vp-c-border: #32323e; --vp-c-divider: #32323e; --vp-c-gutter: #32323e; --vp-c-text-1: #e8e8ed; --vp-c-text-2: #b8b8c8; --vp-c-text-3: #8a8a9e; --vp-c-brand-1: #00d4aa; --vp-c-brand-2: #33eec9; --vp-c-brand-3: #00b38f; --vp-c-brand-soft: rgba(0, 212, 170, 0.15); --vp-c-warning-1: #f7b731; --vp-c-warning-2: #e0a01f; --vp-c-danger-1: #f43f5e; --vp-c-danger-2: #e11d48; } .VPHero .name { -webkit-background-clip: text; background-clip: text; -webkit-text-fill-color: transparent; } :root .VPHero .name { background-image: linear-gradient(135deg, #16a34a 0%, #0d9488 100%); } .dark .VPHero .name { background-image: linear-gradient(135deg, #9bda65 0%, #33c9a9 50%, #1ec6b7 100%); } .VPButton.brand { border-color: transparent; transition: all 0.2s ease; } :root .VPButton.brand { color: white !important; background-image: linear-gradient(135deg, #0d9488 0%, #0d7a6e 100%); } .dark .VPButton.brand { background-color: #00d4aa; background-image: none; color: #070a09 !important; font-weight: 600; } .dark 
.VPButton.brand:hover { background-color: #33eec9; color: #070a09 !important; } ::selection { background: rgba(0, 212, 170, 0.3); color: inherit; } ================================================ FILE: docs/.vitepress/theme/index.ts ================================================ import DefaultTheme from 'vitepress/theme' import './custom.css' export default DefaultTheme ================================================ FILE: docs/guide/configuration.md ================================================ # Configuration All configuration is done through environment variables. ## Required | Variable | Description | |----------|-------------| | `ENCRYPTION_KEY` | AES-256 key for encrypting stored credentials. Minimum 32 characters. | Generate a key: ```bash openssl rand -hex 32 ``` ## Server | Variable | Default | Description | |----------|---------|-------------| | `PORT` | `3000` | HTTP server port | | `DATABASE_PATH` | `./data/qbitwebui.db` | SQLite database location | | `SALT_PATH` | `./data/.salt` | Encryption salt file location | ## Authentication | Variable | Default | Description | |----------|---------|-------------| | `DISABLE_AUTH` | `false` | Skip authentication entirely | | `DISABLE_REGISTRATION` | `false` | Prevent new user signups | ### Disable Auth Use when running behind an authenticating reverse proxy (Authelia, Authentik, etc.): ```yaml environment: - DISABLE_AUTH=true ``` ::: danger Only use behind a properly secured reverse proxy. Anyone who can reach qbitwebui will have full access. ::: ### Disable Registration Lock down to existing users only. 
On first start with no users, generates a random admin password printed to logs: ```yaml environment: - DISABLE_REGISTRATION=true ``` ## Features | Variable | Default | Description | |----------|---------|-------------| | `DOWNLOADS_PATH` | - | Enable file browser at this path | | `ALLOW_SELF_SIGNED_CERTS` | `false` | Accept self-signed TLS certificates | ### File Browser Mount your downloads directory and set the path: ```yaml environment: - DOWNLOADS_PATH=/downloads volumes: - /path/to/downloads:/downloads:ro ``` The `:ro` makes it read-only. Remove for write access (delete, move, rename). ### Self-Signed Certificates If your qBittorrent uses HTTPS with a self-signed certificate: ```yaml environment: - ALLOW_SELF_SIGNED_CERTS=true ``` ## Database SQLite database stores: | Data | Security | |------|----------| | Users | Passwords hashed with bcrypt (cost 12) | | Sessions | Random tokens, 7-day expiry | | Instances | Credentials encrypted with AES-256-GCM | | Integrations | API keys encrypted with AES-256-GCM | ### Backup ```bash cp ./data/qbitwebui.db ./backup/ ``` ### Restore ```bash cp ./backup/qbitwebui.db ./data/ ``` Use the same `ENCRYPTION_KEY` after restore. ================================================ FILE: docs/guide/docker.md ================================================ # Docker Deployment ## Images | Image | Description | |-------|-------------| | `ghcr.io/maciejonos/qbitwebui` | Main application | | `ghcr.io/maciejonos/qbitwebui-agent` | Network diagnostics agent | Both support `linux/amd64` and `linux/arm64`. 
## Quick Start ```bash docker run -d \ --name qbitwebui \ -p 3000:3000 \ -v ./data:/data \ -e ENCRYPTION_KEY=$(openssl rand -hex 32) \ ghcr.io/maciejonos/qbitwebui:latest ``` ## Docker Compose Examples ### Basic ```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./qbitwebui-data:/data environment: - ENCRYPTION_KEY=your-32-character-minimum-key-here restart: unless-stopped ``` ### With File Browser ```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./qbitwebui-data:/data - /path/to/your/downloads:/downloads:ro environment: - ENCRYPTION_KEY=your-32-character-minimum-key-here - DOWNLOADS_PATH=/downloads restart: unless-stopped ``` ### Full Stack (qBittorrent + Agent + qbitwebui) Complete setup with all components: ```yaml services: qbittorrent: image: linuxserver/qbittorrent:latest container_name: qbittorrent environment: - PUID=1000 - PGID=1000 - TZ=Europe/London - WEBUI_PORT=8080 volumes: - ./qbittorrent-config:/config - ./downloads:/downloads ports: - "8080:8080" # qBittorrent WebUI - "6881:6881" # BitTorrent TCP - "6881:6881/udp" # BitTorrent UDP - "9876:9876" # Network Agent restart: unless-stopped net-agent: image: ghcr.io/maciejonos/qbitwebui-agent:latest container_name: net-agent network_mode: "service:qbittorrent" environment: - QBT_URL=http://localhost:8080 depends_on: - qbittorrent restart: unless-stopped qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./qbitwebui-data:/data - ./downloads:/downloads:ro environment: - ENCRYPTION_KEY=your-32-character-minimum-key-here - DOWNLOADS_PATH=/downloads depends_on: - qbittorrent restart: unless-stopped ``` ### With VPN (Gluetun) Route qBittorrent through a VPN: ```yaml services: gluetun: image: qmcgaw/gluetun:latest container_name: gluetun cap_add: - NET_ADMIN devices: - /dev/net/tun:/dev/net/tun 
environment: - VPN_SERVICE_PROVIDER=mullvad # or your provider - VPN_TYPE=wireguard - WIREGUARD_PRIVATE_KEY=your-private-key - WIREGUARD_ADDRESSES=10.x.x.x/32 - SERVER_COUNTRIES=Sweden ports: - "8080:8080" # qBittorrent WebUI - "6881:6881" # BitTorrent - "6881:6881/udp" - "9876:9876" # Network Agent restart: unless-stopped qbittorrent: image: linuxserver/qbittorrent:latest container_name: qbittorrent network_mode: "service:gluetun" environment: - PUID=1000 - PGID=1000 - WEBUI_PORT=8080 volumes: - ./qbittorrent-config:/config - ./downloads:/downloads depends_on: - gluetun restart: unless-stopped net-agent: image: ghcr.io/maciejonos/qbitwebui-agent:latest container_name: net-agent network_mode: "service:gluetun" environment: - QBT_URL=http://localhost:8080 depends_on: - qbittorrent restart: unless-stopped qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./qbitwebui-data:/data - ./downloads:/downloads:ro environment: - ENCRYPTION_KEY=your-32-character-minimum-key-here - DOWNLOADS_PATH=/downloads depends_on: - qbittorrent restart: unless-stopped ``` ::: tip With VPN setup, use the Network Agent to verify your VPN is working correctly by checking the external IP. 
::: ### Multiple Instances Manage multiple qBittorrent instances: ```yaml services: qbittorrent-1: image: linuxserver/qbittorrent:latest container_name: qbittorrent-1 environment: - PUID=1000 - PGID=1000 - WEBUI_PORT=8080 volumes: - ./qbt1-config:/config - ./downloads-1:/downloads ports: - "8080:8080" - "6881:6881" - "6881:6881/udp" restart: unless-stopped qbittorrent-2: image: linuxserver/qbittorrent:latest container_name: qbittorrent-2 environment: - PUID=1000 - PGID=1000 - WEBUI_PORT=8080 volumes: - ./qbt2-config:/config - ./downloads-2:/downloads ports: - "8081:8080" - "6882:6881" - "6882:6881/udp" restart: unless-stopped qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./qbitwebui-data:/data environment: - ENCRYPTION_KEY=your-32-character-minimum-key-here restart: unless-stopped ``` Add both instances in qbitwebui with their respective URLs (`http://host:8080` and `http://host:8081`). ## Reverse Proxy ### Nginx ```nginx server { listen 443 ssl http2; server_name qbit.example.com; ssl_certificate /path/to/cert.pem; ssl_certificate_key /path/to/key.pem; location / { proxy_pass http://localhost:3000; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; } } ``` ### Caddy ``` qbit.example.com { reverse_proxy localhost:3000 } ``` ### Traefik (Labels) ```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest labels: - "traefik.enable=true" - "traefik.http.routers.qbitwebui.rule=Host(`qbit.example.com`)" - "traefik.http.routers.qbitwebui.entrypoints=websecure" - "traefik.http.routers.qbitwebui.tls=true" - "traefik.http.routers.qbitwebui.tls.certresolver=letsencrypt" - "traefik.http.services.qbitwebui.loadbalancer.server.port=3000" # ... 
rest of config ``` ### With External Authentication Using Authelia, Authentik, or similar: ```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest environment: - ENCRYPTION_KEY=your-key - DISABLE_AUTH=true # Let reverse proxy handle auth # ... rest of config ``` ## Updating ### Manual ```bash docker compose pull docker compose up -d ``` ### Watchtower (Automatic) ```yaml services: watchtower: image: containrrr/watchtower:latest container_name: watchtower volumes: - /var/run/docker.sock:/var/run/docker.sock environment: - WATCHTOWER_CLEANUP=true - WATCHTOWER_SCHEDULE=0 0 4 * * * # 4 AM daily restart: unless-stopped ``` ## Volumes | Path | Description | |------|-------------| | `/data` | Database and encryption salt (required) | | `/downloads` | Downloads directory for file browser (optional) | ## Ports | Port | Service | |------|---------| | `3000` | qbitwebui web interface | | `9876` | Network agent (exposed through qBittorrent container) | ## Health Check qbitwebui exposes `/api/config` for health checks: ```yaml healthcheck: test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/api/config"] interval: 30s timeout: 10s retries: 3 ``` ================================================ FILE: docs/guide/features.md ================================================ # Features ## Multi-Instance Dashboard Manage multiple qBittorrent instances from one interface: - Overview cards showing status, speeds, and torrent counts - Aggregate statistics across all instances - Quick switching between instances - Connection testing and version display ### Instance Options | Option | Description | |--------|-------------| | Skip Authentication | Use when qBittorrent has IP bypass enabled | | Enable Network Agent | Connect to net-agent for diagnostics | ## Torrent Management ### List View - Sortable columns (name, size, progress, speed, ratio, etc.) 
- Filter by status: All, Downloading, Seeding, Completed, Paused, Active, Inactive, Stalled, Checking, Error - Filter by category, tag, or tracker - Search by torrent name - Customizable columns with drag-to-reorder - Resizable column widths (persisted) ### Actions | Action | Description | |--------|-------------| | Start/Stop | Resume or pause torrents | | Recheck | Verify torrent data integrity | | Reannounce | Force tracker announce | | Delete | Remove torrent, optionally with files | | Rename | Change torrent name | | Export | Download .torrent file | | Set Category | Assign to a category | | Add/Remove Tags | Manage torrent tags | ### Details Panel Expandable panel showing: - **General**: Size, progress, ratio, ETA, speeds, seeds/peers, dates, save path - **Trackers**: List with status, add/remove trackers - **Peers**: Connected peers with client, flags, progress, speeds - **Files**: File tree with individual progress and priority control - **HTTP Sources**: Web seed URLs ### Keyboard Shortcuts | Key | Action | |-----|--------| | `↑` `↓` | Navigate between torrents | | `Ctrl+A` | Select all torrents | | `Escape` | Clear selection | ### Context Menu Right-click any torrent for quick actions including category/tag submenus. ## Custom Views Save your current filter and column setup: 1. Configure filters, columns, and sort order 2. Click the view selector → Save View 3. Name your view 4. Switch between saved views instantly ## Categories & Tags ### Categories - Create categories with optional custom save paths - Edit save paths for existing categories - Delete categories - Assign via context menu or details panel ### Tags - Create and delete tags - Add/remove tags from torrents - Filter torrents by tag ## Prowlarr Integration Search across all your indexers without leaving qbitwebui. ### Setup 1. Go to any instance → Settings icon → Integrations tab 2. Click **Add Prowlarr** 3. Enter Prowlarr URL and API key 4. Test connection and save ### Searching 1. 
Click the search icon in the header 2. Enter search query 3. Filter by indexer or category 4. View results with seeders, size, age, freeleech status 5. Click grab → select instance → optionally set category/path → confirm ## RSS Manager Manage RSS feeds and auto-download rules. ### Feeds - Add feeds by URL - Organize in folders - Refresh feeds manually - View articles with grab option ### Auto-Download Rules - Create rules with name patterns (regex supported) - Filter by category, episode, season - Set target category and save path - Preview matching articles ## File Browser Browse and manage downloaded files (requires `DOWNLOADS_PATH`). ### Operations | Operation | Description | |-----------|-------------| | Browse | Navigate directories | | Download | Download files or folders (as tar) | | Delete | Remove files/directories | | Move | Move to another location | | Copy | Copy to another location | | Rename | Rename file or directory | ## Cross-Seed (Experimental) Find cross-seeding opportunities using Prowlarr indexers. ### How It Works 1. Configure Prowlarr integration 2. Select which indexers to search 3. Run scan on your torrents 4. Review matches (size, name similarity) 5. Add matches to start cross-seeding ### Options - Match mode: Strict or Flexible - Dry run: Preview without adding - Category suffix for cross-seeded torrents - Blocklist for excluding certain releases ## Orphan Manager Detect and clean up problematic torrents. ### Detects - Torrents with missing files on disk - Torrents with unregistered tracker status ### Actions - Scan all instances at once - Bulk select orphans - Delete with or without files ## Statistics View transfer history with multiple time periods: - 15 minutes, 30 minutes, 1 hour - 4 hours, 12 hours, 24 hours - 7 days, 30 days, all-time Toggle between per-instance and aggregate views. ## Log Viewer View qBittorrent logs in real-time. 
### Application Logs Filter by level: - Normal - Info - Warning - Critical ### Peer Logs Connection events with IP, client, and direction. Auto-refresh available for both. ## Settings Panel Edit qBittorrent preferences directly. ### Tabs | Tab | Settings | |-----|----------| | Behavior | Language, startup, power management | | Downloads | Save paths, pre-allocation, torrent handling | | Connection | Ports, protocols, proxy | | Speed | Global/per-torrent limits, scheduling | | BitTorrent | DHT, PeX, encryption, queueing | | RSS | Auto-download, refresh interval | | WebUI | Address, auth, HTTPS, custom UI | | Advanced | Memory, disk cache, network options | Only changed values are saved. ## Themes ### Built-in Themes Multiple themes available including Dark, Light, Catppuccin variants, Nord, and more. ### Custom Themes Create your own theme with the theme editor: - Background colors (primary, secondary, tertiary) - Text colors (primary, secondary, muted) - Accent color - Border colors - Status colors (success, warning, error) Themes are saved in browser localStorage. ## Mobile Support Fully responsive with dedicated mobile interface: - Touch-optimized torrent list - Swipe actions - Mobile-specific navigation - All features accessible ================================================ FILE: docs/guide/getting-started.md ================================================ # Getting Started ## Requirements - Docker (recommended) or Bun runtime - A running qBittorrent instance with WebUI enabled ## Quick Start ```bash docker run -d \ --name qbitwebui \ -p 3000:3000 \ -v ./data:/data \ -e ENCRYPTION_KEY=$(openssl rand -hex 32) \ ghcr.io/maciejonos/qbitwebui:latest ``` Open `http://localhost:3000` in your browser. ## First Setup 1. **Create Account** - Register with username and password (first user is admin) 2. 
**Add Instance** - Click **+** and enter your qBittorrent details: - Label: A name for this instance (e.g., "Seedbox") - URL: qBittorrent WebUI address (e.g., `http://192.168.1.100:8080`) - Username & Password: Your qBittorrent credentials 3. **Connect** - Click the instance card to start managing torrents ::: tip If qBittorrent has "Bypass authentication for clients on localhost" enabled, check **Skip authentication** when adding the instance. ::: ## Docker Compose ### Minimal Setup ```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./data:/data environment: - ENCRYPTION_KEY=generate-a-32-char-key-here restart: unless-stopped ``` ### With File Browser ```yaml services: qbitwebui: image: ghcr.io/maciejonos/qbitwebui:latest container_name: qbitwebui ports: - "3000:3000" volumes: - ./data:/data - /path/to/downloads:/downloads:ro environment: - ENCRYPTION_KEY=generate-a-32-char-key-here - DOWNLOADS_PATH=/downloads restart: unless-stopped ``` ## Generating Encryption Key The `ENCRYPTION_KEY` is used to encrypt stored credentials. Generate a secure one: ```bash openssl rand -hex 32 ``` ::: warning Save this key securely. If you lose it, you'll need to re-add all instances. ::: ## Next Steps - [Configuration](/guide/configuration) - Environment variables and options - [Features](/guide/features) - All features explained - [Docker](/guide/docker) - Full deployment examples - [Network Agent](/guide/network-agent/) - Network diagnostics ================================================ FILE: docs/guide/network-agent/index.md ================================================ # Network Agent A lightweight companion service that provides network diagnostics from your qBittorrent host's perspective. ## Why Use It? 
When qBittorrent runs behind a VPN or on a remote server, you need to verify the network from that machine's perspective: - **VPN Verification** - Check external IP to confirm VPN is active - **Speed Testing** - Run speedtests from the actual download location - **DNS Debugging** - View configured DNS servers, check for leaks - **Connectivity Testing** - Ping, traceroute, dig from the host The agent runs in the same network namespace as qBittorrent, so all diagnostics reflect exactly what qBittorrent sees. ## Features | Feature | Description | |---------|-------------| | **IP Check** | External IP, city, region, country, ISP via ipinfo.io | | **Speedtest** | Ookla speedtest with server selection | | **DNS** | View /etc/resolv.conf nameservers | | **Interfaces** | List network interfaces with IPs and status | | **Terminal** | Execute ping, dig, nslookup, traceroute, curl, wget | ## Setup ### Basic (alongside qBittorrent) ```yaml services: qbittorrent: image: linuxserver/qbittorrent:latest container_name: qbittorrent ports: - "8080:8080" - "9876:9876" # Agent port volumes: - ./config:/config - ./downloads:/downloads restart: unless-stopped net-agent: image: ghcr.io/maciejonos/qbitwebui-agent:latest container_name: net-agent network_mode: "service:qbittorrent" environment: - QBT_URL=http://localhost:8080 depends_on: - qbittorrent restart: unless-stopped ``` ### With VPN (Gluetun) ```yaml services: gluetun: image: qmcgaw/gluetun container_name: gluetun cap_add: - NET_ADMIN devices: - /dev/net/tun:/dev/net/tun ports: - "8080:8080" - "9876:9876" volumes: - ./gluetun:/gluetun environment: - VPN_SERVICE_PROVIDER=your-provider - VPN_TYPE=wireguard # ... 
your VPN config healthcheck: test: ping -c 1 1.1.1.1 || exit 1 interval: 20s timeout: 10s retries: 5 restart: unless-stopped qbittorrent: image: lscr.io/linuxserver/qbittorrent:latest container_name: qbittorrent network_mode: service:gluetun depends_on: gluetun: condition: service_healthy restart: true environment: - PUID=1000 - PGID=1000 - TZ=Etc/UTC volumes: - ./qbittorrent:/config - ./downloads:/downloads healthcheck: test: ping -c 1 1.1.1.1 || exit 1 interval: 60s retries: 3 start_period: 20s timeout: 10s restart: unless-stopped net-agent: image: ghcr.io/maciejonos/qbitwebui-agent:latest container_name: net-agent network_mode: service:gluetun environment: - QBT_URL=http://localhost:8080 depends_on: qbittorrent: condition: service_healthy restart: true restart: unless-stopped ``` ::: tip With VPN setups, running the IP check will show the VPN's IP, not your home IP - confirming the VPN is working. ::: ## Enable in qbitwebui 1. Go to the dashboard 2. Click edit on your instance 3. Check **Enable Network Agent** 4. Save The **Network Tools** section will appear in the Tools menu. ## Environment Variables | Variable | Default | Description | |----------|---------|-------------| | `PORT` | `9876` | Port the agent listens on | | `QBT_URL` | `http://localhost:8080` | qBittorrent WebUI URL for auth | | `ALLOW_SELF_SIGNED_CERTS` | `false` | Accept self-signed TLS certificates | ## Authentication The agent validates requests by checking the qBittorrent session (SID). Only users with valid qBittorrent sessions can access the agent. **Auto-detection**: If qBittorrent has authentication disabled (localhost bypass), the agent automatically detects this and skips SID validation. 
## Terminal Commands The terminal supports these commands: | Command | Example | |---------|---------| | `ping` | `ping -c 4 8.8.8.8` | | `dig` | `dig google.com` | | `nslookup` | `nslookup example.com` | | `traceroute` | `traceroute 1.1.1.1` | | `curl` | `curl -I https://example.com` | | `wget` | `wget -q -O- https://ifconfig.me` | ## Troubleshooting ### Agent shows "Offline" 1. **Check container is running**: ```bash docker ps | grep net-agent ``` 2. **Check logs**: ```bash docker logs net-agent ``` 3. **Verify port is exposed**: Port 9876 must be exposed on the container that owns the network: - Without VPN: on qBittorrent container - With VPN: on Gluetun container 4. **Test connectivity**: ```bash curl http://your-host:9876/health # Should return: {"status":"ok"} ``` ### Authentication Errors - **Wrong QBT_URL**: Verify the URL is correct and accessible from inside the container - **Self-signed cert**: Set `ALLOW_SELF_SIGNED_CERTS=true` if qBittorrent uses HTTPS with self-signed certificate - **qBittorrent not ready**: Ensure qBittorrent is fully started before agent tries to connect ### Speedtest Fails - Check agent logs for specific errors - Some networks block speedtest servers - The Ookla CLI auto-accepts license on first run ## API Endpoints For advanced users or automation: | Endpoint | Auth | Description | |----------|------|-------------| | `GET /health` | No | Health check | | `GET /ip` | Yes | External IP info | | `GET /speedtest` | Yes | Run speedtest (`?server=ID` optional) | | `GET /speedtest/servers` | Yes | List nearby servers | | `GET /dns` | Yes | DNS configuration | | `GET /interfaces` | Yes | Network interfaces | | `GET /exec?cmd=...` | Yes | Execute command | Pass authentication via `X-QBT-SID` header or `SID` cookie. 
================================================ FILE: docs/index.md ================================================ --- layout: home hero: name: qbitwebui text: Modern qBittorrent Web UI actions: - theme: brand text: Get Started link: /guide/getting-started - theme: alt text: GitHub link: https://github.com/Maciejonos/qbitwebui features: - title: Multi-Instance details: Manage multiple qBittorrent instances from one dashboard - title: Prowlarr Search details: Search indexers and add torrents directly - title: Cross-Seed details: Find cross-seeding opportunities automatically - title: Network Agent details: Speedtest, IP check, and diagnostics from your qBittorrent host - title: File Browser details: Browse and manage downloaded files - title: Themes details: Built-in themes with custom theme editor --- ================================================ FILE: eslint.config.js ================================================ import js from '@eslint/js' import globals from 'globals' import reactHooks from 'eslint-plugin-react-hooks' import reactRefresh from 'eslint-plugin-react-refresh' import tseslint from 'typescript-eslint' import { defineConfig, globalIgnores } from 'eslint/config' export default defineConfig([ globalIgnores(['dist', 'docs']), { files: ['**/*.{ts,tsx}'], extends: [ js.configs.recommended, tseslint.configs.recommended, reactHooks.configs.flat.recommended, reactRefresh.configs.vite, ], languageOptions: { ecmaVersion: 2020, globals: globals.browser, }, }, ]) ================================================ FILE: index.html ================================================ qbitwebui
================================================ FILE: net-agent/Dockerfile ================================================ FROM golang:1.22-alpine AS builder WORKDIR /build COPY go.mod . COPY main.go . RUN CGO_ENABLED=0 go build -ldflags="-s -w" -o net-agent . FROM alpine:3.19 RUN apk add --no-cache \ curl \ wget \ ca-certificates \ bind-tools \ iproute2 \ iputils \ traceroute RUN ARCH=$(uname -m) && \ case "$ARCH" in \ x86_64) ARCH="x86_64" ;; \ aarch64) ARCH="aarch64" ;; \ *) echo "Unsupported arch: $ARCH" && exit 1 ;; \ esac && \ wget -q -O /tmp/speedtest.tgz \ "https://install.speedtest.net/app/cli/ookla-speedtest-1.2.0-linux-${ARCH}.tgz" && \ tar -xzf /tmp/speedtest.tgz -C /usr/local/bin speedtest && \ rm /tmp/speedtest.tgz && \ chmod +x /usr/local/bin/speedtest COPY --from=builder /build/net-agent /usr/local/bin/net-agent ENV PORT=9876 ENV QBT_URL=http://localhost:8080 EXPOSE 9876 ENTRYPOINT ["/usr/local/bin/net-agent"] ================================================ FILE: net-agent/README.md ================================================ # net-agent Lightweight network utility agent for qbitwebui. Runs alongside qBittorrent to provide network diagnostics from the same network perspective. ## Usage Deploy with `network_mode: "service:qbittorrent"` to share qBittorrent's network namespace: ```yaml services: qbittorrent: image: linuxserver/qbittorrent ports: - "8080:8080" - "9876:9876" # net-agent port net-agent: image: ghcr.io/maciejonos/qbitwebui-agent:latest network_mode: "service:qbittorrent" environment: - QBT_URL=http://localhost:8080 # qBittorrent is on localhost due to shared network ``` ## Authentication All endpoints (except `/health`) require a valid qBittorrent session. Pass the SID via: - Header: `X-QBT-SID: <sid>` - Cookie: `SID=<sid>` The agent validates the SID by calling qBittorrent's API. If qBittorrent has authentication disabled (IP bypass), the agent auto-detects this and skips SID validation. 
## Endpoints | Endpoint | Description | |----------|-------------| | `GET /health` | Health check (no auth required) | | `GET /ip` | External IP info via ipinfo.io | | `GET /speedtest` | Run Ookla speedtest (optional `?server=ID`) | | `GET /speedtest/servers` | List nearby speedtest servers | | `GET /dns` | Show configured DNS servers | | `GET /interfaces` | Network interface information | | `GET /exec?cmd=...` | Execute allowed commands (curl, wget, dig, ping, etc.) | ## Environment Variables | Variable | Default | Description | |----------|---------|-------------| | `PORT` | `9876` | Port to listen on | | `QBT_URL` | `http://localhost:8080` | qBittorrent WebUI URL for auth validation | | `ALLOW_SELF_SIGNED_CERTS` | `false` | Accept self-signed TLS certificates for qBittorrent | ## Building ```bash docker build -t qbitwebui-agent . ``` ## Local Development ```bash go run main.go ``` ================================================ FILE: net-agent/go.mod ================================================ module github.com/mac-torreon/qbitwebui/net-agent go 1.22 ================================================ FILE: net-agent/main.go ================================================ package main import ( "crypto/tls" "encoding/json" "fmt" "io" "log" "net/http" "os" "os/exec" "strings" "time" ) var ( qbtURL string httpClient *http.Client skipAuth bool ) func main() { log.SetFlags(log.Ldate | log.Ltime) port := os.Getenv("PORT") if port == "" { port = "9876" } qbtURL = os.Getenv("QBT_URL") if qbtURL == "" { qbtURL = "http://localhost:8080" } if os.Getenv("ALLOW_SELF_SIGNED_CERTS") == "true" { httpClient = &http.Client{ Timeout: 5 * time.Second, Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}, } } else { httpClient = &http.Client{Timeout: 5 * time.Second} } skipAuth = detectSkipAuth() if skipAuth { log.Printf("qBittorrent localhost auth enabled, using passthrough mode") } http.HandleFunc("/health", withLogging(handleHealth)) 
http.HandleFunc("/ip", withLogging(withAuth(handleIP))) http.HandleFunc("/speedtest", withLogging(withAuth(handleSpeedtest))) http.HandleFunc("/speedtest/servers", withLogging(withAuth(handleSpeedtestServers))) http.HandleFunc("/dns", withLogging(withAuth(handleDNS))) http.HandleFunc("/interfaces", withLogging(withAuth(handleInterfaces))) http.HandleFunc("/exec", withLogging(withAuth(handleExec))) log.Printf("net-agent listening on :%s (qbt: %s)", port, qbtURL) if err := http.ListenAndServe(":"+port, nil); err != nil { log.Fatalf("server error: %v", err) } } /* detectSkipAuth reports whether qBittorrent answers /api/v2/app/version with 200 without any SID cookie, i.e. auth is bypassed; any request/transport error is treated as "auth required" (false). */ func detectSkipAuth() bool { req, err := http.NewRequest("GET", qbtURL+"/api/v2/app/version", nil) if err != nil { return false } resp, err := httpClient.Do(req) if err != nil { return false } defer resp.Body.Close() return resp.StatusCode == http.StatusOK } /* withLogging wraps a handler so each request logs method, path, status, and rounded duration; the statusWriter wrapper captures the status code (default 200 when WriteHeader is never called). */ func withLogging(next http.HandlerFunc) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { start := time.Now() wrapped := &statusWriter{ResponseWriter: w, status: 200} next(wrapped, r) log.Printf("%s %s %d %v", r.Method, r.URL.Path, wrapped.status, time.Since(start).Round(time.Millisecond)) } } /* statusWriter records the HTTP status written through it for logging. */ type statusWriter struct { http.ResponseWriter status int } /* WriteHeader remembers the status before delegating to the wrapped writer. */ func (w *statusWriter) WriteHeader(status int) { w.status = status w.ResponseWriter.WriteHeader(status) } /* withAuth enforces qBittorrent-session auth unless skipAuth was detected at startup: the SID is read from the X-QBT-SID header, falling back to the SID cookie, and validated against qBittorrent via validateSID; missing or invalid SIDs get a 401 JSON error. */ func withAuth(next http.HandlerFunc) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { if skipAuth { next(w, r) return } sid := r.Header.Get("X-QBT-SID") if sid == "" { if cookie, err := r.Cookie("SID"); err == nil { sid = cookie.Value } } if sid == "" { log.Printf("auth failed: missing SID for %s", r.URL.Path) http.Error(w, `{"error":"missing SID"}`, http.StatusUnauthorized) return } if !validateSID(sid) { log.Printf("auth failed: invalid SID for %s", r.URL.Path) http.Error(w, `{"error":"invalid SID"}`, http.StatusUnauthorized) return } next(w, r) } } /* validateSID checks the SID by calling qBittorrent's version endpoint with it as a cookie; only a 200 response counts as valid. */ func validateSID(sid string) bool { req, err := http.NewRequest("GET", qbtURL+"/api/v2/app/version", nil) if
err != nil { return false } req.AddCookie(&http.Cookie{Name: "SID", Value: sid}) resp, err := httpClient.Do(req) if err != nil { return false } defer resp.Body.Close() return resp.StatusCode == http.StatusOK } func handleHealth(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(map[string]string{"status": "ok"}) } func handleIP(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") client := &http.Client{Timeout: 10 * time.Second} resp, err := client.Get("https://ipinfo.io/json") if err != nil { log.Printf("ip lookup failed: %v", err) http.Error(w, fmt.Sprintf(`{"error":"%s"}`, err.Error()), http.StatusBadGateway) return } defer resp.Body.Close() io.Copy(w, resp.Body) } func handleSpeedtest(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") serverID := r.URL.Query().Get("server") args := []string{"--accept-license", "--accept-gdpr", "--format=json"} if serverID != "" { args = append(args, "--server-id="+serverID) log.Printf("speedtest starting with server %s", serverID) } else { log.Printf("speedtest starting (auto server)") } out, err := exec.Command("speedtest", args...).Output() if err != nil { errMsg := err.Error() if exitErr, ok := err.(*exec.ExitError); ok { errMsg = string(exitErr.Stderr) } log.Printf("speedtest failed: %s", errMsg) http.Error(w, fmt.Sprintf(`{"error":"%s"}`, strings.ReplaceAll(errMsg, `"`, `\"`)), http.StatusInternalServerError) return } w.Write(out) } func handleSpeedtestServers(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") out, err := exec.Command("speedtest", "--accept-license", "--accept-gdpr", "--format=json", "--servers").Output() if err != nil { errMsg := err.Error() if exitErr, ok := err.(*exec.ExitError); ok { errMsg = string(exitErr.Stderr) } log.Printf("speedtest servers failed: %s", errMsg) http.Error(w, fmt.Sprintf(`{"error":"%s"}`, 
strings.ReplaceAll(errMsg, `"`, `\"`)), http.StatusInternalServerError) return } w.Write(out) } func handleDNS(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") out, err := exec.Command("cat", "/etc/resolv.conf").Output() if err != nil { http.Error(w, fmt.Sprintf(`{"error":"%s"}`, err.Error()), http.StatusInternalServerError) return } lines := strings.Split(string(out), "\n") var servers []string for _, line := range lines { line = strings.TrimSpace(line) if strings.HasPrefix(line, "nameserver") { parts := strings.Fields(line) if len(parts) >= 2 { servers = append(servers, parts[1]) } } } json.NewEncoder(w).Encode(map[string]interface{}{"servers": servers, "raw": string(out)}) } func handleInterfaces(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") out, err := exec.Command("ip", "-j", "addr").Output() if err != nil { outFallback, errFallback := exec.Command("ip", "addr").Output() if errFallback != nil { http.Error(w, fmt.Sprintf(`{"error":"%s"}`, err.Error()), http.StatusInternalServerError) return } json.NewEncoder(w).Encode(map[string]string{"raw": string(outFallback)}) return } w.Write(out) } func handleExec(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") cmd := r.URL.Query().Get("cmd") if cmd == "" { http.Error(w, `{"error":"missing cmd parameter"}`, http.StatusBadRequest) return } allowed := map[string]bool{"curl": true, "wget": true, "dig": true, "nslookup": true, "ping": true, "traceroute": true} parts := strings.Fields(cmd) if len(parts) == 0 { http.Error(w, `{"error":"empty command"}`, http.StatusBadRequest) return } if !allowed[parts[0]] { log.Printf("exec blocked: command not allowed: %s", parts[0]) http.Error(w, fmt.Sprintf(`{"error":"command not allowed: %s"}`, parts[0]), http.StatusForbidden) return } log.Printf("exec: %s", cmd) execCmd := exec.CommandContext(r.Context(), parts[0], parts[1:]...) 
out, err := execCmd.CombinedOutput() if err != nil { log.Printf("exec error: %v", err) json.NewEncoder(w).Encode(map[string]interface{}{"output": string(out), "error": err.Error()}) return } json.NewEncoder(w).Encode(map[string]string{"output": string(out)}) } ================================================ FILE: package.json ================================================ { "name": "qbitwebui", "private": true, "version": "2.43.0", "type": "module", "packageManager": "bun@1.3.2", "engines": { "bun": ">=1.3.2" }, "scripts": { "preinstall": "npx only-allow bun", "dev": "concurrently \"bun run dev:server\" \"vite\"", "dev:server": "bun run --watch src/server/index.ts", "dev:client": "vite", "build": "tsc -b && vite build", "start": "bun run src/server/index.ts", "check": "tsc --noEmit", "lint": "eslint .", "format": "prettier --write src/", "preview": "vite preview", "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage", "docs:dev": "vitepress dev docs", "docs:build": "vitepress build docs", "docs:preview": "vitepress preview docs" }, "dependencies": { "@tanstack/react-query": "^5.99.0", "colord": "^2.9.3", "hono": "^4.12.14", "jszip": "^3.10.1", "lucide-react": "^0.563.0", "react": "^19.2.5", "react-colorful": "^5.6.1", "react-dom": "^19.2.5", "tar-stream": "^3.1.8", "vaul": "^1.1.2", "xml2js": "^0.6.2" }, "devDependencies": { "@eslint/js": "^9.39.4", "@tailwindcss/vite": "^4.2.2", "@testing-library/dom": "^10.4.1", "@testing-library/react": "^16.3.2", "@types/bun": "^1.3.12", "@types/jsdom": "^27.0.0", "@types/node": "^24.12.2", "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", "@types/tar-stream": "^3.1.4", "@types/xml2js": "^0.4.14", "@vitejs/plugin-react": "^5.2.0", "@vitest/coverage-v8": "4.0.17", "concurrently": "^9.2.1", "eslint": "^9.39.4", "eslint-plugin-react-hooks": "^7.0.1", "eslint-plugin-react-refresh": "^0.4.26", "globals": "^16.5.0", "jsdom": "^27.4.0", "picocolors": "^1.1.1", "prettier": "^3.8.3", 
"tailwindcss": "^4.2.2", "typescript": "~5.9.3", "typescript-eslint": "^8.58.2", "vite": "^7.3.2", "vitepress": "^1.6.4", "vitest": "^4.1.4" } } ================================================ FILE: src/App.tsx ================================================ import { useState, useEffect, useCallback, lazy, Suspense } from 'react' import { QueryClient, QueryClientProvider } from '@tanstack/react-query' import { ThemeProvider } from './contexts/ThemeProvider' import { InstanceProvider } from './contexts/InstanceProvider' import { PaginationProvider } from './contexts/PaginationProvider' import { Layout } from './components/Layout' import { AuthForm } from './components/AuthForm' import { InstanceManager } from './components/InstanceManager' import { TorrentList } from './components/TorrentList' import { getMe, type User } from './api/auth' import { getInstances, type Instance } from './api/instances' const MobileApp = lazy(() => import('./mobile/MobileApp').then((m) => ({ default: m.MobileApp }))) const queryClient = new QueryClient({ defaultOptions: { queries: { retry: 1, staleTime: 1000, }, }, }) const isMobile = () => window.innerWidth < 768 type View = 'loading' | 'auth' | 'instances' | 'torrents' | 'mobile' type Tab = 'dashboard' | 'tools' type Tool = 'indexers' | 'files' | 'orphans' | 'rss' | 'logs' | 'cross-seed' | 'statistics' | 'network' | null function parseHash(): { tab: Tab; instanceId: number | null; tool: Tool } { const hash = window.location.hash.slice(1) if (hash === 'tools') return { tab: 'tools', instanceId: null, tool: null } if (hash.startsWith('tools/')) { const toolName = hash.slice(6) as Tool const validTools: Tool[] = ['indexers', 'files', 'orphans', 'rss', 'logs', 'cross-seed', 'statistics', 'network'] if (validTools.includes(toolName)) { return { tab: 'tools', instanceId: null, tool: toolName } } return { tab: 'tools', instanceId: null, tool: null } } if (hash.startsWith('instance/')) { const id = parseInt(hash.slice(9), 10) if 
// NOTE(review): this region is the tail of parseHash() plus setHash() and the App
// component from src/App.tsx. The JSX markup (component tags such as <AuthForm>,
// <MobileLayout>, <Suspense fallback=...>) appears to have been stripped by the
// extraction tool — only the attribute expressions and text children remain — so the
// code below is preserved byte-identical rather than reconstructed. Visible behavior:
//  - setHash(tab, instanceId, tool): writes the location hash as `instance/<id>`,
//    `tools/<tool>`, `tools`, or '' depending on arguments.
//  - App(): a view state machine ('loading' | 'auth' | 'mobile' | 'instances' |
//    'torrents') driven by /api/config (authDisabled short-circuits auth with a
//    guest user), /api/auth/me, the URL hash (applyRoute + a 'hashchange'
//    listener), and the 'autoSelectSingleInstance' localStorage flag which jumps
//    straight into a sole instance.
// TODO(review): recover the original JSX from the repository before editing further.
(!isNaN(id)) return { tab: 'dashboard', instanceId: id, tool: null } } return { tab: 'dashboard', instanceId: null, tool: null } } function setHash(tab: Tab, instanceId: number | null, tool?: Tool) { if (instanceId) { window.location.hash = `instance/${instanceId}` } else if (tab === 'tools') { window.location.hash = tool ? `tools/${tool}` : 'tools' } else { window.location.hash = '' } } export default function App() { const [view, setView] = useState('loading') const [user, setUser] = useState(null) const [currentInstance, setCurrentInstance] = useState(null) const [authDisabled, setAuthDisabled] = useState(false) const [initialTab, setInitialTab] = useState('dashboard') const [initialTool, setInitialTool] = useState(null) const applyRoute = useCallback(async (authenticated: boolean) => { if (!authenticated) return const { tab, instanceId, tool } = parseHash() setInitialTab(tab) setInitialTool(tool) if (instanceId) { const instances = await getInstances().catch(() => []) const instance = instances.find((i) => i.id === instanceId) if (instance) { setCurrentInstance(instance) setView('torrents') return } } const autoSelect = localStorage.getItem('autoSelectSingleInstance') === 'true' if (autoSelect) { const instances = await getInstances().catch(() => []) if (instances.length === 1) { setCurrentInstance(instances[0]) setView('torrents') setHash('dashboard', instances[0].id) return } } setCurrentInstance(null) setView(isMobile() ? 
'mobile' : 'instances') }, []) useEffect(() => { fetch('/api/config') .then((r) => r.json()) .then(({ authDisabled }) => { if (authDisabled) { setAuthDisabled(true) setUser({ id: 1, username: 'guest' }) applyRoute(true) return } getMe() .then((u) => { if (u) { setUser(u) applyRoute(true) } else { setView('auth') } }) .catch(() => setView('auth')) }) .catch(() => setView('auth')) }, [applyRoute]) useEffect(() => { function handleHashChange() { const { tab, instanceId, tool } = parseHash() setInitialTab(tab) setInitialTool(tool) if (instanceId && currentInstance?.id !== instanceId) { getInstances() .then((instances) => { const instance = instances.find((i) => i.id === instanceId) if (instance) { setCurrentInstance(instance) setView('torrents') } else { setCurrentInstance(null) setView('instances') } }) .catch(() => { setCurrentInstance(null) setView('instances') }) } else if (!instanceId && currentInstance !== null) { setCurrentInstance(null) setView(isMobile() ? 'mobile' : 'instances') } } window.addEventListener('hashchange', handleHashChange) return () => window.removeEventListener('hashchange', handleHashChange) }, [currentInstance]) function selectInstance(instance: Instance) { setCurrentInstance(instance) setInitialTab('dashboard') setView('torrents') setHash('dashboard', instance.id) } function goToTab(tab: Tab) { setCurrentInstance(null) setInitialTab(tab) setInitialTool(null) setView('instances') setHash(tab, null) } function handleTabChange(tab: Tab) { setInitialTab(tab) setInitialTool(null) setHash(tab, null) } function handleToolChange(tool: Tool) { setInitialTool(tool) setHash('tools', null, tool) } if (view === 'loading') { return (
Loading...
) } if (view === 'auth') { return ( { setUser(u) setView(isMobile() ? 'mobile' : 'instances') }} /> ) } if (view === 'mobile') { return (
Loading...
} > { setUser(null) setView('auth') }} authDisabled={authDisabled} />
// NOTE(review): the JSX in this App.tsx fragment was stripped by the extraction tool
// (component tags missing); preserved verbatim — recover from the repository.
) } if (view === 'instances' || !currentInstance) { return ( { setUser(null) setCurrentInstance(null) setView('auth') }} authDisabled={authDisabled} initialTab={initialTab} initialTool={initialTool} onTabChange={handleTabChange} onToolChange={handleToolChange} /> ) } return ( { setUser(null) setCurrentInstance(null) setView('auth') }} > ) }

// ================================================
// FILE: src/api/auth.ts
// ================================================
// Thin client for the /api/auth endpoints. All calls send cookies
// (credentials: 'include') because sessions are cookie-based.
// FIX(review): generic type arguments were stripped by extraction
// (`Promise` with no parameter); restored below from each function's body
// (e.g. getMe returns null on !res.ok, so Promise<User | null>).

/** Authenticated user as returned by the auth endpoints. */
export interface User {
  id: number
  username: string
}

/**
 * Create a new account and return the resulting user.
 * @throws Error with the server-provided message (or a generic one) on failure.
 */
export async function register(username: string, password: string): Promise<User> {
  const res = await fetch('/api/auth/register', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ username, password }),
  })
  if (!res.ok) {
    const data = await res.json()
    throw new Error(data.error || 'Registration failed')
  }
  return res.json()
}

/**
 * Log in with username/password; the session cookie is set by the server.
 * @throws Error with the server-provided message (or a generic one) on failure.
 */
export async function login(username: string, password: string): Promise<User> {
  const res = await fetch('/api/auth/login', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ username, password }),
  })
  if (!res.ok) {
    const data = await res.json()
    throw new Error(data.error || 'Login failed')
  }
  return res.json()
}

/** End the current session. Best-effort: server errors are ignored. */
export async function logout(): Promise<void> {
  await fetch('/api/auth/logout', {
    method: 'POST',
    credentials: 'include',
  })
}

/** Return the current user, or null when there is no valid session. */
export async function getMe(): Promise<User | null> {
  const res = await fetch('/api/auth/me', {
    credentials: 'include',
  })
  if (!res.ok) return null
  return res.json()
}

/**
 * Change the current user's password.
 * @throws Error with the server-provided message (or a generic one) on failure.
 */
export async function changePassword(currentPassword: string, newPassword: string): Promise<void> {
  const res = await fetch('/api/auth/password', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ currentPassword, newPassword }),
  })
  if (!res.ok) {
    const data = await res.json()
    throw new Error(data.error || 'Failed to change password')
  }
}

// ================================================
// FILE: src/api/crossSeed.ts
// ================================================
// Client for the /api/cross-seed endpoints (per-instance cross-seeding
// configuration, scans, scheduler status, cache and history).
// FIX(review): generic type arguments stripped by extraction were restored
// from each function's error message / endpoint.

/** Matching strictness used by the cross-seed matcher. */
export type MatchMode = 'strict' | 'flexible'

/** Persisted per-instance cross-seed settings (snake_case mirrors the DB row). */
export interface CrossSeedConfig {
  instance_id: number
  enabled: boolean
  interval_hours: number
  delay_seconds: number
  dry_run: boolean
  category_suffix: string
  tag: string
  skip_recheck: boolean
  integration_id: number | null
  indexer_ids: number[]
  match_mode: MatchMode
  link_dir: string | null
  blocklist: string[]
  include_single_episodes: boolean
  last_run: number | null
  next_run: number | null
}

/** A Torznab-capable indexer exposed by the configured integration. */
export interface TorznabIndexer {
  id: number
  name: string
  protocol: string
  supportsSearch: boolean
  categories: number[]
}

/** Summary of one completed (or dry-run) cross-seed scan. */
export interface ScanResult {
  instanceId: number
  torrentsTotal: number
  torrentsScanned: number
  torrentsSkipped: number
  matchesFound: number
  torrentsAdded: number
  errors: string[]
  dryRun: boolean
  startedAt: number
  completedAt: number
}

/** Live scheduler state for one instance. */
export interface SchedulerStatus {
  instanceId: number
  instanceLabel: string
  enabled: boolean
  intervalHours: number
  dryRun: boolean
  lastRun: number | null
  nextRun: number | null
  running: boolean
  lastResult: ScanResult | null
}

/** Torrent-cache and output-directory statistics. */
export interface CacheStats {
  cache: { count: number; totalSize: number }
  output: { count: number; files: string[] }
}

/** A torrent that has been searched for cross-seed candidates. */
export interface Searchee {
  id: number
  instance_id: number
  torrent_hash: string
  torrent_name: string
  total_size: number
  file_count: number
  file_sizes: string
  first_searched: number
  last_searched: number
  decision_count: number
}

/** One match/reject decision recorded for a searchee. */
export interface Decision {
  id: number
  searchee_id: number
  guid: string
  info_hash: string | null
  candidate_name: string
  candidate_size: number | null
  decision: string
  first_seen: number
  last_seen: number
}

/** Fetch the cross-seed configuration for one instance. */
export async function getCrossSeedConfig(instanceId: number): Promise<CrossSeedConfig> {
  const res = await fetch(`/api/cross-seed/config/${instanceId}`, { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch cross-seed config')
  return res.json()
}

/**
 * Update the cross-seed configuration; the server reports whether the
 * configured link_dir is usable via `linkDirValid`.
 */
export async function updateCrossSeedConfig(
  instanceId: number,
  // NOTE(review): the original parameter type was mangled to `Partial>` by
  // extraction (an inner generic was eaten). Partial<CrossSeedConfig> is the
  // widest compatible reading — confirm the exact Omit keys against git history.
  config: Partial<CrossSeedConfig>
): Promise<{ linkDirValid?: boolean }> {
  const res = await fetch(`/api/cross-seed/config/${instanceId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify(config),
  })
  if (!res.ok) {
    const err = await res.json()
    throw new Error(err.error || 'Failed to update config')
  }
  return res.json()
}

/** List searchable indexers, optionally scoped to one integration. */
export async function getIndexers(instanceId: number, integrationId?: number): Promise<TorznabIndexer[]> {
  const params = integrationId ? `?integrationId=${integrationId}` : ''
  const res = await fetch(`/api/cross-seed/indexers/${instanceId}${params}`, { credentials: 'include' })
  if (!res.ok) {
    const err = await res.json()
    throw new Error(err.error || 'Failed to fetch indexers')
  }
  return res.json()
}

/** Kick off a scan now; `force` bypasses server-side throttling. */
export async function triggerScan(instanceId: number, force = false): Promise<ScanResult> {
  const res = await fetch(`/api/cross-seed/scan/${instanceId}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ force }),
  })
  if (!res.ok) {
    const err = await res.json()
    throw new Error(err.error || 'Scan failed')
  }
  return res.json()
}

/** Scheduler status for every instance. */
export async function getSchedulerStatus(): Promise<SchedulerStatus[]> {
  const res = await fetch('/api/cross-seed/status', { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch scheduler status')
  return res.json()
}

/** Scheduler status for one instance. */
export async function getInstanceStatus(instanceId: number): Promise<SchedulerStatus> {
  const res = await fetch(`/api/cross-seed/status/${instanceId}`, { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch instance status')
  return res.json()
}

/** Clear the torrent cache and output directory; returns counts removed. */
export async function clearCache(instanceId: number): Promise<{ cacheCleared: number; outputCleared: number }> {
  const res = await fetch(`/api/cross-seed/cache/${instanceId}/clear`, {
    method: 'POST',
    credentials: 'include',
  })
  if (!res.ok) throw new Error('Failed to clear cache')
  return res.json()
}

/** Fetch cache/output statistics for one instance. */
export async function getCacheStats(instanceId: number): Promise<CacheStats> {
  const res = await fetch(`/api/cross-seed/cache/${instanceId}/stats`, { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch cache stats')
  return res.json()
}

/** Page through the search history (most parameters default server-side). */
export async function getSearchHistory(
  instanceId: number,
  limit = 100,
  offset = 0
): Promise<{ searchees: Searchee[]; total: number }> {
  const params = new URLSearchParams({ limit: String(limit), offset: String(offset) })
  const res = await fetch(`/api/cross-seed/history/${instanceId}?${params}`, { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch search history')
  return res.json()
}

/** All decisions recorded for a given searchee. */
export async function getDecisions(instanceId: number, searcheeId: number): Promise<Decision[]> {
  const res = await fetch(`/api/cross-seed/history/${instanceId}/${searcheeId}/decisions`, {
    credentials: 'include',
  })
  if (!res.ok) throw new Error('Failed to fetch decisions')
  return res.json()
}

/** Request cancellation of a running scan. */
export async function stopScan(instanceId: number): Promise<{ stopped: boolean }> {
  const res = await fetch(`/api/cross-seed/stop/${instanceId}`, {
    method: 'POST',
    credentials: 'include',
  })
  if (!res.ok) {
    const err = await res.json()
    throw new Error(err.error || 'Failed to stop scan')
  }
  return res.json()
}

/** One line from the cross-seed worker log. */
export interface LogEntry {
  timestamp: string
  level: 'INFO' | 'WARN' | 'ERROR'
  message: string
}

/** Tail the cross-seed log (newest `limit` entries). */
export async function getLogs(limit = 100): Promise<LogEntry[]> {
  const res = await fetch(`/api/cross-seed/logs?limit=${limit}`, { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch logs')
  return res.json()
}

// ================================================
// FILE: src/api/files.ts
// ================================================
// Client for the server-side file browser (/api/files).

/** One directory listing entry. */
export interface FileEntry {
  name: string
  size: number
  isDirectory: boolean
  modified: number
}

/** List the contents of a server-side directory. */
export async function listFiles(path: string): Promise<FileEntry[]> {
  const res = await fetch(`/api/files?path=${encodeURIComponent(path)}`, {
    credentials: 'include',
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to list files')
  }
  return res.json()
}

export function
getDownloadUrl(path: string): string {
  // Pure URL builder for the download endpoint (no request is made here).
  return `/api/files/download?path=${encodeURIComponent(path)}`
}

/** Whether the server's browse root is writable (false on any error). */
export async function checkWritable(): Promise<boolean> {
  const res = await fetch('/api/files/writable', { credentials: 'include' })
  if (!res.ok) return false
  const data = await res.json()
  return data.writable
}

/** Delete the given server-side paths. */
export async function deleteFiles(paths: string[]): Promise<void> {
  const res = await fetch('/api/files/delete', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ paths }),
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to delete files')
  }
}

/** Move the given paths into `destination`. */
export async function moveFiles(paths: string[], destination: string): Promise<void> {
  const res = await fetch('/api/files/move', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ paths, destination }),
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to move files')
  }
}

/** Copy the given paths into `destination`. */
export async function copyFiles(paths: string[], destination: string): Promise<void> {
  const res = await fetch('/api/files/copy', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ paths, destination }),
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to copy files')
  }
}

/** Rename a single file or directory in place. */
export async function renameFile(path: string, newName: string): Promise<void> {
  const res = await fetch('/api/files/rename', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ path, newName }),
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to rename file')
  }
}

// ================================================
// FILE: src/api/instances.ts
// ================================================
// CRUD client for configured qBittorrent instances (/api/instances).
// FIX(review): stripped generic return types restored (Instance / Instance[]).

/** A stored qBittorrent instance (snake_case mirrors the DB row). */
export interface Instance {
  id: number
  label: string
  url: string
  qbt_username: string | null
  skip_auth: boolean
  agent_enabled: boolean
  agent_url: string | null
  created_at: number
}

/** Payload for creating an instance. */
export interface CreateInstanceData {
  label: string
  url: string
  qbt_username?: string
  qbt_password?: string
  skip_auth?: boolean
  agent_enabled?: boolean
  agent_url?: string
}

/** Payload for updating an instance (agent_url may be cleared with null). */
export interface UpdateInstanceData {
  label?: string
  url?: string
  qbt_username?: string
  qbt_password?: string
  skip_auth?: boolean
  agent_enabled?: boolean
  agent_url?: string | null
}

/** List all configured instances. */
export async function getInstances(): Promise<Instance[]> {
  const res = await fetch('/api/instances', {
    credentials: 'include',
  })
  if (!res.ok) throw new Error('Failed to fetch instances')
  return res.json()
}

/** Create an instance and return the stored row. */
export async function createInstance(data: CreateInstanceData): Promise<Instance> {
  const res = await fetch('/api/instances', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify(data),
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to create instance')
  }
  return res.json()
}

/** Update an instance and return the stored row. */
export async function updateInstance(id: number, data: UpdateInstanceData): Promise<Instance> {
  const res = await fetch(`/api/instances/${id}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify(data),
  })
  if (!res.ok) {
    const error = await res.json()
    throw new Error(error.error || 'Failed to update instance')
  }
  return res.json()
}

/** Delete an instance by id. */
export async function deleteInstance(id: number): Promise<void> {
  const res = await fetch(`/api/instances/${id}`, {
    method: 'DELETE',
    credentials: 'include',
  })
  if (!res.ok) throw new Error('Failed to delete instance')
}

// ================================================
// FILE: src/api/integrations.ts
// ================================================
// Client for Prowlarr/Torznab integrations (/api/integrations).

/** A stored integration (Prowlarr etc.). */
export interface Integration {
  id: number
  type: string
  label: string
  url: string
  created_at: number
}

/** Payload for creating an integration. */
export interface CreateIntegrationData {
  type: string
  label: string
  url: string
  api_key: string
}

/** An indexer as reported by the integration. */
export interface Indexer {
  id: number
  name: string
  enable: boolean
  protocol: string
}

export
interface ProwlarrCategory { id: number name: string subCategories?: ProwlarrCategory[] } export interface SearchResult { guid: string indexerId: number indexer: string title: string size: number publishDate: string downloadUrl?: string magnetUrl?: string seeders?: number leechers?: number categories: { id: number; name: string }[] indexerFlags?: string[] } export async function getIntegrations(): Promise { const res = await fetch('/api/integrations', { credentials: 'include' }) if (!res.ok) throw new Error('Failed to fetch integrations') return res.json() } export async function createIntegration(data: CreateIntegrationData): Promise { const res = await fetch('/api/integrations', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify(data), }) if (!res.ok) { const err = await res.json() throw new Error(err.error || 'Failed to create integration') } return res.json() } export async function deleteIntegration(id: number): Promise { const res = await fetch(`/api/integrations/${id}`, { method: 'DELETE', credentials: 'include', }) if (!res.ok) throw new Error('Failed to delete integration') } export async function testIntegrationConnection( url: string, apiKey: string ): Promise<{ success: boolean; version?: string; error?: string }> { const res = await fetch('/api/integrations/test', { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ url, api_key: apiKey }), }) if (!res.ok) { const err = await res.json().catch(() => ({})) return { success: false, error: err.error || 'Connection test failed' } } return res.json() } export async function getIndexers(integrationId: number): Promise { const res = await fetch(`/api/integrations/${integrationId}/indexers`, { credentials: 'include' }) if (!res.ok) throw new Error('Failed to fetch indexers') return res.json() } export async function getProwlarrCategories(integrationId: number): Promise { const res = await 
fetch(`/api/integrations/${integrationId}/categories`, { credentials: 'include' }) if (!res.ok) throw new Error('Failed to fetch categories') return res.json() } export async function search( integrationId: number, query: string, options: { indexerIds?: string; categories?: string; type?: string } = {} ): Promise { const params = new URLSearchParams({ query }) if (options.indexerIds) params.set('indexerIds', options.indexerIds) if (options.categories) params.set('categories', options.categories) if (options.type) params.set('type', options.type) const res = await fetch(`/api/integrations/${integrationId}/search?${params}`, { credentials: 'include' }) if (!res.ok) { const err = await res.json() throw new Error(err.error || 'Search failed') } return res.json() } export async function grabRelease( integrationId: number, release: { guid: string; indexerId: number; downloadUrl?: string; magnetUrl?: string }, instanceId: number, options?: { category?: string; savepath?: string; downloadPath?: string } ): Promise { const res = await fetch(`/api/integrations/${integrationId}/grab`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, credentials: 'include', body: JSON.stringify({ ...release, instanceId, ...options }), }) if (!res.ok) { const err = await res.json() throw new Error(err.error || 'Failed to grab release') } } ================================================ FILE: src/api/netAgent.ts ================================================ export interface IpInfo { ip: string city: string region: string country: string loc: string org: string postal: string timezone: string } export interface SpeedtestResult { type: string timestamp: string ping: { jitter: number; latency: number; low: number; high: number } download: { bandwidth: number; bytes: number; elapsed: number } upload: { bandwidth: number; bytes: number; elapsed: number } isp: string interface: { internalIp: string; name: string; externalIp: string; isVpn: boolean } server: { id: number; host: 
string; name: string; location: string; country: string } result: { url: string } } export interface SpeedtestServer { id: number host: string port: number name: string location: string country: string } export interface DnsInfo { servers: string[] raw: string } export interface NetworkInterface { ifindex: number ifname: string flags: string[] mtu: number operstate: string address: string addr_info: { family: string; local: string; prefixlen: number; scope: string }[] } async function agentRequest(instanceId: number, endpoint: string): Promise { const res = await fetch(`/api/instances/${instanceId}/agent${endpoint}`, { credentials: 'include', }) if (!res.ok) { const text = await res.text() throw new Error(text || `Agent error: ${res.status}`) } return res.json() } export async function getIpInfo(instanceId: number): Promise { return agentRequest(instanceId, '/ip') } export async function runSpeedtest(instanceId: number, serverId?: number): Promise { const endpoint = serverId ? `/speedtest?server=${serverId}` : '/speedtest' return agentRequest(instanceId, endpoint) } export async function getSpeedtestServers(instanceId: number): Promise<{ servers: SpeedtestServer[] }> { return agentRequest<{ servers: SpeedtestServer[] }>(instanceId, '/speedtest/servers') } export async function getDnsInfo(instanceId: number): Promise { return agentRequest(instanceId, '/dns') } export async function getInterfaces(instanceId: number): Promise { return agentRequest(instanceId, '/interfaces') } export async function execCommand(instanceId: number, cmd: string): Promise<{ output: string; error?: string }> { return agentRequest<{ output: string; error?: string }>(instanceId, `/exec?cmd=${encodeURIComponent(cmd)}`) } export async function checkAgentHealth(instanceId: number): Promise { try { const res = await fetch(`/api/instances/${instanceId}/agent/health`, { credentials: 'include', }) return res.ok } catch { return false } } ================================================ FILE: 
// src/api/qbittorrent.ts
// ================================================
// Client for the qBittorrent WebUI v2 API, proxied per instance at
// /api/instances/:id/qbt/v2. FIX(review): stripped generics on request<T>
// and return types restored from the imported type names and endpoints.
import JSZip from 'jszip'
import type { Torrent, TorrentFilter, TransferInfo, SyncMaindata } from '../types/qbittorrent'
import type { TorrentProperties, Tracker, PeersResponse, TorrentFile, WebSeed } from '../types/torrentDetails'
import type { QBittorrentPreferences } from '../types/preferences'
import type { RSSItems, RSSRules, RSSRule, MatchingArticles } from '../types/rss'

/** Proxy base path for one instance's qBittorrent v2 API. */
function getBase(instanceId: number): string {
  return `/api/instances/${instanceId}/qbt/v2`
}

/** GET/POST helper for endpoints that return JSON. */
async function request<T>(instanceId: number, endpoint: string, options?: RequestInit): Promise<T> {
  const res = await fetch(`${getBase(instanceId)}${endpoint}`, {
    ...options,
    credentials: 'include',
  })
  if (!res.ok) {
    throw new Error(`API error: ${res.status}`)
  }
  // qBittorrent sometimes returns empty/non-JSON bodies on proxy errors;
  // surface those explicitly instead of letting JSON.parse throw raw.
  const text = await res.text()
  if (!text) {
    throw new Error('Empty response from API')
  }
  try {
    return JSON.parse(text)
  } catch {
    throw new Error(`Invalid JSON response: ${text.slice(0, 100)}`)
  }
}

/** Helper for fire-and-forget endpoints whose body is irrelevant. */
async function action(instanceId: number, endpoint: string, options?: RequestInit): Promise<void> {
  const res = await fetch(`${getBase(instanceId)}${endpoint}`, {
    ...options,
    credentials: 'include',
  })
  if (!res.ok) {
    throw new Error(`API error: ${res.status}`)
  }
}

/** Server-side torrent list filters. */
export interface TorrentFilterOptions {
  filter?: TorrentFilter
  category?: string
  tag?: string
}

/** List torrents, optionally filtered ('all' is the server default). */
export async function getTorrents(instanceId: number, options: TorrentFilterOptions = {}): Promise<Torrent[]> {
  const params = new URLSearchParams()
  if (options.filter && options.filter !== 'all') params.set('filter', options.filter)
  if (options.category) params.set('category', options.category)
  if (options.tag) params.set('tag', options.tag)
  const query = params.toString()
  return request<Torrent[]>(instanceId, `/torrents/info${query ? `?${query}` : ''}`)
}

/** Global transfer rates and limits. */
export async function getTransferInfo(instanceId: number): Promise<TransferInfo> {
  return request<TransferInfo>(instanceId, '/transfer/info')
}

/** Full sync snapshot (rid=0 requests a complete state, not a delta). */
export async function getSyncMaindata(instanceId: number): Promise<SyncMaindata> {
  return request<SyncMaindata>(instanceId, '/sync/maindata?rid=0')
}

/** Pause the given torrents ('stop' in qBittorrent >= 5 terminology). */
export async function stopTorrents(instanceId: number, hashes: string[]): Promise<void> {
  await action(instanceId, '/torrents/stop', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|') }),
  })
}

/** Resume the given torrents. */
export async function startTorrents(instanceId: number, hashes: string[]): Promise<void> {
  await action(instanceId, '/torrents/start', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|') }),
  })
}

/** Force-recheck the given torrents. */
export async function recheckTorrents(instanceId: number, hashes: string[]): Promise<void> {
  await action(instanceId, '/torrents/recheck', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|') }),
  })
}

/** Reannounce the given torrents to their trackers. */
export async function reannounceTorrents(instanceId: number, hashes: string[]): Promise<void> {
  await action(instanceId, '/torrents/reannounce', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|') }),
  })
}

/** Remove torrents; `deleteFiles` also removes downloaded data. */
export async function deleteTorrents(instanceId: number, hashes: string[], deleteFiles = false): Promise<void> {
  await action(instanceId, '/torrents/delete', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({
      hashes: hashes.join('|'),
      deleteFiles: deleteFiles.toString(),
    }),
  })
}

/** Options accepted by /torrents/add. */
export interface AddTorrentOptions {
  urls?: string
  savepath?: string
  category?: string
  tags?: string
  paused?: boolean
  sequentialDownload?: boolean
  firstLastPiecePrio?: boolean
  autoTMM?: boolean
}

/** Add torrents from URLs/magnets and/or uploaded .torrent files. */
export async function addTorrent(instanceId: number, options: AddTorrentOptions, files?: File[]): Promise<void> {
  const formData = new FormData()
  if (files) {
    files.forEach((file) => formData.append('torrents', file))
  }
  if (options.urls) {
    formData.append('urls', options.urls)
  }
  if (options.savepath) {
    formData.append('savepath', options.savepath)
  }
  if (options.category) {
    formData.append('category', options.category)
  }
  if (options.tags) {
    formData.append('tags', options.tags)
  }
  if (options.paused !== undefined) {
    formData.append('paused', options.paused.toString())
  }
  if (options.sequentialDownload) {
    formData.append('sequentialDownload', 'true')
  }
  if (options.firstLastPiecePrio) {
    formData.append('firstLastPiecePrio', 'true')
  }
  if (options.autoTMM !== undefined) {
    formData.append('autoTMM', options.autoTMM.toString())
  }
  const res = await fetch(`${getBase(instanceId)}/torrents/add`, {
    method: 'POST',
    credentials: 'include',
    body: formData,
  })
  if (!res.ok) {
    throw new Error(`Failed to add torrent: ${res.status}`)
  }
}

/** A qBittorrent category. */
export interface Category {
  name: string
  savePath: string
}

/** Map of category name -> Category. */
export async function getCategories(instanceId: number): Promise<Record<string, Category>> {
  return request<Record<string, Category>>(instanceId, '/torrents/categories')
}

/** Detailed properties of one torrent. */
export async function getTorrentProperties(instanceId: number, hash: string): Promise<TorrentProperties> {
  return request<TorrentProperties>(instanceId, `/torrents/properties?hash=${hash}`)
}

/** Tracker list for one torrent. */
export async function getTorrentTrackers(instanceId: number, hash: string): Promise<Tracker[]> {
  return request<Tracker[]>(instanceId, `/torrents/trackers?hash=${hash}`)
}

/** Peer snapshot for one torrent. */
export async function getTorrentPeers(instanceId: number, hash: string): Promise<PeersResponse> {
  return request<PeersResponse>(instanceId, `/sync/torrentPeers?hash=${hash}`)
}

/** File list for one torrent. */
export async function getTorrentFiles(instanceId: number, hash: string): Promise<TorrentFile[]> {
  return request<TorrentFile[]>(instanceId, `/torrents/files?hash=${hash}`)
}

/** Web seeds for one torrent. */
export async function getTorrentWebSeeds(instanceId: number, hash: string): Promise<WebSeed[]> {
  return request<WebSeed[]>(instanceId, `/torrents/webseeds?hash=${hash}`)
}

/** Assign a category to the given torrents. */
export async function setCategory(instanceId: number, hashes: string[], category: string): Promise<void> {
  await action(instanceId,
'/torrents/setCategory', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|'), category }),
  })
}

/** Add comma-separated tags to the given torrents. */
export async function addTags(instanceId: number, hashes: string[], tags: string): Promise<void> {
  await action(instanceId, '/torrents/addTags', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|'), tags }),
  })
}

/** Remove comma-separated tags from the given torrents. */
export async function removeTags(instanceId: number, hashes: string[], tags: string): Promise<void> {
  await action(instanceId, '/torrents/removeTags', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|'), tags }),
  })
}

/** List all known tags. */
export async function getTags(instanceId: number): Promise<string[]> {
  return request<string[]>(instanceId, '/torrents/tags')
}

/** Create comma-separated tags. */
export async function createTags(instanceId: number, tags: string): Promise<void> {
  await action(instanceId, '/torrents/createTags', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ tags }),
  })
}

/** Delete comma-separated tags. */
export async function deleteTags(instanceId: number, tags: string): Promise<void> {
  await action(instanceId, '/torrents/deleteTags', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ tags }),
  })
}

/** Create a category with an optional save path. */
export async function createCategory(instanceId: number, category: string, savePath?: string): Promise<void> {
  const params: Record<string, string> = { category }
  if (savePath) params.savePath = savePath
  await action(instanceId, '/torrents/createCategory', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams(params),
  })
}

/** Change a category's save path. */
export async function editCategory(instanceId: number, category: string, savePath: string): Promise<void> {
  await action(instanceId, '/torrents/editCategory', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new
URLSearchParams({ category, savePath }),
  })
}

/** Remove categories (the endpoint takes newline-separated names). */
export async function removeCategories(instanceId: number, categories: string[]): Promise<void> {
  await action(instanceId, '/torrents/removeCategories', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ categories: categories.join('\n') }),
  })
}

/** Set download priority for specific file ids within a torrent. */
export async function setFilePriority(
  instanceId: number,
  hash: string,
  ids: number[],
  priority: number
): Promise<void> {
  await action(instanceId, '/torrents/filePrio', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hash, id: ids.join('|'), priority: priority.toString() }),
  })
}

/** Rename a torrent's display name. */
export async function renameTorrent(instanceId: number, hash: string, name: string): Promise<void> {
  await action(instanceId, '/torrents/rename', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hash, name }),
  })
}

/** Move the given torrents' data to a new save location. */
export async function setTorrentLocation(instanceId: number, hashes: string[], location: string): Promise<void> {
  await action(instanceId, '/torrents/setLocation', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|'), location }),
  })
}

/** Set the incomplete-download path for the given torrents. */
export async function setTorrentDownloadPath(instanceId: number, hashes: string[], downloadPath: string): Promise<void> {
  await action(instanceId, '/torrents/setDownloadPath', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hashes: hashes.join('|'), downloadPath }),
  })
}

/** Add tracker URLs to a torrent (newline-separated on the wire). */
export async function addTrackers(instanceId: number, hash: string, urls: string[]): Promise<void> {
  await action(instanceId, '/torrents/addTrackers', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hash, urls: urls.join('\n') }),
  })
}

/** Remove tracker URLs from a torrent (pipe-separated on the wire). */
export async function removeTrackers(instanceId: number, hash: string, urls: string[]): Promise<void> {
  await action(instanceId, '/torrents/removeTrackers', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ hash, urls: urls.join('|') }),
  })
}

/** Download one torrent's .torrent file as a Blob. */
async function fetchTorrentBlob(instanceId: number, hash: string): Promise<Blob> {
  const res = await fetch(`${getBase(instanceId)}/torrents/export?hash=${hash}`, { credentials: 'include' })
  if (!res.ok) throw new Error(`Export failed: ${res.status}`)
  return res.blob()
}

/** Trigger a browser download for a Blob via a temporary object URL. */
function downloadBlob(blob: Blob, filename: string) {
  const url = URL.createObjectURL(blob)
  const a = document.createElement('a')
  a.href = url
  a.download = filename
  a.click()
  URL.revokeObjectURL(url)
}

/**
 * Export .torrent files: a single torrent downloads directly, multiple
 * torrents are bundled into torrents.zip.
 */
export async function exportTorrents(instanceId: number, torrents: { hash: string; name: string }[]): Promise<void> {
  if (torrents.length === 1) {
    const blob = await fetchTorrentBlob(instanceId, torrents[0].hash)
    downloadBlob(blob, `${torrents[0].name}.torrent`)
    return
  }
  const zip = new JSZip()
  for (const t of torrents) {
    const blob = await fetchTorrentBlob(instanceId, t.hash)
    zip.file(`${t.name}.torrent`, blob)
  }
  const zipBlob = await zip.generateAsync({ type: 'blob' })
  downloadBlob(zipBlob, 'torrents.zip')
}

/** Alternative-speed-limits mode as a number (endpoint returns plain text). */
export async function getSpeedLimitsMode(instanceId: number): Promise<number> {
  const res = await fetch(`${getBase(instanceId)}/transfer/speedLimitsMode`, { credentials: 'include' })
  return Number(await res.text())
}

/** Toggle alternative speed limits on/off. */
export async function toggleSpeedLimitsMode(instanceId: number): Promise<void> {
  await fetch(`${getBase(instanceId)}/transfer/toggleSpeedLimitsMode`, {
    method: 'POST',
    credentials: 'include',
  })
}

/** Fetch the full qBittorrent preferences object. */
export async function getPreferences(instanceId: number): Promise<QBittorrentPreferences> {
  return request<QBittorrentPreferences>(instanceId, '/app/preferences')
}

/** Persist a partial preferences update (sent as the `json` form field). */
export async function setPreferences(instanceId: number, prefs: Partial<QBittorrentPreferences>): Promise<void> {
  const res = await fetch(`${getBase(instanceId)}/app/setPreferences`, {
    method: 'POST',
    credentials: 'include',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ json:
JSON.stringify(prefs) }), }) if (!res.ok) { throw new Error(`Failed to save preferences: ${res.status}`) } } export async function getRSSItems(instanceId: number, withData = false): Promise { return request(instanceId, `/rss/items?withData=${withData}`) } async function postRSS(instanceId: number, endpoint: string, params: Record): Promise { const res = await fetch(`${getBase(instanceId)}${endpoint}`, { method: 'POST', credentials: 'include', headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, body: new URLSearchParams(params), }) if (!res.ok) { const text = await res.text() throw new Error(text || `API error: ${res.status}`) } } export async function addRSSFeed(instanceId: number, url: string, path?: string): Promise { const params: Record = { url } if (path) params.path = path await postRSS(instanceId, '/rss/addFeed', params) } export async function addRSSFolder(instanceId: number, path: string): Promise { await postRSS(instanceId, '/rss/addFolder', { path }) } export async function removeRSSItem(instanceId: number, path: string): Promise { await postRSS(instanceId, '/rss/removeItem', { path }) } export async function moveRSSItem(instanceId: number, itemPath: string, destPath: string): Promise { await postRSS(instanceId, '/rss/moveItem', { itemPath, destPath }) } export async function refreshRSSItem(instanceId: number, itemPath: string): Promise { await postRSS(instanceId, '/rss/refreshItem', { itemPath }) } export async function markRSSAsRead(instanceId: number, itemPath: string, articleId?: string): Promise { const params: Record = { itemPath } if (articleId) params.articleId = articleId await postRSS(instanceId, '/rss/markAsRead', params) } export async function getRSSRules(instanceId: number): Promise { return request(instanceId, '/rss/rules') } export async function setRSSRule(instanceId: number, ruleName: string, ruleDef: Partial): Promise { await postRSS(instanceId, '/rss/setRule', { ruleName, ruleDef: JSON.stringify(ruleDef) }) } export 
// --- continued: the `export` keyword for this declaration is on the previous chunk line ---
async function removeRSSRule(instanceId: number, ruleName: string): Promise {
  await postRSS(instanceId, '/rss/removeRule', { ruleName })
}

// Rename an auto-download rule.
export async function renameRSSRule(instanceId: number, ruleName: string, newRuleName: string): Promise {
  await postRSS(instanceId, '/rss/renameRule', { ruleName, newRuleName })
}

// Preview which articles a rule currently matches. ruleName is user-provided,
// so it is URI-encoded before being placed in the query string.
export async function getMatchingArticles(instanceId: number, ruleName: string): Promise {
  return request(instanceId, `/rss/matchingArticles?ruleName=${encodeURIComponent(ruleName)}`)
}

// One entry from qBittorrent's main log.
export interface LogEntry {
  id: number
  message: string
  timestamp: number
  // numeric severity/type code — exact meaning defined by the qBittorrent API
  type: number
}

// One entry from qBittorrent's peer log (peers banned/blocked, etc.).
export interface PeerLogEntry {
  id: number
  ip: string
  timestamp: number
  blocked: boolean
  reason: string
}

// Filter options for getLog; each severity flag is only sent when explicitly
// set, and lastKnownId requests entries newer than the given id.
export interface LogOptions {
  normal?: boolean
  info?: boolean
  warning?: boolean
  critical?: boolean
  lastKnownId?: number
}

// Fetch main-log entries, optionally filtered by severity and fetched
// incrementally (camelCase lastKnownId maps to the API's snake_case
// last_known_id query parameter).
export async function getLog(instanceId: number, options: LogOptions = {}): Promise {
  const params = new URLSearchParams()
  if (options.normal !== undefined) params.set('normal', String(options.normal))
  if (options.info !== undefined) params.set('info', String(options.info))
  if (options.warning !== undefined) params.set('warning', String(options.warning))
  if (options.critical !== undefined) params.set('critical', String(options.critical))
  if (options.lastKnownId !== undefined) params.set('last_known_id', String(options.lastKnownId))
  const query = params.toString()
  // omit the '?' entirely when no options were provided
  return request(instanceId, `/log/main${query ? `?${query}` : ''}`)
}

// Fetch peer-log entries, optionally incremental since lastKnownId.
export async function getPeerLog(instanceId: number, lastKnownId?: number): Promise {
  const params = lastKnownId !== undefined ?
// --- ternary continues on the next chunk line ---
// --- continued: tail of getPeerLog's ternary from the previous chunk line ---
`?last_known_id=${lastKnownId}` : ''
  return request(instanceId, `/log/peers${params}`)
}

================================================ FILE: src/api/stats.ts ================================================

// Aggregated per-instance transfer totals for one reporting period.
export interface PeriodStats {
  instanceId: number
  instanceLabel: string
  // uploaded/downloaded totals — presumably bytes; confirm against the backend
  uploaded: number
  downloaded: number
  // whether any samples exist for this period, and how many were aggregated
  hasData: boolean
  dataPoints: number
}

// Fetch aggregated stats for the given period. Note these hit this app's own
// backend (/api/stats), not a qBittorrent instance.
export async function getStats(period: string): Promise {
  const res = await fetch(`/api/stats?period=${period}`, { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch stats')
  return res.json()
}

// List the period identifiers the stats backend can report on.
export async function getPeriods(): Promise {
  const res = await fetch('/api/stats/periods', { credentials: 'include' })
  if (!res.ok) throw new Error('Failed to fetch periods')
  return res.json()
}

================================================ FILE: src/components/AddTorrentModal.tsx ================================================

import { useState, useRef } from 'react'
import { Plus, X, Upload, CheckCircle, Check } from 'lucide-react'
import { useAddTorrent, useCategories } from '../hooks/useTorrents'

// Props for the add-torrent modal: visibility flag plus close callback.
interface Props {
  open: boolean
  onClose: () => void
}

// Which input mode the modal is in: magnet/URL link vs .torrent file upload.
type Tab = 'link' | 'file'

// Modal dialog for adding torrents by URL/magnet or by uploading .torrent
// files. NOTE(review): useState/useRef generic arguments appear stripped by
// the extraction; the component definition continues past this chunk.
export function AddTorrentModal({ open, onClose }: Props) {
  // form state for both tabs
  const [tab, setTab] = useState('link')
  const [url, setUrl] = useState('')
  const [files, setFiles] = useState([])
  const [category, setCategory] = useState('')
  const [tags, setTags] = useState('')
  const [savepath, setSavepath] = useState('')
  const [startTorrent, setStartTorrent] = useState(true)
  const [sequential, setSequential] = useState(false)
  const fileInputRef = useRef(null)
  const { data: categories = {} } = useCategories()
  const addMutation = useAddTorrent()
  // render nothing while closed (after hooks, so hook order stays stable)
  if (!open) return null
  function handleSubmit(e: React.FormEvent) {
    e.preventDefault()
    // require a URL on the link tab, or at least one file on the file tab
    if (tab === 'link' && !url.trim()) return
    if (tab === 'file' && files.length === 0) return
    addMutation.mutate( { options: { urls: tab === 'link' ?
// --- mutate() call and the rest of the component continue on later chunk lines ---
url.trim() : undefined, category: category || undefined, tags: tags || undefined, savepath: savepath || undefined, paused: !startTorrent, sequentialDownload: sequential, }, files: tab === 'file' ? files : undefined, }, { onSuccess: () => { setUrl('') setFiles([]) setCategory('') setTags('') setSavepath('') setStartTorrent(true) setSequential(false) onClose() }, } ) } function handleFileChange(e: React.ChangeEvent) { const selected = Array.from(e.target.files || []).filter((f) => f.name.endsWith('.torrent')) if (selected.length > 0) setFiles((prev) => [...prev, ...selected]) } function handleDrop(e: React.DragEvent) { e.preventDefault() const dropped = Array.from(e.dataTransfer.files).filter((f) => f.name.endsWith('.torrent')) if (dropped.length > 0) { setFiles((prev) => [...prev, ...dropped]) setTab('file') } } function removeFile(index: number) { setFiles((prev) => prev.filter((_, i) => i !== index)) } return (
e.preventDefault()} onDrop={handleDrop} >

Add Torrent

{tab === 'link' ? (