[
  {
    "path": ".copier-answers.yml",
    "content": "_commit: v0.0.66\n_src_path: gh:serious-scaffold/ss-python\nauthor_email: i@huxuan.org\nauthor_name: huxuan\ncopyright_holder: huxuan\ncopyright_license: MIT License\ncopyright_year: 2019-2025\ncoverage_threshold: 0\ndefault_py: '3.12'\ndevelopment_status: Beta\nmax_py: '3.13'\nmin_py: '3.10'\nmodule_name: iptvtools\norganization_name: huxuan.org\npackage_name: iptvtools\nplatforms:\n- macos\n- linux\nproject_description: A set of scripts that help to better IPTV experience.\nproject_keywords: iptvtools, iptvtools-cli, m3u filter\nproject_name: IPTVTools\nreadme_content: \"## Features\\n\\nScripts currently provided:\\n\\n- iptvtools-cli filter\\n\\\n    \\  - Merge from different resources.\\n  - Check the tcp/udp connectivity.\\n  -\\\n    \\ Filter by custom criteria, e.g. resolution.\\n  - Match with templates and EPG.\\n\\\n    \\  - Format the url with UDPxy if provided.\\n  - Unify channels' titles.\\n\\nFeatures\\\n    \\ planned on the road:\\n\\n- [ ] Scan certain ip and port range to find new channels.\\n\\\n    - [ ] Establish a lightweight database for routine maintenance.\\n\\nBesides, all\\\n    \\ scripts should be lightweight and able to keep running regularly after proper\\\n    \\ configuration.\\n\\nLast but not least, any ideas, comments and suggestions are\\\n    \\ welcome!\\n\\n## Prerequisites\\n\\nTo filter by stream information, e.g., resolution/height,\\\n    \\ [ffmpeg](https://www.ffmpeg.org/) (or [ffprobe](https://www.ffmpeg.org/ffprobe.html)\\\n    \\ more precisely) is needed, please install according to the [documentation](https://www.ffmpeg.org/download.html).\\n\\\n    \\n## Installation\\n\\nIt is recommended to manage iptvtools via [pipx](https://github.com/pypa/pipx):\\n\\\n    \\n```shell\\npipx install iptvtools\\n```\\n\\n## Usage\\n\\nPlease refer to the [documentation](https://iptvtools.readthedocs.io/)\\\n    \\ while some useful information in 
[wiki](https://github.com/huxuan/iptvtools/wiki).\"\nrepo_name: iptvtools\nrepo_namespace: huxuan\nrepo_platform: github\n"
  },
  {
    "path": ".devcontainer/Dockerfile",
    "content": "# syntax=docker/dockerfile:1\n\nARG PYTHON_VERSION=3.12\n\n########################################################################################\n# Dev image is used for development and cicd.\n########################################################################################\n\nFROM python:${PYTHON_VERSION} AS dev\n\n# NOTE: python docker image has env `PYTHON_VERSION` but with patch version.\n# ARG is used here for temporary override without changing the original env.\nARG PYTHON_VERSION\n\n# Config Python\nENV PYTHONDONTWRITEBYTECODE=1\nENV PYTHONHASHSEED=0\nENV PYTHONUNBUFFERED=1\n\n# Config pipx\nENV PIPX_HOME=/usr/local/pipx\nENV PIPX_BIN_DIR=/usr/local/bin\nENV PIPX_DEFAULT_PYTHON=/usr/local/bin/python\n\n# renovate: depName=debian_12/bash-completion\nARG BASH_COMPLETION_VERSION=\"1:2.11-6\"\n# renovate: depName=debian_12/pipx\nARG PIPX_VERSION=\"1.1.0-1\"\n# renovate: depName=debian_12/sudo\nARG SUDO_VERSION=\"1.9.13p3-1+deb12u1\"\n# renovate: depName=debian_12/vim\nARG VIM_VERSION=\"2:9.0.1378-2\"\n\n# Install system dependencies and override pipx with a newer version\nRUN apt-get update && apt-get install -y --no-install-recommends \\\n    bash-completion=\"${BASH_COMPLETION_VERSION}\" \\\n    pipx=\"${PIPX_VERSION}\" \\\n    sudo=\"${SUDO_VERSION}\" \\\n    vim=\"${VIM_VERSION}\" \\\n    && pipx install pipx==1.7.1 \\\n    && apt-get purge -y --autoremove pipx \\\n    && apt-get clean -y \\\n    && rm -rf /var/lib/apt/lists/* \\\n    && hash -r\n\n# Install prerequisites\nRUN --mount=source=Makefile,target=Makefile \\\n    make prerequisites\n\n# Create a non-root user with sudo permission\nARG USERNAME=iptvtools\nARG USER_UID=1000\nARG USER_GID=$USER_UID\n\nRUN groupadd --gid $USER_GID $USERNAME \\\n    && useradd --create-home --uid $USER_UID --gid $USER_GID $USERNAME -s /bin/bash \\\n    && echo $USERNAME ALL=\\(root\\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \\\n    && chmod 0440 /etc/sudoers.d/$USERNAME\n\n# Set permission 
for related folders\nRUN chown -R $USER_UID:$USER_GID $PIPX_HOME $PIPX_BIN_DIR\n\n# Set default working directory\nWORKDIR /workspace\n\n########################################################################################\n# Build image is an intermediate image used for building the project.\n########################################################################################\n\nFROM dev AS build\n\n# Install dependencies and project into the local packages directory.\nARG SCM_VERSION\nRUN --mount=source=README.md,target=README.md \\\n    --mount=source=pdm.lock,target=pdm.lock \\\n    --mount=source=pyproject.toml,target=pyproject.toml \\\n    --mount=source=src,target=src,rw \\\n    mkdir __pypackages__ && SETUPTOOLS_SCM_PRETEND_VERSION_FOR_IPTVTOOLS=${SCM_VERSION} pdm sync --prod --no-editable\n\n########################################################################################\n# Prod image is used for deployment and distribution.\n########################################################################################\n\nFROM python:${PYTHON_VERSION}-slim AS prod\n\n# NOTE: python docker image has env `PYTHON_VERSION` but with patch version.\n# ARG is used here for temporary override without changing the original env.\nARG PYTHON_VERSION\n\n# Config Python\nENV PYTHONDONTWRITEBYTECODE=1\nENV PYTHONHASHSEED=0\nENV PYTHONUNBUFFERED=1\n\n# Retrieve packages from build stage.\nENV PYTHONPATH=/workspace/pkgs\nCOPY --from=build /workspace/__pypackages__/${PYTHON_VERSION}/lib /workspace/pkgs\n\n# Retrieve executables from build stage.\nCOPY --from=build /workspace/__pypackages__/${PYTHON_VERSION}/bin/* /usr/local/bin/\n\n# Set command to run the cli by default.\nENTRYPOINT [\"iptvtools-cli\"]\n"
  },
  {
    "path": ".devcontainer/Dockerfile.dockerignore",
    "content": "*\n.*\n!/Makefile\n!/README.md\n!/pdm.lock\n!/pyproject.toml\n!/src/\n"
  },
  {
    "path": ".devcontainer/devcontainer.json",
    "content": "{\n    \"customizations\": {\n        // Configure extensions specific to VS Code.\n        \"vscode\": {\n            \"extensions\": [\n                \"DavidAnson.vscode-markdownlint\",\n                \"ExecutableBookProject.myst-highlight\",\n                \"charliermarsh.ruff\",\n                \"ms-python.mypy-type-checker\",\n                \"ms-python.python\",\n                \"richie5um2.vscode-sort-json\",\n                \"streetsidesoftware.code-spell-checker\"\n            ]\n        }\n    },\n    \"image\": \"ghcr.io/huxuan/iptvtools/dev:py3.12\",\n    // Force the image update to ensure the latest version which might be a bug.\n    // Reference: https://github.com/microsoft/vscode-remote-release/issues/9391\n    \"initializeCommand\": \"docker pull ghcr.io/huxuan/iptvtools/dev:py3.12\",\n    // Use a targeted named volume for .venv folder to improve disk performance.\n    // Reference: https://code.visualstudio.com/remote/advancedcontainers/improve-performance#_use-a-targeted-named-volume\n    \"mounts\": [\n        \"source=${localWorkspaceFolderBasename}-venv,target=${containerWorkspaceFolder}/.venv,type=volume\"\n    ],\n    \"name\": \"iptvtools\",\n    // Set proper permission for the .venv folder when the container created.\n    \"postCreateCommand\": \"sudo chown iptvtools:iptvtools .venv\",\n    // Prepare the development environment when the container starts.\n    \"postStartCommand\": \"make dev\",\n    // Use the non-root user in the container.\n    \"remoteUser\": \"iptvtools\"\n}\n"
  },
  {
    "path": ".github/FUNDING.yml",
    "content": "github:\n  - huxuan\n"
  },
  {
    "path": ".github/workflows/ci.yml",
    "content": "name: CI\n\non:\n  pull_request:\n  push:\n    branches:\n      - main\n\nconcurrency:\n  cancel-in-progress: true\n  group: ${{ github.workflow }}-${{ github.ref }}\n\njobs:\n  ci:\n    if: ${{ !cancelled() && ! failure() }}\n    runs-on: ${{ matrix.os }}\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2\n        with:\n          fetch-depth: 0\n      - name: Set up PDM\n        uses: pdm-project/setup-pdm@b2472ca4258a9ea3aee813980a0100a2261a42fc # v4.2\n        with:\n          cache: true\n          python-version: ${{ matrix.python-version }}\n          version: 2.22.3\n          cache-dependency-path: |\n            ./pdm.dev.lock\n            ./pdm.lock\n      - run: env | sort\n      - run: make prerequisites\n      - run: make dev\n      - run: make lint test doc build\n    strategy:\n      matrix:\n        os:\n          # renovate: github-runner\n          - macos-14\n          # renovate: github-runner\n          - ubuntu-24.04\n        python-version:\n          - '3.10'\n          - '3.11'\n          - '3.12'\n          - '3.13'\n"
  },
  {
    "path": ".github/workflows/commitlint.yml",
    "content": "name: CommitLint\nconcurrency:\n  cancel-in-progress: true\n  group: ${{ github.workflow }}-${{ github.ref }}\njobs:\n  commitlint:\n    container:\n      image: commitlint/commitlint:19.7.1@sha256:af27e796a83d69dfeb6307b1734942e959543eecd18736585db13a83ae1ca307\n    runs-on: ubuntu-24.04\n    steps:\n      - run: env | sort\n      - name: Validate the latest commit message with commitlint\n        if: github.event_name == 'push'\n        run: echo \"${{ github.event.head_commit.message }}\" | npx commitlint -x @commitlint/config-conventional\n      - name: Validate pull request title with commitlint\n        if: github.event_name == 'pull_request'\n        run: echo \"${{ github.event.pull_request.title }}\" | npx commitlint -x @commitlint/config-conventional\non:\n  pull_request:\n    types:\n      - opened\n      - synchronize\n      - reopened\n      - edited\n  push:\n    branches:\n      - main\n"
  },
  {
    "path": ".github/workflows/delete-untagged-packages.yml",
    "content": "name: Delete Untagged Packages\n\non:\n  schedule:\n    - cron: \"0 2 * * 0\"\n  workflow_dispatch: null\n\npermissions:\n  packages: write\n\njobs:\n  delete-untagged-packages:\n    runs-on: ubuntu-24.04\n    steps:\n      - name: Delete untagged dev-cache packages\n        uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5.0.0\n        with:\n          package-name: \"iptvtools/dev-cache\"\n          package-type: \"container\"\n          delete-only-untagged-versions: \"true\"\n      - name: Delete untagged development packages\n        uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5.0.0\n        with:\n          package-name: \"iptvtools/dev\"\n          package-type: \"container\"\n          delete-only-untagged-versions: \"true\"\n      - name: Delete untagged production packages\n        uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5.0.0\n        with:\n          package-name: \"iptvtools\"\n          package-type: \"container\"\n          delete-only-untagged-versions: \"true\"\n"
  },
  {
    "path": ".github/workflows/devcontainer.yml",
    "content": "name: DevContainer\n\non:\n  pull_request:\n    paths:\n      - .devcontainer/Dockerfile\n      - .devcontainer/Dockerfile.dockerignore\n      - .github/workflows/devcontainer.yml\n      - Makefile\n  push:\n    branches:\n      - main\n    paths:\n      - .devcontainer/Dockerfile\n      - .devcontainer/Dockerfile.dockerignore\n      - .github/workflows/devcontainer.yml\n      - Makefile\n  workflow_dispatch: null\n\nconcurrency:\n  cancel-in-progress: true\n  group: ${{ github.workflow }}-${{ github.ref }}\n\njobs:\n  dev-container-publish:\n    permissions:\n      packages: write\n    runs-on: ubuntu-24.04\n    steps:\n      - run: env | sort\n      - name: Checkout repository\n        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2\n      - name: Set up authentication\n        run: docker login -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} ghcr.io\n      - name: Set up BuildKit\n        run: |\n          docker context create builder\n          docker buildx create builder --name container --driver docker-container --use\n          docker buildx inspect --bootstrap --builder container\n      - name: Build the dev container\n        run: |\n          docker buildx build . \\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --cache-from type=registry,ref=ghcr.io/${{ github.repository }}/dev-cache:py${{ matrix.python-version }} \\\n            --file .devcontainer/Dockerfile \\\n            --load \\\n            --tag ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }} \\\n            --target dev\n      - name: Test the dev container\n        run: |\n          docker run --rm \\\n            -e CI=true \\\n            -v ${PWD}:/workspace \\\n            ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }} \\\n            make dev lint test doc build\n      - name: Build the prod container\n        run: |\n          docker buildx build . 
\\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --file .devcontainer/Dockerfile \\\n            --load \\\n            --tag ghcr.io/${{ github.repository }}:py${{ matrix.python-version }} \\\n            --target prod\n      - name: Test the prod container\n        run: docker run --rm ghcr.io/${{ github.repository }}:py${{ matrix.python-version }}\n      - name: Push the dev container\n        if: github.event_name != 'pull_request'\n        run: |\n          docker buildx build . \\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --cache-to type=registry,ref=ghcr.io/${{ github.repository }}/dev-cache:py${{ matrix.python-version }},mode=max \\\n            --file .devcontainer/Dockerfile \\\n            --push \\\n            --tag ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }} \\\n            --target dev\n    strategy:\n      matrix:\n        python-version:\n          - '3.10'\n          - '3.11'\n          - '3.12'\n          - '3.13'\n"
  },
  {
    "path": ".github/workflows/readthedocs-preview.yml",
    "content": "name: Read the Docs Pull Request Preview\nconcurrency:\n  cancel-in-progress: true\n  group: ${{ github.workflow }}-${{ github.ref }}\njobs:\n  documentation-links:\n    runs-on: ubuntu-24.04\n    steps:\n      - name: Add Read the Docs preview's link to pull request\n        uses: readthedocs/actions/preview@b8bba1484329bda1a3abe986df7ebc80a8950333 # v1.5\n        with:\n          project-slug: iptvtools\non:\n  pull_request_target:\n    types:\n      - opened\n    paths:\n      - .github/workflows/readthedocs-preview.yml\n      - .readthedocs.yaml\n      - Makefile\n      - README.md\n      - docs/**\n      - pdm.dev.lock\n      - pdm.lock\npermissions:\n  pull-requests: write\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Release\n\non:\n  release:\n    types:\n      - published\n\nconcurrency:\n  cancel-in-progress: true\n  group: ${{ github.workflow }}-${{ github.ref }}\n\njobs:\n  pages-build:\n    runs-on: ubuntu-24.04\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2\n        with:\n          fetch-depth: 0\n      - name: Set up PDM\n        uses: pdm-project/setup-pdm@b2472ca4258a9ea3aee813980a0100a2261a42fc # v4.2\n        with:\n          cache: true\n          python-version: '3.12'\n          version: 2.22.3\n          cache-dependency-path: |\n            ./pdm.dev.lock\n            ./pdm.lock\n      - run: env | sort\n      - run: make dev-doc\n      - run: make doc\n      - name: Upload pages artifact\n        uses: actions/upload-pages-artifact@56afc609e74202658d3ffba0e8f6dda462b719fa # v3.0.1\n        with:\n          path: public\n  pages:\n    needs:\n      - pages-build\n    permissions:\n      id-token: write\n      pages: write\n    runs-on: ubuntu-24.04\n    steps:\n      - id: deployment\n        name: Deploy to GitHub Pages\n        uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5\n  container-publish:\n    permissions:\n      packages: write\n    runs-on: ubuntu-24.04\n    steps:\n      - run: env | sort\n      - name: Checkout repository\n        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2\n      - name: Set up authentication\n        run: docker login -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} ghcr.io\n      - name: Set up BuildKit\n        run: |\n          docker context create builder\n          docker buildx create builder --name container --driver docker-container --use\n          docker buildx inspect --bootstrap --builder container\n      - name: Build the dev container\n        run: |\n          docker buildx build . 
\\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --cache-from type=registry,ref=ghcr.io/${{ github.repository }}/dev-cache:py${{ matrix.python-version }} \\\n            --file .devcontainer/Dockerfile \\\n            --load \\\n            --tag ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }} \\\n            --tag ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }}-${{ github.ref_name }} \\\n            --target dev\n      - name: Test the dev container\n        run: |\n          docker run --rm \\\n            -e CI=true \\\n            -v ${PWD}:/workspace \\\n            ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }} \\\n            make dev lint test doc build\n      - name: Build the prod container\n        run: |\n          docker buildx build . \\\n            --build-arg SCM_VERSION=${{ github.ref_name }} \\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --file .devcontainer/Dockerfile \\\n            --load \\\n            --tag ghcr.io/${{ github.repository }}:py${{ matrix.python-version }} \\\n            --tag ghcr.io/${{ github.repository }}:py${{ matrix.python-version }}-${{ github.ref_name }} \\\n            --target prod\n      - name: Test the prod container\n        run: docker run --rm ghcr.io/${{ github.repository }}:py${{ matrix.python-version }}\n      - name: Push the dev container\n        run: |\n          docker buildx build . 
\\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --cache-to type=registry,ref=ghcr.io/${{ github.repository }}/dev-cache:py${{ matrix.python-version }},mode=max \\\n            --file .devcontainer/Dockerfile \\\n            --push \\\n            --tag ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }} \\\n            --tag ghcr.io/${{ github.repository }}/dev:py${{ matrix.python-version }}-${{ github.ref_name }} \\\n            --target dev\n      - name: Push the prod container\n        run: |\n          docker buildx build . \\\n            --build-arg SCM_VERSION=${{ github.ref_name }} \\\n            --build-arg PYTHON_VERSION=${{ matrix.python-version }} \\\n            --file .devcontainer/Dockerfile \\\n            --push \\\n            --tag ghcr.io/${{ github.repository }}:py${{ matrix.python-version }} \\\n            --tag ghcr.io/${{ github.repository }}:py${{ matrix.python-version }}-${{ github.ref_name }} \\\n            --target prod\n    strategy:\n      matrix:\n        python-version:\n          - '3.10'\n          - '3.11'\n          - '3.12'\n          - '3.13'\n  package-publish:\n    runs-on: ubuntu-24.04\n    permissions:\n      contents: read\n      id-token: write\n    steps:\n      - name: Checkout repository\n        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2\n      - name: Set up PDM\n        uses: pdm-project/setup-pdm@b2472ca4258a9ea3aee813980a0100a2261a42fc # v4.2\n        with:\n          cache: true\n          python-version: '3.12'\n          version: 2.22.3\n          cache-dependency-path: |\n            ./pdm.dev.lock\n            ./pdm.lock\n      - run: env | sort\n      - env:\n          PDM_PUBLISH_PASSWORD: ${{ secrets.PDM_PUBLISH_PASSWORD }}\n          PDM_PUBLISH_USERNAME: ${{ vars.PDM_PUBLISH_USERNAME || '__token__' }}\n        run: make publish\n"
  },
  {
    "path": ".github/workflows/renovate.yml",
    "content": "name: Renovate\njobs:\n  renovate:\n    container:\n      env:\n        LOG_LEVEL: debug\n        RENOVATE_ALLOWED_POST_UPGRADE_COMMANDS: '[\"^find\", \"^pdm\"]'\n        RENOVATE_BRANCH_PREFIX: renovate-github/\n        RENOVATE_ENABLED: ${{ vars.RENOVATE_ENABLED || true }}\n        RENOVATE_ENABLED_MANAGERS: '[\"copier\", \"github-actions\", \"pep621\", \"pre-commit\", \"regex\"]'\n        RENOVATE_OPTIMIZE_FOR_DISABLED: \"true\"\n        RENOVATE_PLATFORM: github\n        RENOVATE_REPOSITORIES: '[\"${{ github.repository }}\"]'\n        RENOVATE_REPOSITORY_CACHE: enabled\n      image: ghcr.io/renovatebot/renovate:39.156.1@sha256:33153a313777d4640e37dccdac5ec67263c00edd5d470748599eba25790dea93\n      options: \"--user root\"\n    runs-on: ubuntu-24.04\n    steps:\n      - run: env | sort\n      - id: generate-token\n        name: Generate a token with GitHub App if App ID exists\n        if: vars.BOT_APP_ID\n        uses: actions/create-github-app-token@0d564482f06ca65fa9e77e2510873638c82206f2 # v1.11.5\n        with:\n          app-id: ${{ vars.BOT_APP_ID }}\n          private-key: ${{ secrets.BOT_PRIVATE_KEY }}\n      - name: Warn if use GITHUB_TOKEN\n        run: |\n          if [ -z \"${{ steps.generate-token.outputs.token || secrets.PAT }}\" ]; then\n            echo \"# :warning: GITHUB_TOKEN is used for renovate\" >> $GITHUB_STEP_SUMMARY\n            echo \"The GITHUB_TOKEN is used instead of a bot token or PAT and will not emit the checks for the pull requests.\" >> $GITHUB_STEP_SUMMARY\n          fi\n      - name: Warn if RENOVATE_GIT_AUTHOR is set while using GitHub App token\n        if: steps.generate-token.outputs.token && vars.RENOVATE_GIT_AUTHOR\n        run: |\n          echo \"# :warning: `RENOVATE_GIT_AUTHOR` is set explicitly while using GitHub App token\" >> $GITHUB_STEP_SUMMARY\n          echo \"Generally, Renovate automatically detects the git author and email using the token. 
However, explicitly setting the `RENOVATE_GIT_AUTHOR` will override this behavior.\" >> $GITHUB_STEP_SUMMARY\n      - name: Run Renovate\n        env:\n          RENOVATE_GIT_AUTHOR: ${{ vars.RENOVATE_GIT_AUTHOR }}\n          RENOVATE_PLATFORM_COMMIT: ${{ steps.generate-token.outputs.token && true || false }}\n          RENOVATE_TOKEN: ${{ steps.generate-token.outputs.token || secrets.PAT || secrets.GITHUB_TOKEN }}\n        run: |\n          if [ -z \"$RENOVATE_TOKEN\" ]; then\n            echo \"RENOVATE_TOKEN is not properly configured, skipping ...\"\n          else\n            renovate $RENOVATE_EXTRA_FLAG\n          fi\non:\n  schedule:\n    # * is a special character in YAML so you have to quote this string\n    - cron: \"*/15 0-3 * * 1\"\n  workflow_dispatch: null\n"
  },
  {
    "path": ".github/workflows/semantic-release.yml",
    "content": "name: Semantic Release\n\non:\n  workflow_run:\n    workflows: [CI]\n    types: [completed]\n    branches: [main]\n\njobs:\n  semantic-release:\n    name: Semantic Release\n    runs-on: ubuntu-24.04\n    # Ensure CI workflow is succeeded and avoid semantic release on forked repository\n    if: github.event.workflow_run.conclusion == 'success' && github.repository == 'huxuan/iptvtools'\n    permissions:\n      contents: write\n      id-token: write\n      issues: write\n      pull-requests: write\n    steps:\n      - id: generate-token\n        name: Generate a token with GitHub App if App ID exists\n        if: vars.BOT_APP_ID\n        uses: actions/create-github-app-token@0d564482f06ca65fa9e77e2510873638c82206f2 # v1.11.5\n        with:\n          app-id: ${{ vars.BOT_APP_ID }}\n          private-key: ${{ secrets.BOT_PRIVATE_KEY }}\n      - name: Warn if use GITHUB_TOKEN\n        run: |\n          if [ -z \"${{ steps.generate-token.outputs.token || secrets.PAT }}\" ]; then\n            echo \"# :warning: GITHUB_TOKEN is used for semantic-release\" >> $GITHUB_STEP_SUMMARY\n            echo \"The GITHUB_TOKEN is used instead of a bot token or PAT and will not emit the released publish event for the released workflow.\" >> $GITHUB_STEP_SUMMARY\n          fi\n      - name: Checkout repository\n        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2\n        with:\n          fetch-depth: 0\n          persist-credentials: false\n      - name: Setup Node.js\n        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0\n        with:\n          node-version: 'lts/*'\n      - name: Semantic Release\n        env:\n          GITHUB_TOKEN: ${{ steps.generate-token.outputs.token || secrets.PAT || secrets.GITHUB_TOKEN }}\n        run: >\n          npx\n          --package conventional-changelog-conventionalcommits@8.0.0\n          --package semantic-release@24.2.1\n          semantic-release\n"
  },
  {
    "path": ".gitignore",
    "content": "# Custom\n*.m3u\n*.swp\n.DS_Store\nPipfile\npublic\n\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\ncover/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\n.pybuilder/\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n#   For a library or package, you might want to ignore these files since the code is\n#   intended to run in multiple environments; otherwise, check them in:\n# .python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# poetry\n#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.\n#   This is especially recommended for binary packages to ensure reproducibility, and is more\n#   commonly ignored for libraries.\n#   
https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control\n#poetry.lock\n\n# pdm\n#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.\n#pdm.lock\n#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it\n#   in version control.\n#   https://pdm.fming.dev/latest/usage/project/#working-with-version-control\n.pdm.toml\n.pdm-python\n.pdm-build/\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# pytype static type analyzer\n.pytype/\n\n# Cython debug symbols\ncython_debug/\n\n# PyCharm\n#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can\n#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore\n#  and can be added to the global gitignore or merged into this file.  For a more nuclear\n#  option (not recommended) you can uncomment the following to ignore the entire idea folder.\n#.idea/\n"
  },
  {
    "path": ".pre-commit-config.yaml",
    "content": "default_install_hook_types:\n  - post-checkout\n  - post-merge\n  - post-rewrite\n  - pre-push\ndefault_stages:\n  - manual\n  - pre-push\nrepos:\n  - repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: v5.0.0\n    hooks:\n      - id: check-added-large-files\n      - id: check-docstring-first\n      - id: check-merge-conflict\n        args:\n          - '--assume-in-merge'\n      - id: check-toml\n      - id: check-xml\n      - id: check-yaml\n      - id: end-of-file-fixer\n      - id: forbid-new-submodules\n      - id: mixed-line-ending\n      - id: name-tests-test\n      - id: no-commit-to-branch\n        stages:\n          - pre-push\n      - id: sort-simple-yaml\n        files: .pre-commit-config.yaml\n      - id: trailing-whitespace\n  - repo: https://github.com/renovatebot/pre-commit-hooks\n    rev: 39.156.1\n    hooks:\n      - id: renovate-config-validator\n  - repo: local\n    hooks:\n      - id: pdm-sync\n        name: pdm-sync\n        entry: pdm sync\n        language: python\n        stages:\n          - post-checkout\n          - post-merge\n          - post-rewrite\n        always_run: true\n        pass_filenames: false\n      - id: pdm-dev-sync\n        name: pdm-dev-sync\n        entry: pdm sync --lockfile pdm.dev.lock\n        language: python\n        stages:\n          - post-checkout\n          - post-merge\n          - post-rewrite\n        always_run: true\n        pass_filenames: false\n      - id: pdm-lock-check\n        name: pdm-lock-check\n        entry: pdm lock --check\n        language: python\n        files: ^pyproject.toml$\n        pass_filenames: false\n      - id: pdm-dev-lock-check\n        name: pdm-dev-lock-check\n        entry: pdm lock --check --lockfile pdm.dev.lock\n        language: python\n        files: ^pyproject.toml$\n        pass_filenames: false\n      - id: mypy\n        name: mypy\n        entry: pdm run python -m mypy\n        language: system\n        types_or:\n          - python\n  
        - pyi\n        require_serial: true\n      - id: ruff\n        name: ruff\n        entry: ruff check --force-exclude\n        language: system\n        types_or:\n          - python\n          - pyi\n        require_serial: true\n      - id: ruff-format\n        name: ruff-format\n        entry: ruff format --force-exclude\n        language: system\n        types_or:\n          - python\n          - pyi\n        require_serial: true\n      - id: pyproject-fmt\n        name: pyproject-fmt\n        entry: pyproject-fmt\n        language: python\n        files: '(^|/)pyproject\\.toml$'\n        types:\n          - toml\n      - id: codespell\n        name: codespell\n        entry: codespell\n        language: python\n        types:\n          - text\n      - id: check-jsonschema\n        name: check-jsonschema\n        entry: make check-jsonschema\n        language: python\n        files: (?x)^(\n          \\.github/workflows/[^/]+|\n          \\.gitlab-ci\\.yml|\n          \\.gitlab/workflows/[^/]+|\n          \\.readthedocs\\.yaml|\n          \\.renovaterc\\.json\n          )$\n        pass_filenames: false\n      - id: forbidden-files\n        name: forbidden files\n        entry: found Copier update rejection files; review them and remove them\n        language: fail\n        files: \\.rej$\n"
  },
  {
    "path": ".readthedocs.yaml",
    "content": "build:\n  apt_packages:\n    - pipx\n  jobs:\n    post_checkout:\n      - git fetch --unshallow || true\n      # Cancel building pull requests when there aren't changed in the related files and folders.\n      # If there are no changes (git diff exits with 0) we force the command to return with 183.\n      # This is a special exit code on Read the Docs that will cancel the build immediately.\n      # Ref: https://docs.readthedocs.io/en/stable/build-customization.html#cancel-build-based-on-a-condition\n      - |\n        if [ \"$READTHEDOCS_VERSION_TYPE\" = \"external\" ] && git diff --quiet origin/main -- \\\n          .github/workflows/readthedocs-preview.yml \\\n          .readthedocs.yaml \\\n          Makefile \\\n          README.md \\\n          docs/ \\\n          pdm.dev.lock \\\n          pdm.lock;\n        then\n          exit 183;\n        fi\n    post_system_dependencies:\n      - env | sort\n    pre_create_environment:\n      - PIPX_BIN_DIR=$READTHEDOCS_VIRTUALENV_PATH/bin pipx install pdm==2.22.3\n    post_install:\n      - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH make dev-doc\n    post_build:\n      - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH make mypy doc-coverage\n  os: ubuntu-24.04\n  tools:\n    python: \"3.12\"\nsphinx:\n  configuration: docs/conf.py\n  fail_on_warning: true\nversion: 2\n"
  },
  {
    "path": ".releaserc.json",
    "content": "{\n    \"plugins\": [\n        [\n            \"@semantic-release/commit-analyzer\",\n            {\n                \"releaseRules\": [\n                    {\n                        \"breaking\": true,\n                        \"release\": \"major\"\n                    },\n                    {\n                        \"type\": \"build\",\n                        \"release\": false\n                    },\n                    {\n                        \"type\": \"chore\",\n                        \"release\": false\n                    },\n                    {\n                        \"type\": \"ci\",\n                        \"release\": false\n                    },\n                    {\n                        \"type\": \"docs\",\n                        \"release\": false\n                    },\n                    {\n                        \"type\": \"feat\",\n                        \"release\": \"minor\"\n                    },\n                    {\n                        \"type\": \"fix\",\n                        \"release\": \"patch\"\n                    },\n                    {\n                        \"type\": \"perf\",\n                        \"release\": \"patch\"\n                    },\n                    {\n                        \"type\": \"refactor\",\n                        \"release\": false\n                    },\n                    {\n                        \"type\": \"revert\",\n                        \"release\": \"patch\"\n                    },\n                    {\n                        \"type\": \"style\",\n                        \"release\": false\n                    },\n                    {\n                        \"type\": \"test\",\n                        \"release\": false\n                    },\n                    {\n                        \"scope\": \"*major-release*\",\n                        \"release\": \"major\"\n                    },\n                    {\n             
           \"scope\": \"*minor-release*\",\n                        \"release\": \"minor\"\n                    },\n                    {\n                        \"scope\": \"*patch-release*\",\n                        \"release\": \"patch\"\n                    },\n                    {\n                        \"scope\": \"*no-release*\",\n                        \"release\": false\n                    }\n                ]\n            }\n        ],\n        [\n            \"@semantic-release/release-notes-generator\",\n            {\n                \"presetConfig\": {\n                    \"types\": [\n                        {\n                            \"type\": \"build\",\n                            \"section\": \"Build\"\n                        },\n                        {\n                            \"type\": \"chore\",\n                            \"section\": \"Chores\"\n                        },\n                        {\n                            \"type\": \"ci\",\n                            \"section\": \"Continuous Integration\"\n                        },\n                        {\n                            \"type\": \"docs\",\n                            \"section\": \"Documentation\"\n                        },\n                        {\n                            \"type\": \"feat\",\n                            \"section\": \"Features\"\n                        },\n                        {\n                            \"type\": \"fix\",\n                            \"section\": \"Bug Fixes\"\n                        },\n                        {\n                            \"type\": \"perf\",\n                            \"section\": \"Performance\"\n                        },\n                        {\n                            \"type\": \"refactor\",\n                            \"section\": \"Refactor\"\n                        },\n                        {\n                            \"type\": \"revert\",\n              
              \"section\": \"Reverts\"\n                        },\n                        {\n                            \"type\": \"style\",\n                            \"section\": \"Styles\"\n                        },\n                        {\n                            \"type\": \"test\",\n                            \"section\": \"Tests\"\n                        }\n                    ]\n                }\n            }\n        ],\n        \"@semantic-release/github\"\n    ],\n    \"preset\": \"conventionalcommits\"\n}\n"
  },
  {
    "path": ".renovaterc.json",
    "content": "{\n    \"$schema\": \"https://docs.renovatebot.com/renovate-schema.json\",\n    \"constraints\": {\n        \"copier\": \"==9.4.1\",\n        \"pdm\": \"2.22.3\",\n        \"python\": \"==3.12\"\n    },\n    \"customManagers\": [\n        {\n            \"customType\": \"regex\",\n            \"datasourceTemplate\": \"pypi\",\n            \"description\": \"Match Python packages installed with pip/pipx\",\n            \"fileMatch\": [\n                \"^Makefile$\",\n                \"^README\\\\.md$\",\n                \"^\\\\.devcontainer/Dockerfile$\",\n                \"^\\\\.github/workflows/.+\\\\.yml$\",\n                \"^\\\\.gitlab/workflows/.+\\\\.yml$\",\n                \"^\\\\.readthedocs\\\\.yaml$\",\n                \"^\\\\.renovaterc\\\\.json$\",\n                \"^docs/.+\\\\.md$\"            ],\n            \"matchStrings\": [\n                \"pip install.* (?<depName>.*?)(\\\\[.*?\\\\])?==(?<currentValue>.*?)[\\\"\\n]\",\n                \"pipx install( --force)? (?<depName>.*?)(\\\\[.*?\\\\])?==(?<currentValue>.*?)\\\\s\",\n                \"pipx list --short \\\\| grep -q \\\"(?<depName>.*?)(\\\\[.*?\\\\])? 
(?<currentValue>.*?)\\\"\"\n            ]\n        },\n        {\n            \"customType\": \"regex\",\n            \"datasourceTemplate\": \"repology\",\n            \"depTypeTemplate\": \"debian\",\n            \"description\": \"Match debian packages installed in Dockerfiles\",\n            \"fileMatch\": [\n                \"^\\\\.devcontainer\\\\/Dockerfile$\"            ],\n            \"matchStrings\": [\n                \"# renovate: depName=(?<depName>.*?)\\nARG .*?_VERSION=\\\"(?<currentValue>.*)\\\"\\n\"\n            ],\n            \"versioningTemplate\": \"deb\"\n        },\n        {\n            \"customType\": \"regex\",\n            \"datasourceTemplate\": \"pypi\",\n            \"depNameTemplate\": \"pdm\",\n            \"description\": \"Match pdm version specified in setup-pdm GitHub Action\",\n            \"fileMatch\": [\n                \"^\\\\.github/workflows/.+\\\\.yml$\"            ],\n            \"matchStrings\": [\n                \"uses: pdm-project/setup-pdm[\\\\s\\\\S]+?\\\\sversion: (?<currentValue>.*)\\n\"\n            ]\n        },\n        {\n            \"customType\": \"regex\",\n            \"datasourceTemplate\": \"pypi\",\n            \"depNameTemplate\": \"pdm\",\n            \"description\": \"Match pdm version specified in the renovate constraints\",\n            \"fileMatch\": [\n                \"^\\\\.renovaterc\\\\.json$\"            ],\n            \"matchStrings\": [\n                \"\\\"pdm\\\": \\\"(?<currentValue>.*)\\\"\"\n            ]\n        },\n        {\n            \"customType\": \"regex\",\n            \"datasourceTemplate\": \"github-runners\",\n            \"depTypeTemplate\": \"github-runner\",\n            \"description\": \"Match GitHub runner defined in GitHub Actions matrix strategy\",\n            \"fileMatch\": [\n                \"^\\\\.github/workflows/.+\\\\.yml$\",\n                \"^template/.*\\\\.github.*/workflows/.+\\\\.yml(\\\\.jinja)?$\"\n            ],\n            
\"matchStrings\": [\n                \"# renovate: github-runner\\n\\\\s+- (os: )?(?<depName>.*?)-(?<currentValue>.*)\\n\"\n            ],\n            \"versioningTemplate\": \"docker\"\n        },\n        {\n            \"customType\": \"regex\",\n            \"datasourceTemplate\": \"npm\",\n            \"description\": \"Match npm packages used with npx\",\n            \"fileMatch\": [\n                \"^\\\\.github/workflows/.+\\\\.yml$\",\n                \"^\\\\.gitlab/workflows/.+\\\\.yml$\"            ],\n            \"matchStrings\": [\n                \"--package (?<depName>.+?)@(?<currentValue>.+?)\\\\s\"\n            ],\n            \"versioningTemplate\": \"docker\"\n        }\n    ],\n    \"extends\": [\n        \"config:best-practices\",\n        \":enablePreCommit\",\n        \":maintainLockFilesWeekly\",\n        \":semanticCommitTypeAll(build)\"\n    ],\n    \"packageRules\": [\n        {\n            \"description\": \"Update lock files for development dependencies\",\n            \"matchUpdateTypes\": [\n                \"lockFileMaintenance\"\n            ],\n            \"postUpgradeTasks\": {\n                \"commands\": [\n                    \"pdm update --lockfile pdm.dev.lock --no-default --dev --no-sync --update-eager\"\n                ]\n            }\n        },\n        {\n            \"description\": \"Group pdm Python package and version specified in setup-pdm GitHub Action\",\n            \"groupName\": \"pdm\",\n            \"matchDatasources\": [\n                \"github-tags\",\n                \"pypi\"\n            ],\n            \"matchDepNames\": [\n                \"pdm\"\n            ]\n        },\n        {\n            \"description\": \"Group renovate docker tag and pre-commit-hooks tag\",\n            \"groupName\": \"renovate\",\n            \"matchDatasources\": [\n                \"docker\",\n                \"github-tags\"\n            ],\n            \"matchDepNames\": [\n                
\"ghcr.io/renovatebot/renovate\",\n                \"renovatebot/pre-commit-hooks\"\n            ]\n        },\n        {\n            \"description\": \"Group debian packages to avoid failure when multiple packages are outdated\",\n            \"groupName\": \"debian packages\",\n            \"matchDepTypes\": [\n                \"debian\"\n            ]\n        }\n    ]\n}\n"
  },
  {
    "path": ".vscode/extensions.json",
    "content": "{\n  \"recommendations\": [\n    \"DavidAnson.vscode-markdownlint\",\n    \"ExecutableBookProject.myst-highlight\",\n    \"charliermarsh.ruff\",\n    \"ms-python.mypy-type-checker\",\n    \"ms-python.python\",\n    \"ms-vscode-remote.remote-containers\",\n    \"richie5um2.vscode-sort-json\",\n    \"streetsidesoftware.code-spell-checker\"\n  ]\n}\n"
  },
  {
    "path": ".vscode/settings.json",
    "content": "{\n  \"[jsonc]\": {\n    \"editor.defaultFormatter\": \"vscode.json-language-features\"\n  },\n  \"[markdown]\": {\n    \"editor.defaultFormatter\": \"DavidAnson.vscode-markdownlint\"\n  },\n  \"[python]\": {\n    \"editor.codeActionsOnSave\": {\n      \"source.fixAll.ruff\": \"explicit\",\n      \"source.organizeImports.ruff\": \"explicit\"\n    },\n    \"editor.defaultFormatter\": \"charliermarsh.ruff\",\n    \"editor.formatOnSave\": true\n  },\n  \"cSpell.words\": [\n    \"autofix\",\n    \"automodule\",\n    \"cobertura\",\n    \"codespell\",\n    \"commitlint\",\n    \"conventionalcommits\",\n    \"datasource\",\n    \"deepclean\",\n    \"deflist\",\n    \"devcontainer\",\n    \"devcontainers\",\n    \"elif\",\n    \"endmacro\",\n    \"epub\",\n    \"furo\",\n    \"genindex\",\n    \"huxuan\",\n    \"interruptible\",\n    \"JPKXI\",\n    \"maxdepth\",\n    \"modindex\",\n    \"mypy\",\n    \"noninteractive\",\n    \"pathjoin\",\n    \"pipenv\",\n    \"pipx\",\n    \"pycache\",\n    \"pydantic\",\n    \"pypi\",\n    \"pyproject\",\n    \"pytest\",\n    \"Quickstart\",\n    \"renovatebot\",\n    \"repology\",\n    \"setuptools\",\n    \"softprops\",\n    \"sphinxcontrib\",\n    \"titlesonly\",\n    \"toctree\",\n    \"unshallow\",\n    \"viewcode\"\n  ],\n  \"editor.codeActionsOnSave\": {\n    \"source.fixAll\": \"explicit\"\n  },\n  \"editor.formatOnSave\": true,\n  \"editor.rulers\": [\n    88\n  ],\n  \"files.exclude\": {\n    \"**/*.egg-info\": true,\n    \"**/.coverage\": true,\n    \"**/.mypy_cache\": true,\n    \"**/.pdm-build\": true,\n    \"**/.pytest_cache\": true,\n    \"**/.ruff_cache\": true,\n    \"**/.venv\": true,\n    \"**/Pipfile*\": true,\n    \"**/__pycache__\": true,\n    \"**/_build\": true,\n    \"**/coverage.xml\": true,\n    \"**/htmlcov\": true\n  },\n  \"files.insertFinalNewline\": true,\n  \"files.trimFinalNewlines\": true,\n  \"files.trimTrailingWhitespace\": true,\n  \"myst.preview.extensions\": [\n    
\"dollarmath\",\n    \"deflist\"\n  ],\n  \"sortJSON.contextMenu\": {\n    \"sortJSONAlphaNum\": false,\n    \"sortJSONAlphaNumReverse\": false,\n    \"sortJSONKeyLength\": false,\n    \"sortJSONKeyLengthReverse\": false,\n    \"sortJSONReverse\": false,\n    \"sortJSONType\": false,\n    \"sortJSONTypeReverse\": false,\n    \"sortJSONValues\": false,\n    \"sortJSONValuesReverse\": false\n  }\n}\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2019-2025 huxuan\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "Makefile",
    "content": ".PHONY: clean deepclean install dev prerequisites mypy ruff ruff-format pyproject-fmt codespell lint pre-commit test-run test build publish doc-watch doc-build doc-coverage doc\n########################################################################################\n# Variables\n########################################################################################\n\n# Documentation target directory, will be adapted to specific folder for readthedocs.\nPUBLIC_DIR := $(shell [ \"$$READTHEDOCS\" = \"True\" ] && echo \"$${READTHEDOCS_OUTPUT}html\" || echo \"public\")\n\n# Determine the Python version used by pipx.\nPIPX_PYTHON_VERSION := $(shell `pipx environment --value PIPX_DEFAULT_PYTHON` -c \"from sys import version_info; print(f'{version_info.major}.{version_info.minor}')\")\n\n########################################################################################\n# Development Environment Management\n########################################################################################\n\n# Remove common intermediate files.\nclean:\n\t-rm -rf \\\n\t\t$(PUBLIC_DIR) \\\n\t\t.coverage \\\n\t\t.mypy_cache \\\n\t\t.pdm-build \\\n\t\t.pdm-python \\\n\t\t.pytest_cache \\\n\t\t.ruff_cache \\\n\t\tPipfile* \\\n\t\t__pypackages__ \\\n\t\tbuild \\\n\t\tcoverage.xml \\\n\t\tdist\n\tfind . -name '*.egg-info' -print0 | xargs -0 rm -rf\n\tfind . -name '*.pyc' -print0 | xargs -0 rm -f\n\tfind . -name '*.swp' -print0 | xargs -0 rm -f\n\tfind . -name '.DS_Store' -print0 | xargs -0 rm -f\n\tfind . 
-name '__pycache__' -print0 | xargs -0 rm -rf\n\n# Remove pre-commit hook, virtual environment alongside intermediate files.\ndeepclean: clean\n\tif command -v pre-commit > /dev/null 2>&1; then pre-commit uninstall; fi\n\tif command -v pdm >/dev/null 2>&1 && pdm venv list | grep -q in-project ; then pdm venv remove --yes in-project >/dev/null 2>&1; fi\n\n# Install the package in editable mode.\ninstall:\n\tpdm install --prod\n\n# Install the package in editable mode with specific optional dependencies.\ndev-%: install\n\tpdm install --lockfile pdm.dev.lock --no-default --dev --group $*\n\n# Prepare the development environment.\n# Install the package in editable mode with all optional dependencies and pre-commit hook.\ndev: install\n\tpdm install --lockfile pdm.dev.lock --no-default --dev\n\tif [ \"$(CI)\" != \"true\" ] && command -v pre-commit > /dev/null 2>&1; then pre-commit install; fi\n\n# Lock both prod and dev dependencies.\nlock:\n\tpdm lock --prod --update-reuse-installed\n\tpdm lock --lockfile pdm.dev.lock --no-default --dev --update-reuse-installed\n\n# Install standalone tools\nprerequisites:\n\tpipx list --short | grep -q \"check-jsonschema 0.31.1\" || pipx install --force check-jsonschema==0.31.1\n\tpipx list --short | grep -q \"codespell 2.4.1\" || pipx install --force codespell[toml]==2.4.1\n\tpipx list --short | grep -q \"pdm 2.22.3\" || pipx install --force pdm==2.22.3\n\tpipx list --short | grep -q \"pre-commit 4.1.0\" || pipx install --force pre-commit==4.1.0\n\tpipx list --short | grep -q \"pyproject-fmt 2.5.0\" || pipx install --force pyproject-fmt==2.5.0\n\tpipx list --short | grep -q \"ruff 0.9.4\" || pipx install --force ruff==0.9.4\n\tpipx list --short | grep -q \"watchfiles 1.0.4\" || pipx install --force watchfiles==1.0.4\n\n########################################################################################\n# Lint and pre-commit\n########################################################################################\n\n# Check lint 
with mypy.\nmypy:\n\tpdm run python -m mypy . --html-report $(PUBLIC_DIR)/reports/mypy\n\n# Lint with ruff.\nruff:\n\truff check .\n\n# Format with ruff.\nruff-format:\n\truff format --check .\n\n# Check lint with pyproject-fmt.\npyproject-fmt:\n\tpyproject-fmt pyproject.toml\n\n# Check lint with codespell.\ncodespell:\n\tcodespell\n\n# Check jsonschema with check-jsonschema.\ncheck-jsonschema:\n\tcheck-jsonschema --builtin-schema vendor.github-workflows .github/workflows/*.yml\n\tcheck-jsonschema --builtin-schema vendor.readthedocs .readthedocs.yaml\n\tcheck-jsonschema --builtin-schema vendor.renovate --regex-variant nonunicode .renovaterc.json\n\n# Check lint with all linters.\nlint: mypy ruff ruff-format pyproject-fmt codespell check-jsonschema\n\n# Run pre-commit with autofix against all files.\npre-commit:\n\tpre-commit run --all-files --hook-stage manual\n\n########################################################################################\n# Test\n########################################################################################\n\n# Clean and run test with coverage.\ntest-run:\n\tpdm run python -m coverage erase\n\tpdm run python -m coverage run -m pytest\n\n# Generate coverage report for terminal and xml.\ntest: test-run\n\tpdm run python -m coverage report\n\tpdm run python -m coverage xml\n\n########################################################################################\n# Package\n########################################################################################\n\n# Build the package.\nbuild:\n\tpdm build\n\n# Publish the package.\npublish:\n\tpdm publish\n\n########################################################################################\n# Documentation\n########################################################################################\n\n# Generate documentation with auto build when changes happen.\ndoc-watch:\n\tpdm run python -m http.server --directory public &\n\twatchfiles \"make doc-build\" docs src 
README.md\n\n# Build documentation only from src.\ndoc-build:\n\tpdm run sphinx-build --fail-on-warning --write-all docs $(PUBLIC_DIR)\n\n# Generate html coverage reports with badge.\ndoc-coverage: test-run\n\tpdm run python -m coverage html -d $(PUBLIC_DIR)/reports/coverage\n\tpdm run bash scripts/generate-coverage-badge.sh $(PUBLIC_DIR)/_static/badges\n\n# Generate all documentation with reports.\ndoc: doc-build mypy doc-coverage\n\n########################################################################################\n# End\n########################################################################################\n"
  },
  {
    "path": "README.md",
    "content": "# IPTVTools\n\nA set of scripts that help to better IPTV experience.\n\n[![CI](https://github.com/huxuan/iptvtools/actions/workflows/ci.yml/badge.svg)](https://github.com/huxuan/iptvtools/actions/workflows/ci.yml)\n[![CommitLint](https://github.com/huxuan/iptvtools/actions/workflows/commitlint.yml/badge.svg)](https://github.com/huxuan/iptvtools/actions/workflows/commitlint.yml)\n[![DevContainer](https://github.com/huxuan/iptvtools/actions/workflows/devcontainer.yml/badge.svg)](https://github.com/huxuan/iptvtools/actions/workflows/devcontainer.yml)\n[![Release](https://github.com/huxuan/iptvtools/actions/workflows/release.yml/badge.svg)](https://github.com/huxuan/iptvtools/actions/workflows/release.yml)\n[![Renovate](https://github.com/huxuan/iptvtools/actions/workflows/renovate.yml/badge.svg)](https://github.com/huxuan/iptvtools/actions/workflows/renovate.yml)\n[![Semantic Release](https://github.com/huxuan/iptvtools/actions/workflows/semantic-release.yml/badge.svg)](https://github.com/huxuan/iptvtools/actions/workflows/semantic-release.yml)\n[![Coverage](https://img.shields.io/endpoint?url=https://huxuan.github.io/iptvtools/_static/badges/coverage.json)](https://huxuan.github.io/iptvtools/reports/coverage)\n[![Release](https://img.shields.io/github/v/release/huxuan/iptvtools)](https://github.com/huxuan/iptvtools/releases)\n[![PyPI](https://img.shields.io/pypi/v/iptvtools)](https://pypi.org/project/iptvtools/)\n[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/iptvtools)](https://pypi.org/project/iptvtools/)\n[![GitHub](https://img.shields.io/github/license/huxuan/iptvtools)](https://github.com/huxuan/iptvtools/blob/main/LICENSE)\n\n[![pdm-managed](https://img.shields.io/badge/pdm-managed-blueviolet)](https://pdm-project.org)\n[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit)](https://github.com/pre-commit/pre-commit)\n[![Checked with 
mypy](https://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)\n[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)\n[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-%23FE5196?logo=conventionalcommits&logoColor=white)](https://conventionalcommits.org)\n[![Pydantic v2](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/5697b1e4c4a9790ece607654e6c02a160620c7e1/docs/badge/v2.json)](https://pydantic.dev)\n[![Copier](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/copier-org/copier/master/img/badge/badge-grayscale-inverted-border-orange.json)](https://github.com/copier-org/copier)\n[![Serious Scaffold Python](https://img.shields.io/endpoint?url=https://serious-scaffold.github.io/ss-python/_static/badges/logo.json)](https://serious-scaffold.github.io/ss-python)\n[![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/huxuan/iptvtools)\n\n> [!IMPORTANT]\n> _IPTVTools_ is in the **Beta** phase.\n> Changes and potential instability should be anticipated.\n> Any feedback, comments, suggestions and contributions are welcome!\n\n## Features\n\nScripts currently provided:\n\n- iptvtools-cli filter\n  - Merge from different resources.\n  - Check the tcp/udp connectivity.\n  - Filter by custom criteria, e.g. 
resolution.\n  - Match with templates and EPG.\n  - Format the url with UDPxy if provided.\n  - Unify channels' titles.\n\nFeatures planned on the road:\n\n- [ ] Scan certain ip and port range to find new channels.\n- [ ] Establish a lightweight database for routine maintenance.\n\nBesides, all scripts should be lightweight and able to keep running regularly after proper configuration.\n\nLast but not least, any ideas, comments and suggestions are welcome!\n\n## Prerequisites\n\nTo filter by stream information, e.g., resolution/height, [ffmpeg](https://www.ffmpeg.org/) (or [ffprobe](https://www.ffmpeg.org/ffprobe.html) more precisely) is needed, please install according to the [documentation](https://www.ffmpeg.org/download.html).\n\n## Installation\n\nIt is recommended to manage iptvtools via [pipx](https://github.com/pypa/pipx):\n\n```shell\npipx install iptvtools\n```\n\n## Usage\n\nPlease refer to the [documentation](https://iptvtools.readthedocs.io/) while some useful information in [wiki](https://github.com/huxuan/iptvtools/wiki).\n\n## 📜 License\n\nMIT License, for more details, see the [LICENSE](https://github.com/huxuan/iptvtools/blob/main/LICENSE) file.\n"
  },
  {
    "path": "config.json",
    "content": "{\n    \"id_unifiers\": {\n        \"-\": \"\",\n        \"IPTV\": \"\",\n        \"北京纪实\": \"BTV冬奥纪实\",\n        \"BTV北京卫视\": \"北京卫视\",\n        \"卡酷动画\": \"BTV卡酷少儿\",\n        \"CETV1\": \"中国教育1台\",\n        \"CETV2\": \"中国教育2台\",\n        \"CETV3\": \"中国教育3台\",\n        \"CETV4\": \"中国教育4台\",\n        \"纪实频道\": \"上海纪实\"\n    },\n    \"title_unifiers\": {\n        \"4K超清\": \"北京IPTV 4K超清\",\n        \"BTV冬奥纪实4K\": \"BTV冬奥纪实\",\n        \"BTV冬奥纪实HDR\": \"BTV冬奥纪实\",\n        \"CETV4\": \"中国教育4台\",\n        \"DOGTV\": \"北京IPTV 萌宠TV\",\n        \"HD\": \"\",\n        \"卡酷动画\": \"BTV卡酷少儿\",\n        \"淘Baby\": \"北京IPTV 淘Baby\",\n        \"淘剧场\": \"北京IPTV 淘剧场\",\n        \"淘娱乐\": \"北京IPTV 淘娱乐\",\n        \"淘电影\": \"北京IPTV 淘电影\",\n        \"电视台\": \"\",\n        \"高清\": \"\",\n        \"＋\": \"+\"\n    }\n}\n"
  },
  {
    "path": "docs/advanced/cicd.md",
    "content": "# CI/CD Configurations\n\nThe CI/CD (Continuous Integration and Continuous Delivery) workflows automate various development tasks to ensure project maintainability with minimal human effort. The configuration files are located at `.github/workflows/*.yml` for GitHub and `.gitlab/workflows/*.yml` for GitLab.\n\n## `ci.yml`\n\nThe `ci` workflow is the most frequently used workflow, running on all pull/merge requests and changes to the default `main` branch. It performs linting, testing, and builds for the documentation and the package across all supported operation systems and Python versions to ensure everything works as expected.\n\n## `commitlint.yml`\n\nThe `commitlint` workflow checks whether the pull/merge request title comply with the <project:/development/commit.md>. This ensures consistent commit history and enable the possibility of automated release pipeline.\n\n## `delete-untagged-packages.yml`\n\nThe `delete-untagged-packages` workflow removes untagged packages since GitHub will still keep the package when overridden with the same tag. It helps keep the GitHub Packages clean and tidy.\n\n## `devcontainer.yml`\n\nThe `devcontainer` workflow will be triggered by container related changes. It builds and tests the development and production containers and push the development container except during pull/merge requests, ensuring seamless containerized environments.\n\n## `readthedocs-preview.yml`\n\nThe `readthedocs-preview` workflow leverage the [readthedocs/actions/preview](https://github.com/readthedocs/actions/tree/v1/preview) to add Read the Docs preview links to the related pull requests. These links make it easy to review documentation changes.\n\n## `release.yml`\n\nThe `release` workflow manages the entire publish process, including publishing the documentation, containers and packages. It is triggered by a new release or a release tag. 
It also ensures all the builds and tests succeed before completing the release.\n\n## `renovate.yml`\n\nThe `renovate` workflow automates the <project:/management/update.md>. It is scheduled to run weekly and will create pull/merge requests when there are new versions of the scaffold template, Python packages, GitHub Runners, GitHub Actions, Docker images, etc. It keeps the project secure and ensures compatibility with the latest versions.\n\n## `semantic-release.yml`\n\nThe `semantic-release` workflow automates the versioning and release process by publishing new releases or new release tags when certain changes are pushed to the default `main` branch. It simplifies the release management while maintaining consistency.\n"
  },
  {
    "path": "docs/advanced/dev-containers.md",
    "content": "# Development Container\n\nInstead of manually configuring your development environment, [Dev Containers](https://containers.dev/) offer a seamless containerized development experience right out of the box.\n\n## Prerequisites\n\nBefore you can use a Dev Container, you will need to install a few components.\n\n1. [Docker Desktop](https://www.docker.com/products/docker-desktop) or an [alternative Docker option](https://code.visualstudio.com/remote/advancedcontainers/docker-options).\n1. [Visual Studio Code](https://code.visualstudio.com/).\n1. The [Dev Containers extension](vscode:extension/ms-vscode-remote.remote-containers) within VSCode.\n\n## Usage\n\nAfter installing the prerequisites, you have two main approaches to use a Dev Container. Using [a locally cloned repository](#open-a-locally-cloned-repository-in-a-container) leverages your existing local source code, while [an isolated container volume](#open-the-repository-in-an-isolated-container-volume) creates a separate copy of the repository, which is particularly useful for PR reviews or exploring branches without altering your local environment.\n\n### Open a locally cloned repository in a container\n\nWhen you open a repository that includes a Dev Container configuration in VS Code, you will receive a prompt to reopen it in the container.\n\n```{image} /_static/images/dev-container-reopen-prompt.png\n:alt: Dev Container Reopen Prompt.\n```\n\nIf you missed the prompt, you can use the **Dev Containers: Reopen in Container** command from the Command Palette to initiate the containerized environment. 
Here are some frequently used commands:\n\nDev Containers: Reopen in Container\n: Triggers the containerized environment setup upon opening a repository configured for Dev Containers.\n\nDev Containers: Rebuild Without Cache and Reopen in Container\n: Useful for refreshing your environment in case of issues or to update to a newer version.\n\nDev Containers: Clean Up Dev Containers...\n: Deletes stopped Dev Container instances and removes unused volumes, helping maintain a clean development environment.\n\n### Open the repository in an isolated container volume\n\nYou may already notice the badge [![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/huxuan/iptvtools) in the [Overview](/index.md) page. You can click the badge or [this link](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/huxuan/iptvtools) to get started. Clicking these links will cause VS Code to automatically install the Dev Containers extension if needed, clone the source code into a container volume, and spin up a dev container for use.\n\n## Reference\n\nFor more detailed guidance and advanced usage, explore the following resources:\n\n- [Dev Containers tutorial](https://code.visualstudio.com/docs/devcontainers/tutorial)\n- [Developing inside a Container](https://code.visualstudio.com/docs/devcontainers/containers)\n"
  },
  {
    "path": "docs/advanced/index.md",
    "content": "# Advanced Usage\n\nThis section provides recommended best practices for enhancing your development workflow. While not essential, these topics can optimize the project management and development processes.\n\n```{toctree}\ndev-containers\npartial-dev-env\ncicd\n```\n"
  },
  {
    "path": "docs/advanced/partial-dev-env.md",
    "content": "# Partially Set Up Development Environment\n\nIn certain cases, it is unnecessary to install all dependencies as well as the pre-commit hook. For example, this can speed up the setup process in CI/CD.\n\n## Minimal installation\n\nInstall the project in editable mode with only the necessary dependencies, which is useful for scenarios like deployment.\n\n```bash\nmake install\n```\n\n## Documentation generation\n\nInstall the project in editable mode with dependencies related to `doc`,\nrecommended for scenarios like the documentation generation CI/CD process.\n\n```bash\nmake dev-doc\n```\n\n## Lint check\n\nInstall the project in editable mode with dependencies related to `lint`,\nrecommended for scenarios like the lint CI/CD process.\n\n```bash\nmake dev-lint\n```\n\n## Package build\n\nInstall the project in editable mode with dependencies related to `package`,\nrecommended for scenarios like the package CI/CD process.\n\n```bash\nmake dev-package\n```\n\n## Testing\n\nInstall the project in editable mode with dependencies related to `test`,\nrecommended for scenarios like the test CI/CD process.\n\n```bash\nmake dev-test\n```\n\n## Combination\n\nTo install dependencies for `doc` and `lint`, use the following command:\n\n```bash\nmake dev-doc,lint\n```\n"
  },
  {
    "path": "docs/api/index.md",
    "content": "# API Reference\n\n```{toctree}\n:maxdepth: 1\nsettings\n```\n"
  },
  {
    "path": "docs/api/settings.md",
    "content": "# iptvtools.settings\n\n```{eval-rst}\n.. automodule:: iptvtools.settings\n```\n"
  },
  {
    "path": "docs/cli/filter.md",
    "content": "# IPTVTools Filter\n\n```{eval-rst}\n.. click:: iptvtools.cli:filter\n  :prog: iptvtools-cli filter\n  :nested: full\n```\n"
  },
  {
    "path": "docs/cli/index.md",
    "content": "# CLI Reference\n\n```{toctree}\n:maxdepth: 1\niptvtools\nfilter\n```\n"
  },
  {
    "path": "docs/cli/iptvtools.md",
    "content": "# IPTVTools\n\n```{eval-rst}\n.. click:: iptvtools.cli:cli\n  :prog: iptvtools-cli\n  :nested: short\n```\n"
  },
  {
    "path": "docs/conf.py",
    "content": "\"\"\"Configuration file for the Sphinx documentation builder.\n\nFor the full list of built-in configuration values, see the documentation:\nhttps://www.sphinx-doc.org/en/master/usage/configuration.html\n\"\"\"\n\nfrom importlib import metadata\n\n# -- Project information ---------------------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information\n\nauthor = \"huxuan\"\ncopyright = \"2019-2025, huxuan\"\nproject = \"IPTVTools\"\nrelease = metadata.version(\"iptvtools\")\nversion = \".\".join(release.split(\".\")[:2])\n\n\n# -- General configuration -------------------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration\n\nextensions = [\n    \"myst_parser\",\n    \"sphinx.ext.autodoc\",\n    \"sphinx.ext.napoleon\",\n    \"sphinx.ext.viewcode\",\n    \"sphinx_click\",\n    \"sphinx_design\",\n    \"sphinxcontrib.autodoc_pydantic\",\n]\nsource_suffix = {\n    \".rst\": \"restructuredtext\",\n    \".md\": \"markdown\",\n}\nexclude_patterns = [\"_build\", \"Thumbs.db\", \".DS_Store\"]\ntemplates_path = [\"_templates\"]\nhtml_theme_options = {\n    \"announcement\": (\n        \"<em>IPTVTools</em> \"\n        \"is in the <strong>Beta</strong> phase. \"\n        \"Changes and potential instability should be anticipated. 
\"\n        \"Any feedback, comments, suggestions and contributions are welcome!\"\n    ),\n}\n\n# -- Options for HTML output -----------------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output\n\nhtml_theme = \"furo\"\nhtml_static_path = [\"_static\"]\n\n# -- Options for autodoc extension  ----------------------------------------------------\n# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration\n\nautodoc_default_options = {\n    \"members\": None,\n}\n\n# -- Options for autodoc_pydantic extension  -------------------------------------------\n# https://autodoc-pydantic.readthedocs.io/en/stable/users/configuration.html\n\nautodoc_pydantic_settings_show_json = False\n\n# -- Options for myst-parser extension  ------------------------------------------------\n# https://myst-parser.readthedocs.io/en/latest/configuration.html\n\nmyst_enable_extensions = [\n    \"colon_fence\",\n    \"deflist\",\n]\nmyst_heading_anchors = 3\nmyst_url_schemes = {\n    \"http\": None,\n    \"https\": None,\n    \"vscode\": None,\n}\n"
  },
  {
    "path": "docs/development/cleanup-dev-env.md",
    "content": "# Clean Up Development Environment\n\nWhen encountering environment-related problems, a straightforward solution is to cleanup the environment and setup a new one. Three different levels of cleanup approach are provided here.\n\n## Intermediate cleanup\n\nIntermediate cleanup only removes common intermediate files, such as generated documentation, package, coverage report, cache files for mypy, pytest, ruff and so on.\n\n```bash\nmake clean\n```\n\n## Deep cleanup\n\nDeep cleanup removes the pre-commit hook and the virtual environment alongside the common intermediate files.\n\n```bash\nmake deepclean\n```\n\n## Complete cleanup\n\nComplete cleanup restores the repository to its original, freshly-cloned state, ideal for starting over from scratch.\n\n```{caution}\nThis will remove all untracked files, please use it with caution. It is recommended to check with dry-run mode (`git clean -dfnx`) before actually removing anything. For more information, please refer to the [git-clean documentation](https://git-scm.com/docs/git-clean).\n```\n\n```bash\ngit clean -dfx\n```\n"
  },
  {
    "path": "docs/development/commit.md",
    "content": "# Commit Convention\n\nUsing structured commit messages, we can enhance the readability of our project history, simplify automated changelog generation, and streamline the release process. We primarily follow the [Conventional Commit](https://www.conventionalcommits.org/) and [Angular's commit guidelines](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commits).\n\n## Commit Message Pattern\n\n```text\n<type>(<optional scope>): <description>\n```\n\nExamples:\n\n```text\nbuild(dependencies): bump the prod group with 9 updates.\ndoc: Add doc for commit convention.\nchore: remove deprecated key in ruff config.\n```\n\nType\n: Describes the nature of the change:\n\n| Type      | Description                                            |\n|-----------|--------------------------------------------------------|\n| `build`   | Changes that affect the build system or dependencies.  |\n| `chore`   | Routine tasks or changes outside the src/runtime code. |\n| `ci`      | Changes related to continuous integration.             |\n| `doc`     | Documentation changes.                                 |\n| `feat`    | New features.                                          |\n| `fix`     | Bug fixes.                                             |\n| `perf`    | Performance improvements.                              |\n| `refactor`| Code restructuring without changing behavior.          |\n| `revert`  | Revert a previous commit.                              |\n| `style`   | Code formatting changes.                               |\n| `test`    | Add or update tests.                                   |\n\nScope [Optional]\n: Represents the part of the project impacted by the change. Examples include `logging`, `settings`, and `cli`.\n\n### Breaking Change\n\nA \"breaking change\" refers to any modification that disrupts the existing functionality in a way that may affect users. 
It can be denoted using an exclamation mark (`!`) before the colon, like `refactor!: Stuff`.\n\n## Commit in Development Branches\n\nWhile the commit convention seems strict, we aim for flexibility during the development phase.\nBy adhering to the <project:/management/settings.md>, all changes should be introduced via pull/merge requests.\nUsing the squash merge strategy, the emphasis is primarily on the title of pull/merge requests.\nIn this way, individual commit within development branches does not need to strictly adhere to the commit convention.\n\n````{note}\nA CI/CD pipeline checks the titles of pull/merge requests against the following regex pattern:\n\n```text\n^(build|chore|ci|doc|feat|fix|perf|refactor|revert|style|test)(\\(\\w+\\))?!?:\\s.*\n```\n````\n"
  },
  {
    "path": "docs/development/git-workflow.md",
    "content": "# Git Workflow\n\nThis pages shows the recommended Git workflow to keep the local repository clean and organized while ensuring smooth collaboration among team members.\n\n## Prerequisites\n\nMake sure you have [Git](https://git-scm.com/) (version 2.23 and above) installed and properly configured especially for authentication.\n\n## Fork and clone the repository\n\nFork the repository to your own namespace, and let us take `https://github.com/<username>/iptvtools` as example.\n\nClone the repository and navigate to the root directory:\n\n```shell\ngit clone git@github.com:<username>/iptvtools.git\ncd iptvtools\n```\n\n## Configure the remote\n\nAdd and update the `upstream` remote repository:\n\n```shell\ngit remote add upstream https://github.com/huxuan/iptvtools\ngit fetch upstream\n```\n\nConfigure `git` to pull `main` branch from the `upstream` remote:\n\n```shell\ngit config --local branch.main.remote upstream\n```\n\nConfigure `git` never to push to the `upstream` remote:\n\n```shell\ngit remote set-url --push upstream git@github.com/<username>/iptvtools.git\n```\n\n## Verify the remote configuration\n\nList the remote repositories with urls:\n\n```shell\ngit remote -v\n```\n\nYou should have two remote repositories: `origin` to your forked CPython repository, and `upstream` pointing to the official CPython repository:\n\n```shell\norigin  git@github.com:<username>/iptvtools.git (fetch)\norigin  git@github.com:<username>/iptvtools.git (push)\nupstream        https://github.com/huxuan/iptvtools (fetch)\nupstream        git@github.com:<username>/iptvtools.git (push)\n```\n\nNote that the push url of `upstream` repository is the forked repository.\n\nShow the upstream for `main` branch:\n\n```shell\ngit config branch.main.remote\n```\n\nYou should see `upstream` here.\n\n## Work on a feature branch\n\nCreate and switch to a new branch from `main`:\n\n```shell\ngit switch -c <branch-name> main\n```\n\nStage the changed files:\n\n```shell\ngit add 
-p # to review and add changes to existing files\ngit add <filename1> <filename2> # to add new files\n```\n\nCommit the staged files:\n\n```shell\ngit commit -m \"the commit message\"\n```\n\nPush the committed changes:\n\n```shell\ngit push\n```\n\n## Create a pull request\n\nNavigate to the hosting platform and create a pull request.\n\nAfter the pull request is merged, you need to delete the branch in your namespace.\n\n```{note}\nIt is recommended to configure the automatic deletion of the merged branches.\n```\n\n## Housekeeping the cloned repository\n\nUpdate the `main` branch from upstream:\n\n```shell\ngit switch main\ngit pull upstream main\n```\n\nRemove deleted remote-tracking references:\n\n```shell\ngit fetch --prune origin\n```\n\nRemove local branches:\n\n```shell\ngit branch -D <branch-name>\n```\n\nAfter all these operations, you should be ready to <project:#work-on-a-feature-branch> again.\n\n## Reference\n\n- [Git bootcamp and cheat sheet, Python Developer's Guide](https://devguide.python.org/getting-started/git-boot-camp/)\n"
  },
  {
    "path": "docs/development/index.md",
    "content": "# Development Practices\n\nThis section is designed for developers and covers essential topics during daily development lifecycle. Follow these guidelines to ensure all contributors adhere to best practices, maintain code quality, and collaborate efficiently.\n\n```{toctree}\ngit-workflow\nsetup-dev-env\ncleanup-dev-env\ncommit\ntests\n```\n"
  },
  {
    "path": "docs/development/setup-dev-env.md",
    "content": "# Set Up Development Environment\n\nThis page shows the approach to set up development environment. To simplify the process, a unified `Makefile` is maintained at the root directory of the repo. In other words, all the `make` related commands are supposed to run there.\n\n## Prerequisites\n\n[pipx](https://pipx.pypa.io/) is required to manage the standalone tools used across the development lifecycle.\nPlease refer to pipx's installation instructions [here](https://pipx.pypa.io/stable/installation/).\nOnce pipx is set up, install the needed standalone tools with the following command:\n\n```bash\nmake prerequisites\n```\n\n## Setup\n\nDevelopment environment can be setup with the following command:\n\n```bash\nmake dev\n```\n\nThis command will accomplish the following tasks:\n\n- Create a virtual environment.\n- Install all the dependencies, including those for documentation, lint, package and test.\n- Install the project in editable mode.\n- Install git hook scripts for `pre-commit`.\n\nTo speed up the setup process in certain scenarios, you may find <project:/advanced/partial-dev-env.md> helpful.\n"
  },
  {
    "path": "docs/development/tests.md",
    "content": "# Tests\n\nIn the context of CI/CD automation, dependency updates, and the release process, tests play a crucial role in daily development. We utilize [pytest](https://docs.pytest.org/) and [coverage](https://coverage.readthedocs.io) with proper configuration to ensure everything works as expected. This page provides general information and conventions we wish you to follow.\n\n## Running Tests\n\nAfter [setting up the development environment](/development/setup-dev-env.md), tests can be run with the command:\n\n```bash\nmake test\n```\n\nWith the default configuration, this command displays the result for each test case, the execution time for slow test cases, and a report on test coverage.\n\n## Writing Tests\n\nFor guidelines on how to write tests, refer to [the official documentation](https://docs.pytest.org/how-to/assert.html). Here are some conventions we expect you to follow:\n\n1. Organize all test cases under the `tests` directory.\n2. Align test modules with the modules to be tested.\n\n   For example, tests for the `iptvtools.cli` module should be located in the file `tests/cli_test.py`. If there are too many test cases, they can be split into files within the `tests/cli/` directory, using a prefix for each test file.\n3. Unless necessary, do not lower the threshold of the test coverage.\n\n## Coverage Report\n\nAfter running the tests, the coverage report will be printed on the screen and generated as part of the documentation. You can view it [here](/reports/coverage/index.md).\n"
  },
  {
    "path": "docs/index.md",
    "content": "# Welcome to IPTVTools's documentation\n\n```{toctree}\n:hidden:\nOverview <self>\nusage/index\nmanagement/index\ndevelopment/index\nadvanced/index\ncli/index\napi/index\nreports/index\nChangelog <https://github.com/huxuan/iptvtools/releases>\n```\n\n```{include} ../README.md\n:start-line: 1\n```\n\n## 🔖 Indices and tables\n\n* {ref}`genindex`\n* {ref}`modindex`\n* {ref}`search`\n"
  },
  {
    "path": "docs/management/index.md",
    "content": "# Project Management\n\nThis section is designed for project maintainers and covers essential tasks for managing your project. Follow these guidelines to ensure your project remains up-to-date and adheres to best practices.\n\n```{toctree}\ninit\nsettings\nupdate\nrelease\n```\n"
  },
  {
    "path": "docs/management/init.md",
    "content": "# Project Initialization\n\n## Prerequisites\n\n[pipx](https://pipx.pypa.io/) is required to manage the standalone tools used across the development lifecycle.\nPlease refer to pipx's installation instructions [here](https://pipx.pypa.io/stable/installation/).\nOnce pipx is set up, install the copier for project generation using the following command:\n\n```bash\npipx install copier==9.4.1\n```\n\n## Create the Repository\n\nCreate a blank Git repository on the hosting platform. Clone it locally and navigate to the root directory:\n\n```bash\ngit clone git@github.com:huxuan/iptvtools.git\ncd iptvtools\n```\n\n## Generate the Project\n\nRunning the following command and answer the prompts to set up the project:\n\n```bash\ncopier copy gh:serious-scaffold/ss-python .\n```\n\n## Set Up Development Environment\n\nSet up development environment to prepare for the initial commit:\n\n```bash\nmake dev\n```\n\n## Commit and push\n\n```bash\ngit add .\ngit commit -m \"chore: init from serious-scaffold-python\"\nSKIP=no-commit-to-branch git push\n```\n\nNow, everything is done!\n"
  },
  {
    "path": "docs/management/release.md",
    "content": "# Release Process\n\nWith the integration of [semantic-release](https://github.com/semantic-release/semantic-release), the release process is fully automated. To enable this, follow the settings for <project:/management/settings.md#renovate-and-semantic-release>. Besides, adhering to the <project:/development/commit.md#commit-message-pattern> is strongly recommended to ensure the release process works as expected.\n\n## Release Configuration\n\nThe release configuration is located in the root directory of the project:\n\n```{literalinclude} ../../.releaserc.json\n```\n\nBased on this configuration, the following trigger rules apply:\n\n* A **major** release is triggered by a 'BREAKING CHANGE' or 'BREAKING-CHANGE' in the footer or has a `major-release` scope.\n* A **minor** release is triggered when the commit type is `feat` or has a `minor-release` scope.\n* A **patch** release is triggered when the commit type is `fix`, `perf`, `refactor` or `revert` or has a `patch-release` scope.\n* No release is triggered if the commit type is any other type or has a `no-release` scope.\n\n## Commit message examples\n\n### Major release\n\n* ```text\n  feat: drop Python 3.8 support\n\n  BREAKING CHANGE: drop Python 3.8 support\n  ```\n* `chore(major-release): a major release`\n\n### Minor release\n\n* `feat: add an awesome feature`\n* `chore(minor-release): a minor release`\n\n### Patch release\n\n* `fix: fix a silly bug`\n* `perf: performance improvement for the core`\n* `refactor: refactor the base module`\n* `revert: revert a buggy implementation`\n* `chore(patch-release): a patch release`\n\n### No release\n\n* `feat(no-release): a feature that should not trigger a release`\n* `fix(no-release,core): a fix that should not trigger a release, but with more scopes`\n\n## Release Tasks\n\nThe release process includes the following tasks:\n\n::::{tab-set}\n\n:::{tab-item} GitHub\n:sync: github\n\n1. Generate a changelog from unreleased commits.\n1. 
Publish a new GitHub Release and semantic version tag.\n1. Build and publish the documentation to GitHub Pages.\n1. Build and publish the Python package to the configured package repository.\n1. Build and publish the Development and Production Containers with the build cache to GitHub Packages.\n    1. The Production Container is tagged as `ghcr.io/huxuan/iptvtools:py<PYTHON_VERSION>` for the latest version and `ghcr.io/huxuan/iptvtools:py<PYTHON_VERSION>-<PROJECT_VERSION>` for archives.\n    1. The Development Container is tagged as `ghcr.io/huxuan/iptvtools/dev:py<PYTHON_VERSION>` for the latest version and `ghcr.io/huxuan/iptvtools/dev:py<PYTHON_VERSION>-<PROJECT_VERSION>` for archives.\n    1. The build cache for the Development Container is tagged as `ghcr.io/huxuan/iptvtools/dev-cache:py<PYTHON_VERSION>`.\n\n:::\n\n:::{tab-item} GitLab\n:sync: gitlab\n\n1. Generate a changelog from unreleased commits.\n1. Publish a new GitLab Release and semantic version tag.\n1. Build and publish the documentation to GitLab Pages.\n1. Build and publish the Python package to the configured package repository.\n1. Build and publish the Development and Production Containers with build cache to GitLab Container Registry.\n    1. The Production Container is tagged as `registry.gitlab.com/huxuan/iptvtools:py<PYTHON_VERSION>` for the latest version and `registry.gitlab.com/huxuan/iptvtools:py<PYTHON_VERSION>-<PROJECT_VERSION>` for archives.\n    1. The Development Container is tagged as `registry.gitlab.com/huxuan/iptvtools/dev:py<PYTHON_VERSION>` for the latest version and `registry.gitlab.com/huxuan/iptvtools/dev:py<PYTHON_VERSION>-<PROJECT_VERSION>` for archives.\n    1. The build cache for the Development Container is tagged as `registry.gitlab.com/huxuan/iptvtools/dev-cache:py<PYTHON_VERSION>`.\n\n:::\n\n::::\n"
  },
  {
    "path": "docs/management/settings.md",
    "content": "# Repository Settings\n\nThere are several settings to utilize the features provided by the project template. Although some of them are not strictly required, it is highly recommended finish these one-time jobs so as to benefit on the whole development lifecycle.\n\n## Branch protection\n\n::::{tab-set}\n\n:::{tab-item} GitHub\n:sync: github\n\n1. Navigate to the [Branch protection rules](https://github.com/huxuan/iptvtools/settings/branches) settings.\n1. Ensure a rule for the default `main` branch.\n1. Enable **Require a pull request before merging** with **Require approvals** and **Dismiss stale pull request approvals when new commits are pushed** enabled.\n1. Enable **Require status checks to pass before merging** and set [ci](https://github.com/huxuan/iptvtools/actions/workflows/ci.yml) and [commitlint](https://github.com/huxuan/iptvtools/actions/workflows/commitlint.yml) as required status checks.\n\n:::\n\n:::{tab-item} GitLab\n:sync: gitlab\n\n1. Navigate to the [Repository](https://gitlab.com/huxuan/iptvtools/-/settings/repository) settings and the **Protected branches** section.\n1. Ensure the default `main` branch is protected with **Maintainers** for **Allowed to merge**, **No one** for **Allowed to push and merge** and **Allowed to force push** disabled.\n\n:::\n::::\n\n## Tag protection\n\n::::{tab-set}\n\n:::{tab-item} GitHub\n:sync: github\n\n1. Navigate to the [Protected tags](https://github.com/huxuan/iptvtools/settings/tag_protection) settings.\n1. Create a rule for tag name pattern `v*`.\n\n:::\n\n:::{tab-item} GitLab\n:sync: gitlab\n\n1. Navigate to the [Repository](https://gitlab.com/huxuan/iptvtools/-/settings/repository) settings and the **Protected tags** section.\n1. Add a rule with wildcard `v*` for **Tag** and **Maintainers** for **Allowed to create**.\n\n:::\n::::\n\n## Squash merge\n\n::::{tab-set}\n\n:::{tab-item} GitHub\n:sync: github\n\n1. 
Navigate to the [General](https://github.com/huxuan/iptvtools/settings) settings and the **Pull Requests** section.\n1. Disable **Allow merge commits** and **Allow rebase merging**.\n1. Enable **Allow squash merging** and set **Pull request title** as **Default commit message**.\n\n:::\n\n:::{tab-item} GitLab\n:sync: gitlab\n\n1. Navigate to the [Merge requests](https://gitlab.com/huxuan/iptvtools/-/settings/merge_requests) settings.\n1. Set **Fast-forward merge** for the **Merge method**.\n1. Set **Require** for the **Squash commits when merging**.\n1. Enable **Pipelines must succeed** in the **Merge checks**.\n\n:::\n::::\n\n## Pages\n\n::::{tab-set}\n\n:::{tab-item} GitHub\n:sync: github\n\n1. Navigate to the [GitHub Pages](https://github.com/huxuan/iptvtools/settings/pages) settings.\n1. Set **GitHub Actions** as **Source**.\n\n:::\n\n:::{tab-item} GitLab\n:sync: gitlab\n\nNothing need to do for GitLab Pages.\n\n:::\n::::\n\n## Package publish\n\n::::{tab-set}\n\n:::{tab-item} GitHub\n:sync: github\n\n1. Navigate to the [Actions secrets and variables](https://github.com/huxuan/iptvtools/settings/secrets/actions) settings.\n1. Set the **variable** `PDM_PUBLISH_REPO`, the repository (package index) URL to upload the package which defaults to `https://pypi.org`, the official PyPI.\n1. Set the **variable** `PDM_PUBLISH_USERNAME`, the username to authenticate to the repository (package index) which defaults to `__token__`, used for [API token](https://pypi.org/help/#apitoken).\n1. Set the **secret** `PDM_PUBLISH_PASSWORD`, the password to authenticate to the repository (package index).\n\n:::\n\n:::{tab-item} GitLab\n:sync: gitlab\n\n1. Navigate to the [CI/CD](https://gitlab.com/huxuan/iptvtools/-/settings/ci_cd) settings and the **Variables** section.\n1. Set the variable `PDM_PUBLISH_REPO`, the repository (package index) URL to upload the package, default to `https://pypi.org`, the official PyPI.\n1. 
Set the variable `PDM_PUBLISH_USERNAME`, the username to authenticate to the repository (package index), default to `__token__`, used for [API token](https://pypi.org/help/#apitoken).\n1. Set the variable `PDM_PUBLISH_PASSWORD` with the **Mask variable** option for security, the password to authenticate to the repository (package index).\n\n:::\n::::\n\n## Renovate and semantic-release\n\n::::::{tab-set}\n\n:::::{tab-item} GitHub\n:sync: github\n\nThere are two approaches, either with GitHub App or with personal access token (classic). GitHub App is the more recommended way to avoid the issues and pull requests tied to a particular user.\n\n::::{tab-set}\n\n:::{tab-item} GitHub App\n\n  1. [Register a GitHub App](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app) with permission listed [here](https://docs.renovatebot.com/modules/platform/github/#running-as-a-github-app) and `Repository administration: write` permission as mentioned [here](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/managing-repository-settings/configuring-tag-protection-rules#about-tag-protection-rules).\n  1. [Generate a private key](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/managing-private-keys-for-github-apps#generating-private-keys), and download the private key as a `.pem` file.\n  1. Navigate to the [Actions secrets and variables](https://github.com/huxuan/iptvtools/settings/secrets/actions) settings.\n  1. Set **App ID** of the GitHub App as **variable** `BOT_APP_ID`.\n  1. Set the content of the private key as **secret** `BOT_PRIVATE_KEY`.\n\n:::\n\n:::{tab-item} personal access token (classic)\n\n1. [Create a personal access token (classic)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with **workflow** scope.\n1. 
Navigate to the [Actions secrets and variables](https://github.com/huxuan/iptvtools/settings/secrets/actions) settings and set the token as a **secret** `PAT`.\n\n:::\n::::\n\n```{note}\nYou can set the scope of the variables and secrets to **Repository** or **Organization** according to actual requirements.\n```\n\n:::::\n\n:::::{tab-item} GitLab\n:sync: gitlab\n\nEither [Group access tokens](https://docs.gitlab.com/ee/user/group/settings/group_access_tokens.html), [Project access tokens](https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html) or [Personal access tokens](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html) can be used. The group or project access tokens are more recommended to avoid the issues and merge requests tied to particular user.\n\n1. Create a [group access token](https://gitlab.com/groups/huxuan/-/settings/access_tokens), [project access token](https://gitlab.com/huxuan/iptvtools/-/settings/access_tokens) or [personal access token](https://gitlab.com/-/user_settings/personal_access_tokens) with `Maintainer` role and `api, write_repository` scope.\n1. Navigate to the [CI/CD](https://gitlab.com/huxuan/iptvtools/-/settings/ci_cd) settings and the **Variables** section. Set the token as variable `PAT` with the **Mask variable** option for security.\n1. Navigate to the [Pipeline schedules](https://gitlab.com/huxuan/iptvtools/-/pipeline_schedules). Create a new schedule with `*/15 0-3 * * 1` as **Interval Pattern** and mark it as **Activated**.\n\n```{note}\nAlthough optional, [creating a personal access token (classic)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) on **GitHub** is strongly recommended. This token only needs `read-only` access and will increase the rate limit for Renovate to fetch dependencies and changelogs from github.com. 
It can be from any account and should be set as the variable `GITHUB_COM_TOKEN` with the **Mask variable** option for security. For more information on setting this up, see [Renovate's documentation](https://docs.renovatebot.com/getting-started/running/#githubcom-token-for-changelogs).\n```\n\n:::::\n::::::\n"
  },
  {
    "path": "docs/management/update.md",
    "content": "# Template and Dependency Update\n\n## Template update\n\nTo update the project template, thanks to the [update feature](https://copier.readthedocs.io/en/stable/updating/) provided by [Copier](https://github.com/copier-org/copier) and the [regex manager](https://docs.renovatebot.com/modules/manager/regex/) provided by Renovate, a pull request will be automatically created when a new version of the template is released. In most cases, Copier will update the project seamlessly. If conflicts arise, they can be resolved manually since everything is version-controlled by Git.\n\n### Tips to minimize potential conflicts\n\nTo minimize potential conflicts, consider the following suggestions:\n\n1. Avoid modifying the auto-generated files unless necessary.\n1. For template-related changes, consider proposing an issue or a pull request to the [project template repository](http://github.com/serious-scaffold/ss-python) directly.\n1. For project-specific changes, adopt an inheritance or extension approach to minimize modifications to auto-generated content.\n\n## Dependency update\n\nWith the integration of [Renovate](https://github.com/renovatebot/renovate), all dependencies, including those used for development and CI/CD, will be automatically updated via pull requests whenever a new version is released. This allows us to focus solely on testing to ensure the new versions do not break anything. Moreover, an issue titled \"Dependency Dashboard\" will be created, so that you can have an overview of the state of all dependencies.\n\n### Managed dependency types\n\nThe project template tracks the following dependencies:\n\n1. Supported managers other than `regex`:\n   1. [pep621](https://docs.renovatebot.com/modules/manager/pep621/): The lock file generated by PDM for both dependencies and development dependencies in `pyproject.toml`.\n   1. 
[github-actions](https://docs.renovatebot.com/modules/manager/github-actions/): Actions, runners and containers in GitHub Actions.\n   1. [gitlabci](https://docs.renovatebot.com/modules/manager/gitlabci/): Containers in GitLab CI/CD.\n   1. [pre-commit](https://docs.renovatebot.com/modules/manager/pre-commit/): Pre-commit hooks.\n1. Regex manager:\n   1. Python packages installed with pip/pipx, listed in the README, DevContainer Dockerfile, GitHub Actions, GitLab CI/CD, ReadTheDocs configuration, Renovate configuration and documentation.\n   1. Debian packages installed in the DevContainer Dockerfile.\n   1. PDM version specified in the `pdm-project/setup-pdm` GitHub action.\n   1. PDM version specified in the renovate constraints.\n   1. NPM packages used with npx.\n   1. The project template itself.\n\n### Add new dependencies\n\nWhen adding new dependencies that belong to the managed dependency type mentioned above, it is recommended to pin or lock their versions to ensure they are smoothly managed by Renovate.\n\nWhen adding new types of dependencies, it is also recommended to manage them with Renovate.\n\n- If this follows a common pattern, consider creating an issue or even sending a pull request to project template directly.\n- If it is project-specific, you can extend the renovate configuration:\n  - For supported managers other than `regex`, add them in the Renovate configuration using environment variable `RENOVATE_ENABLED_MANAGERS` in GitHub Actions or GitLab CI/CD and configure them in the `renovaterc.json` under the root directory if needed.\n  - For `regex` managers, add new entries in the `customManagers` and configure `packageRules` if needed in the `.renovaterc.json`.\n\n  ```{note}\n  This also adheres to the <project:#tips-to-minimize-potential-conflicts>.\n  ```\n\n```{note}\nFor the complete list of supported managers and their corresponding configurations, please refer to the [Managers - Renovate 
Docs](https://docs.renovatebot.com/modules/manager/).\n```\n"
  },
  {
    "path": "docs/reports/coverage/index.md",
    "content": "# Coverage Reports\n\n<!-- placeholder for generated coverage reports -->\n"
  },
  {
    "path": "docs/reports/index.md",
    "content": "# Code Quality Reports\n\n```{toctree}\n:maxdepth: 2\nmypy/index\ncoverage/index\n```\n"
  },
  {
    "path": "docs/reports/mypy/index.md",
    "content": "# MyPy Reports\n\n<!-- placeholder for generated mypy reports -->\n"
  },
  {
    "path": "docs/usage/filter.md",
    "content": "# Filter\n\n## Reference\n\n<project:/cli/filter.md>\n\n## Example\n\nThere is a [well-maintained IPTV list](https://gist.github.com/sdhzdmzzl/93cf74947770066743fff7c7f4fc5820) only for Beijing Unicom and a [well-maintained templates & EPG](http://epg.51zmt.top:8000/) mainly for China. So for me::\n\n```bash\n$ iptvtools-cli filter \\\n-i https://gist.githubusercontent.com/sdhzdmzzl/93cf74947770066743fff7c7f4fc5820/raw/11107d2dcfe2f5785e7ada94bb44c0cd349191c5/bj-unicom-iptv.m3u \\\n-t http://epg.51zmt.top:8000/test.m3u\n```\n\nWith UDPXY, it becomes::\n\n```bash\n$ iptvtools-cli filter \\\n-i https://gist.githubusercontent.com/sdhzdmzzl/93cf74947770066743fff7c7f4fc5820/raw/11107d2dcfe2f5785e7ada94bb44c0cd349191c5/bj-unicom-iptv.m3u \\\n-t http://epg.51zmt.top:8000/test.m3u \\\n-u http://192.168.0.1:8888\n```\n\nJust replace `http://192.168.0.1:8888` with corresponding UDPXY prefix should be OK.\n\n## Selected Parameters\n\nHere is some further explanation for those not so obvious parameters.\n\n### GROUP_EXCLUDE\n\nFilter the playlist depends on the group title with a blacklist (regular expression).\nNote that, it has higher priority than the whitelist ``GROUP_INCLUDE``.\n\n### GROUP_INCLUDE\n\nFilter the playlist depends on the group title with a whitelist (regular expression).\nNote that, if set, only groups match the pattern will be included.\n\n### CHANNEL_EXCLUDE\n\nFilter the playlist depends on the channel title by a blacklist (regular expression).\nNote that, it has higher priority than the whitelist ``CHANNEL_INCLUDE``.\n\n### CHANNEL_INCLUDE\n\nFilter the playlist depends on the channel title by a whitelist (regular expression).\nNote that, if set, only channels match the pattern will be included.\n\n### MIN_HEIGHT\n\nHEIGHT is a dominant factor of stream quality,\nwhere 1080 in height means 1080p.\nIt is necessary to set this filter\nif the stream is supposed to be shown on high resolution screens,\ne.g., a 4K TV.\n\n### 
CONFIG\n\n[CONFIG](https://github.com/huxuan/iptvtools/blob/master/config.json)\nis a customized configuration to unify ``title`` and ``id``.\n``title`` is the exact title which will be shown and\nthe ``id`` is used for potential match with the template.\nA general idea is to make the ``id`` as simple as possible\nso they will have a high possibility to match,\nthough there might be some false positive cases.\nSo, ``id_unifiers`` can be treated as\na further simplification of ``title_unifiers``.\n\nFor example, entry ``\"-\": \"\"`` will convert ``CCTV-1`` to ``CCTV1``,\nentry ``\"＋\": \"+\"`` will convert ``CCTV-5＋`` to ``CCTV-5+``.\nA whole replacement is also possible,\nas ``\"BTV冬奥纪实\": \"北京纪实\"`` will\nmatch the whole of ``BTV冬奥纪实`` and\nreplace it with ``北京纪实``.\n\nPlease be cautious about using too many of them\nsince this simplified strategy is just for some basic requirements.\nSome entries may lead to some unexpected changes.\nFor example, entry ``\"CCTV-1\": \"中央1套\"`` will convert ``CCTV-11`` to ``中央1套1``.\nSo, in general,\nonly keep those necessary entries and keep it as simple as possible.\n\n### SORT_KEYS\n\nList of keys to sort the channels. Valid options currently supported are\n`group-title`, `tvg-id`, `template-order`, `height` and `title`. 
By default, it will work the same as\n`-s group-title tvg-id height title`, and you can change the order as you want.\nIf you want to have more keys to be supported, just let me know.\n\n### TEMPLATES\n\nAn m3u playlist with well-maintained information to cooperate with EPG.\nPlease refer to [Well‐maintained templates & EPGs](https://github.com/huxuan/iptvtools/wiki/Well%E2%80%90maintained-templates-&-EPGs).\n\nBTW, there is also a list [Well‐maintained playlists](https://github.com/huxuan/iptvtools/wiki/Well%E2%80%90maintained-playlists).\n\n### TIMEOUT\n\nTIMEOUT is used to check the connectivity.\nA direct check, which only fetches the response header, tends to be fast.\nBut it usually takes seconds to probe stream information,\ndepending on your network (bandwidth and latency).\nFor me, it is about 3 to 5 seconds.\n\n### UDPXY\n\nIf the IPTV streams are forwarded by UDPXY,\nsetting it will convert all the urls automatically.\nFor example, with UDPXY `http://192.168.0.1:8888/`,\n`rtp://123.45.67.89:1234` will be converted to\n`http://192.168.0.1:8888/rtp/123.45.67.89:1234`.\n\n### SKIP_CONNECTIVITY_CHECK\n\nSkip any connectivity check (to be used to just apply title and id unifiers);\nuse it in combination with `-I 0`.\n"
  },
  {
    "path": "docs/usage/index.md",
    "content": "# Usage\n\n```{toctree}\nfilter\n```\n"
  },
  {
    "path": "pyproject.toml",
    "content": "[build-system]\nbuild-backend = \"setuptools.build_meta\"\nrequires = [\n    \"setuptools==75.8.0\",\n    \"setuptools-scm==8.2.0\",\n]\n\n[project]\nname = \"iptvtools\"\ndescription = \"A set of scripts that help to better IPTV experience.\"\nreadme = \"README.md\"\nkeywords = [\n    \"iptvtools\",\n    \"iptvtools-cli\",\n    \"m3u filter\",\n    \"serious-scaffold\",\n]\nlicense = { text = \"MIT\" }\nauthors = [\n    { email = \"i@huxuan.org\", name = \"huxuan\" },\n]\nrequires-python = \">=3.10\"\nclassifiers = [\n    \"Development Status :: 4 - Beta\",\n    \"License :: OSI Approved :: MIT License\",\n    \"Operating System :: MacOS :: MacOS X\",\n    \"Operating System :: POSIX :: Linux\",\n    \"Programming Language :: Python :: 3 :: Only\",\n    \"Programming Language :: Python :: 3.10\",\n    \"Programming Language :: Python :: 3.11\",\n    \"Programming Language :: Python :: 3.12\",\n    \"Programming Language :: Python :: 3.13\",\n]\ndynamic = [\n    \"version\",\n]\ndependencies = [\n    \"click>=8.1.8\",\n    \"pydantic-settings>=2.7.1\",\n    \"requests>=2.32.3\",\n    \"tqdm>=4.67.1\",\n]\nurls.documentation = \"https://huxuan.github.io/iptvtools\"\nurls.issue = \"https://github.com/huxuan/iptvtools/issues\"\nurls.repository = \"https://github.com/huxuan/iptvtools\"\nscripts.iptvtools-cli = \"iptvtools.cli:cli\"\n\n[dependency-groups]\ntest = [\n    \"coverage>=7.6.10\",\n    \"pytest>=8.3.4\",\n]\ndoc = [\n    \"autodoc-pydantic>=2.2.0\",\n    \"coverage>=7.6.10\",\n    \"furo>=2024.8.6\",\n    \"mypy[reports]>=1.14.1\",\n    \"myst-parser>=4.0.0\",\n    \"pytest>=8.3.4\",\n    \"sphinx>=8.1.3\",\n    \"sphinx-click>=6.0.0\",\n    \"sphinx-design>=0.6.1\",\n    \"types-requests>=2.32.0.20241016\",\n    \"types-tqdm>=4.67.0.20241221\",\n]\nlint = [\n    \"mypy>=1.14.1\",\n    \"types-requests>=2.32.0.20241016\",\n    \"types-tqdm>=4.67.0.20241221\",\n]\n\n[tool.pdm]\ndistribution = true\n\n[tool.setuptools_scm]\nfallback_version = 
\"0.0.0\"\n\n[tool.ruff]\nsrc = [\n    \"src\",\n]\nfix = true\nlint.select = [\n    \"B\",      # flake8-bugbear\n    \"D\",      # pydocstyle\n    \"E\",      # pycodestyle error\n    \"F\",      # Pyflakes\n    \"I\",      # isort\n    \"RUF100\", # Unused noqa directive\n    \"S\",      # flake8-bandit\n    \"SIM\",    # flake8-simplify\n    \"UP\",     # pyupgrade\n    \"W\",      # pycodestyle warning\n]\nlint.per-file-ignores.\"tests/*\" = [\n    \"S101\",\n]\nlint.pydocstyle.convention = \"google\"\n\n[tool.codespell]\nwrite-changes = true\ncheck-filenames = true\n\n[tool.pyproject-fmt]\nindent = 4\nkeep_full_version = true\nmax_supported_python = \"3.13\"\n\n[tool.pytest.ini_options]\naddopts = \"-l -s --durations=0\"\nlog_cli = true\nlog_cli_level = \"info\"\nlog_date_format = \"%Y-%m-%d %H:%M:%S\"\nlog_format = \"%(asctime)s %(levelname)s %(message)s\"\nminversion = \"6.0\"\n\n[tool.coverage.report]\nfail_under = 0\n\n[tool.coverage.run]\nsource = [\n    \"iptvtools\",\n]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_any_unimported = true\ndisallow_untyped_defs = true\nenable_error_code = [\n    \"ignore-without-code\",\n]\nexclude = [\n    \"build\",\n    \"doc\",\n]\nno_implicit_optional = true\nshow_error_codes = true\nwarn_return_any = true\nwarn_unused_ignores = true\n"
  },
  {
    "path": "scripts/generate-coverage-badge.sh",
    "content": "#!/bin/bash\n\nTOTAL_COVERAGE=$(coverage report --format=total)\nCOLOR=\"#9f9f9f\"\n\nif [ \"$TOTAL_COVERAGE\" -gt 95 ]; then\n    COLOR=\"#4c1\"\nelif [ \"$TOTAL_COVERAGE\" -gt 90 ]; then\n    COLOR=\"#a3c51c\"\nelif [ \"$TOTAL_COVERAGE\" -gt 75 ]; then\n    COLOR=\"#dfb317\"\nelif [ \"$TOTAL_COVERAGE\" -gt 0 ]; then\n    COLOR=\"#e05d44\"\nfi\n\nCOVERAGE_JSON_DIR=${1:-.}\nmkdir -p \"$COVERAGE_JSON_DIR\"\n\ncat << EOF > \"${COVERAGE_JSON_DIR}/coverage.json\"\n{\n  \"schemaVersion\": 1,\n  \"label\": \"coverage\",\n  \"message\": \"${TOTAL_COVERAGE}%\",\n  \"color\": \"${COLOR}\"\n}\nEOF\n"
  },
  {
    "path": "src/iptvtools/__init__.py",
    "content": "\"\"\"Init for the project.\"\"\"\n"
  },
  {
    "path": "src/iptvtools/cli.py",
    "content": "\"\"\"Command Line Interface.\"\"\"\n\nimport logging\nimport shutil\n\nimport click\n\nfrom iptvtools import exceptions\nfrom iptvtools.config import Config\nfrom iptvtools.constants import defaults, helps\nfrom iptvtools.models import Playlist\n\n\n@click.group(\n    context_settings={\"show_default\": True},\n)\n@click.version_option()\ndef cli() -> None:\n    \"\"\"CLI for IPTVTools.\"\"\"\n\n\n@cli.command()\n@click.option(\"--channel-exclude\", help=helps.CHANNEL_EXCLUDE)\n@click.option(\"--channel-include\", help=helps.CHANNEL_INCLUDE)\n@click.option(\"--group-exclude\", help=helps.GROUP_EXCLUDE)\n@click.option(\"--group-include\", help=helps.GROUP_INCLUDE)\n@click.option(\n    \"--max-height\", default=defaults.MAX_HEIGHT, type=int, help=helps.MAX_HEIGHT\n)\n@click.option(\n    \"--min-height\", default=defaults.MIN_HEIGHT, type=int, help=helps.MIN_HEIGHT\n)\n@click.option(\"-c\", \"--config\", default=defaults.CONFIG, help=helps.CONFIG)\n@click.option(\n    \"-i\", \"--inputs\", multiple=True, default=defaults.INPUTS, help=helps.INPUTS\n)\n@click.option(\n    \"-I\", \"--interval\", default=defaults.INTERVAL, type=int, help=helps.INTERVAL\n)\n@click.option(\"-L\", \"--log-level\", default=defaults.LOG_LEVEL, help=helps.LOG_LEVEL)\n@click.option(\n    \"-n\",\n    \"--skip-connectivity-check\",\n    is_flag=True,\n    help=helps.SKIP_CONNECTIVITY_CHECK,\n)\n@click.option(\"-o\", \"--output\", default=defaults.OUTPUT, help=helps.OUTPUT)\n@click.option(\n    \"-r\",\n    \"--replace-group-by-source\",\n    is_flag=True,\n    help=helps.REPLACE_GROUP_BY_SOURCE,\n)\n@click.option(\n    \"-R\",\n    \"--resolution-on-title\",\n    is_flag=True,\n    help=helps.RESOLUTION_ON_TITLE,\n)\n@click.option(\n    \"-s\", \"--sort-keys\", multiple=True, default=defaults.SORT_KEYS, help=helps.SORT_KEYS\n)\n@click.option(\n    \"-t\", \"--templates\", multiple=True, default=defaults.TEMPLATES, help=helps.TEMPLATES\n)\n@click.option(\"-T\", \"--timeout\", 
default=defaults.TIMEOUT, type=int, help=helps.TIMEOUT)\n@click.option(\"-u\", \"--udpxy\", default=defaults.UDPXY, help=helps.UDPXY)\ndef filter(\n    channel_exclude: str,\n    channel_include: str,\n    group_exclude: str,\n    group_include: str,\n    max_height: int,\n    min_height: int,\n    config: str,\n    inputs: list[str],\n    interval: int,\n    log_level: str,\n    skip_connectivity_check: bool,\n    output: str,\n    replace_group_by_source: bool,\n    resolution_on_title: bool,\n    sort_keys: list[str],\n    templates: list[str],\n    timeout: int,\n    udpxy: str,\n) -> None:\n    \"\"\"Filter m3u playlists.\"\"\"\n    logging.basicConfig(level=log_level.upper())\n\n    if (max_height or min_height or resolution_on_title) and shutil.which(\n        \"ffprobe\"\n    ) is None:\n        raise exceptions.FFmpegNotInstalledError()\n\n    Config.init(config)\n    playlist = Playlist(\n        channel_exclude,\n        channel_include,\n        group_exclude,\n        group_include,\n        max_height,\n        min_height,\n        inputs,\n        interval,\n        skip_connectivity_check,\n        output,\n        replace_group_by_source,\n        resolution_on_title,\n        sort_keys,\n        templates,\n        timeout,\n        udpxy,\n    )\n    playlist.parse()\n    playlist.filter()\n    playlist.export()\n    if playlist.inaccessible_urls:\n        logging.info(\"Inaccessible Urls:\")\n        logging.info(\"\\n\".join(sorted(playlist.inaccessible_urls)))\n    if playlist.low_res_urls:\n        logging.info(\"Low resolution Urls:\")\n        logging.info(\"\\n\".join(sorted(playlist.low_res_urls)))\n    if playlist.high_res_urls:\n        logging.info(\"High resolution Urls:\")\n        logging.info(\"\\n\".join(sorted(playlist.high_res_urls)))\n"
  },
  {
    "path": "src/iptvtools/config.py",
    "content": "#!/usr/bin/env python\n\"\"\"Configuration for iptvtools.\n\nFile: config.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nimport json\nimport os\nimport os.path\nfrom pathlib import Path\nfrom typing import Any\n\n\nclass MetaConfig(type):\n    \"\"\"Configuration for iptvtools.\"\"\"\n\n    config: dict[str, Any] = {}\n\n    @classmethod\n    def init(cls, config_file: str | Path) -> None:\n        \"\"\"Initialize configuration.\"\"\"\n        if os.path.isfile(config_file):\n            with open(config_file) as fin:\n                cls.config = json.load(fin)\n\n    def __getattr__(cls, key: str) -> Any:\n        \"\"\"Get configuration with key.\"\"\"\n        return cls.config.get(key, {})\n\n\nclass Config(metaclass=MetaConfig):  # pylint: disable=R0903\n    \"\"\"Configuration for iptvtools.\"\"\"\n"
  },
  {
    "path": "src/iptvtools/constants/__init__.py",
    "content": "#!/usr/bin/env python\n\"\"\"Constants for iptvtools.\n\nFile: __init__.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n"
  },
  {
    "path": "src/iptvtools/constants/defaults.py",
    "content": "#!/usr/bin/env python\n\"\"\"Defaults for iptvtools.\n\nFile: constants.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nCONFIG = \"config.json\"\nINPUTS = [\"https://iptv-org.github.io/iptv/index.m3u\"]\nINTERVAL = 1\nLOG_LEVEL = \"INFO\"\nMAX_HEIGHT = -1\nMIN_HEIGHT = 0\nOUTPUT = \"iptvtools.m3u\"\nSORT_KEYS = [\"group-title\", \"tvg-id\", \"height\", \"title\"]\nTEMPLATES: list[str] = []\nTIMEOUT = 10\nUDPXY = None\n"
  },
  {
    "path": "src/iptvtools/constants/helps.py",
    "content": "#!/usr/bin/env python\n\"\"\"Helps for iptvtools.\n\nFile: constants.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nCONFIG = \"Configuration file to unify title and id.\"\nCHANNEL_EXCLUDE = (\n    \"Channels to exclude with regex. \"\n    \"Note: Blacklist has higher priority than whitelist.\"\n)\nCHANNEL_INCLUDE = (\n    \"Channels to include with regex. \"\n    \"Note: Only channels in the whitelist will be included if set.\"\n)\nGROUP_EXCLUDE = (\n    \"Groups to exclude with regex.Note: Blacklist has higher priority than whitelist.\"\n)\nGROUP_INCLUDE = (\n    \"Groups to include with regex.\"\n    \"Note: Only groups in the whitelist will be included if set.\"\n)\nINPUTS = \"One or more input m3u playlist files/urls.\"\nINTERVAL = \"Interval in seconds between successive fetching requests.\"\nLOG_LEVEL = \"Log level.\"\nMAX_HEIGHT = \"Maximum height/resolution to accept, -1 means no resolution filtering.\"\nMIN_HEIGHT = \"Minimum height/resolution to accept, 0 means no resolution filtering.\"\nOUTPUT = \"Output file name.\"\nREPLACE_GROUP_BY_SOURCE = (\n    \"Flag to replace the group title with the source name, where the source \"\n    \"name is the basename of input files/urls without extension.\"\n)\nRESOLUTION_ON_TITLE = (\n    \"Flag to append resolution such as 8K, 4K, 1080p, 720p to the title.\"\n)\nSORT_KEYS = (\n    \"List of keys to sort the channels. Valid options currently supported \"\n    \"are `group-title`, `tvg-id`, `template-order`, `height` and `title`.\"\n)\nTEMPLATES = (\n    \"Template m3u files/urls with well-maintained channel information to \"\n    \"replace the matched entries.\"\n)\nTIMEOUT = \"Timeout threshold for fetching request.\"\nUDPXY = \"UDP Proxy for certain IPTV channels.\"\nSKIP_CONNECTIVITY_CHECK = \"Skip connectivity check.\"\n"
  },
  {
    "path": "src/iptvtools/constants/patterns.py",
    "content": "#!/usr/bin/env python\n\"\"\"Patterns for iptvtools.\n\nFile: constants.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nimport re\n\nPARAMS = re.compile(r'(\\S+)=\"(.*?)\"')\nEXTINF = re.compile(r\"^#EXTINF:(?P<duration>-?\\d+?) ?(?P<params>.*),(?P<title>.*?)$\")\nEXTM3U = re.compile(r\"^#EXTM3U ?(?P<params>.*)$\")\n"
  },
  {
    "path": "src/iptvtools/constants/tags.py",
    "content": "#!/usr/bin/env python\n\"\"\"Tags for iptvtools.\n\nFile: constants.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nM3U = \"#EXTM3U\"\nINF = \"#EXTINF\"\n"
  },
  {
    "path": "src/iptvtools/exceptions.py",
    "content": "#!/usr/bin/env python\n\"\"\"Custom exceptions for iptvtools.\n\nFile: exceptions.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\n\nclass BaseCustomException(RuntimeError):\n    \"\"\"Base Custom Exception.\"\"\"\n\n\nclass FFmpegNotInstalledError(BaseCustomException):\n    \"\"\"Raise when FFmpeg is not installed.\"\"\"\n\n    def __init__(self) -> None:\n        \"\"\"Init for FfmpegNotInstalledError.\"\"\"\n        super().__init__(\n            \"Need `FFmpeg` for resolution related processing.\\n\"\n            \"Please install it according to \"\n            \"`https://www.ffmpeg.org/download.html`.\"\n        )\n"
  },
  {
    "path": "src/iptvtools/models.py",
    "content": "#!/usr/bin/env python\n\"\"\"Playlist which contains all the channels' information.\n\nFile: models.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nimport logging\nimport os.path\nimport random\nimport re\nimport sys\nimport time\nfrom typing import Any\n\nfrom tqdm import tqdm\n\nfrom iptvtools import parsers, utils\nfrom iptvtools.constants import defaults, tags\n\n\nclass Playlist:\n    \"\"\"Playlist model.\"\"\"\n\n    def __init__(\n        self,\n        channel_exclude: str,\n        channel_include: str,\n        group_exclude: str,\n        group_include: str,\n        max_height: int,\n        min_height: int,\n        inputs: list[str],\n        interval: int,\n        skip_connectivity_check: bool,\n        output: str,\n        replace_group_by_source: bool,\n        resolution_on_title: bool,\n        sort_keys: list[str],\n        templates: list[str],\n        timeout: int,\n        udpxy: str,\n    ) -> None:\n        \"\"\"Init for Playlist.\"\"\"\n        self.channel_exclude = channel_exclude\n        self.channel_include = channel_include\n        self.group_exclude = group_exclude\n        self.group_include = group_include\n        self.max_height = max_height\n        self.min_height = min_height\n        self.inputs = inputs\n        self.interval = interval\n        self.skip_connectivity_check = skip_connectivity_check\n        self.output = output\n        self.replace_group_by_source = replace_group_by_source\n        self.resolution_on_title = resolution_on_title\n        self.sort_keys = sort_keys\n        self.templates = templates\n        self.timeout = timeout\n        self.udpxy = udpxy\n        self.data: dict[str, Any] = {}\n        self.id_url: dict[str, Any] = {}\n        self.inaccessible_urls: set[str] = set()\n        self.low_res_urls: set[str] = set()\n        self.high_res_urls: set[str] = set()\n        self.tvg_url = None\n\n    def export(self) -> None:\n        \"\"\"Export playlist 
information.\"\"\"\n        res = []\n        res.append(tags.M3U)\n        if self.tvg_url is not None:\n            res[0] += f' x-tvg-url=\"{self.tvg_url}\"'\n        for url in sorted(self.data, key=self.__custom_sort):\n            if (\n                url in self.inaccessible_urls\n                or url in self.low_res_urls\n                or url in self.high_res_urls\n            ):\n                continue\n\n            entry = self.data[url]\n            params_dict = entry.get(\"params\", {})\n            if self.replace_group_by_source:\n                params_dict[\"group-title\"] = self.data[url][\"source\"]\n            params = \" \".join(\n                [f'{key}=\"{value}\"' for key, value in params_dict.items()]\n            )\n            duration = entry[\"duration\"]\n            title = entry[\"title\"]\n            if self.resolution_on_title:\n                height = self.data[url].get(\"height\")\n                title += f\" [{utils.height_to_resolution(height)}]\"\n\n            res.append(f\"{tags.INF}:{duration} {params},{title}\\n{url}\")\n\n        with open(self.output, \"w\", encoding=\"utf-8\") as f:\n            f.write(\"\\n\".join(res))\n\n    def parse(self) -> None:\n        \"\"\"Parse contents.\"\"\"\n        self._parse(self.inputs)\n        logging.debug(self.data)\n        self._parse(self.templates, is_template=True)\n        logging.debug(self.data)\n\n    def _parse(self, sources: list[str], is_template: bool = False) -> None:\n        \"\"\"Parse playlist sources.\"\"\"\n        template_order = 0\n        for source in sources:\n            source_name = os.path.splitext(os.path.basename(source))[0]\n            current_item = {}\n            skip = False\n            is_first_line = True\n            for line in parsers.parse_content_to_lines(source):\n                if not line:\n                    continue\n                if is_first_line:\n                    is_first_line = False\n                    
if line.startswith(tags.M3U):\n                        res = parsers.parse_tag_m3u(line)\n                        if res.get(\"tvg-url\"):\n                            self.tvg_url = res.get(\"tvg-url\")\n                        continue\n                if skip:\n                    skip = False\n                    continue\n                if line.startswith(tags.INF):\n                    current_item = parsers.parse_tag_inf(line)\n                    current_item = utils.unify_title_and_id(current_item)\n                    current_id = current_item[\"id\"]\n\n                    params = current_item.get(\"params\", {})\n                    group = params.get(\"group-title\", \"\")\n                    if not skip and self.group_include:\n                        if re.search(self.group_include, group):\n                            logging.debug(f\"Group to include: `{group}`.\")\n                        else:\n                            skip = True\n                    if (\n                        not skip\n                        and self.group_exclude\n                        and re.search(self.group_exclude, group)\n                    ):\n                        skip = True\n                        logging.debug(f\"Group to exclude: `{group}`.\")\n\n                    title = current_item.get(\"title\", \"\")\n                    if not skip and self.channel_include:\n                        if re.search(self.channel_include, title):\n                            logging.debug(f\"Channel to include: `{title}`.\")\n                        else:\n                            skip = True\n                    if (\n                        not skip\n                        and self.channel_exclude\n                        and re.search(self.channel_exclude, title)\n                    ):\n                        skip = True\n                        logging.debug(f\"Channel to exclude: `{title}`.\")\n\n                else:\n                    if 
is_template:\n                        template_order = template_order + 1\n                        for url in self.id_url.get(current_id, []):\n                            current_params = current_item[\"params\"]\n                            current_params[\"template-order\"] = template_order\n                            self.data[url][\"params\"].update(current_params)\n                            self.data[url][\"title\"] = current_item[\"title\"]\n                    else:\n                        if self.udpxy:\n                            line = utils.convert_url_with_udpxy(line, self.udpxy)\n                        current_item[\"source\"] = source_name\n                        self.data[line] = current_item\n\n                        if current_id not in self.id_url:\n                            self.id_url[current_id] = []\n                        self.id_url[current_id].append(line)\n\n    def filter(self) -> None:\n        \"\"\"Filter process.\"\"\"\n        urls = list(self.data.keys())\n        random.shuffle(urls)\n        pbar = tqdm(urls, ascii=True)\n        for url in pbar:\n            status = \"OK\"\n            time.sleep(self.interval)\n            if self.skip_connectivity_check:\n                status = \"Skipped\"\n            elif self.max_height or self.min_height or self.resolution_on_title:\n                height = utils.check_stream(url, self.timeout)\n                if height == 0:\n                    self.inaccessible_urls.add(url)\n                    status = \"Inaccessible (0 height)\"\n                elif height < self.min_height:\n                    self.low_res_urls.add(url)\n                    status = \"Low Resolution\"\n                elif (\n                    self.max_height != defaults.MAX_HEIGHT and height > self.max_height\n                ):\n                    self.high_res_urls.add(url)\n                    status = \"High Resolution\"\n                self.data[url][\"height\"] = height\n            elif 
not utils.check_connectivity(url, self.timeout):\n                self.inaccessible_urls.add(url)\n                status = \"Inaccessible (No connectivity)\"\n            pbar.write(f\"{url}, {status}!\")\n\n    def __custom_sort(self, url: str) -> list[Any]:\n        \"\"\"Sort by tvg-id, resolution, template-order and title.\"\"\"\n        res = []\n        for key in self.sort_keys:\n            entry = self.data[url]\n            if key == \"height\":\n                res.append(-entry.get(key, 0))\n            elif key == \"title\":\n                res.append(entry.get(key, \"\"))\n            elif key == \"tvg-id\":\n                res.append(\n                    int(re.sub(r\"\\D\", \"\", entry[\"params\"].get(key, \"\")) or sys.maxsize)\n                )\n            elif key == \"template-order\":\n                res.append(int(entry[\"params\"].get(key) or sys.maxsize))\n            elif key == \"group-title\":\n                res.append(entry[\"params\"].get(key) or \"\")\n        return res\n"
  },
  {
    "path": "src/iptvtools/parsers.py",
    "content": "#!/usr/bin/env python\n\"\"\"Simplified parser for m3u8 file.\n\nFile: parser.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nimport os.path\nimport re\nimport tempfile\nfrom collections.abc import Iterator\nfrom typing import Any\n\nimport requests\n\nfrom iptvtools.constants import patterns\n\n\ndef parse_content_to_lines(content: str, timeout: int | None = None) -> Iterator[str]:\n    \"\"\"Universal interface to split content into lines.\"\"\"\n    if os.path.isfile(content):\n        with open(content, encoding=\"utf-8\") as fp:\n            for line in fp:\n                yield re.sub(r\"[^\\S ]+\", \"\", line.strip())\n    else:\n        with tempfile.TemporaryFile(mode=\"w+t\") as fp:\n            fp.write(requests.get(content, timeout=timeout).text)\n            fp.seek(0)\n            for line in fp:\n                yield re.sub(r\"[^\\S ]+\", \"\", line.strip())\n\n\ndef parse_tag_inf(line: str) -> dict[str, Any]:\n    \"\"\"Parse INF content.\"\"\"\n    match = patterns.EXTINF.fullmatch(line)\n    res = match and match.groupdict() or {}\n    if \"params\" in res:\n        res[\"params\"] = dict(patterns.PARAMS.findall(res[\"params\"]))\n    return res\n\n\ndef parse_tag_m3u(line: str) -> dict[str, Any]:\n    \"\"\"Parse M3U content.\"\"\"\n    match = patterns.EXTM3U.fullmatch(line)\n    return match and match.groupdict() or {}\n"
  },
  {
    "path": "src/iptvtools/py.typed",
    "content": ""
  },
  {
    "path": "src/iptvtools/settings.py",
    "content": "\"\"\"Settings Module.\"\"\"\n\nimport logging\nfrom logging import getLevelName\n\nfrom pydantic_settings import BaseSettings, SettingsConfigDict\n\n\nclass GlobalSettings(BaseSettings):\n    \"\"\"System level settings.\"\"\"\n\n    ci: bool = False\n    \"\"\"Indicator for whether or not in CI/CD environment.\"\"\"\n\n\nclass Settings(BaseSettings):\n    \"\"\"Project specific settings.\"\"\"\n\n    logging_level: str | None = getLevelName(logging.INFO)\n    \"\"\"Default logging level for the project.\"\"\"\n\n    model_config = SettingsConfigDict(\n        env_prefix=\"IPTVTOOLS_\",\n    )\n\n\n# NOTE(huxuan): `#:` style docstring is required for module attributes to satisfy both\n# autodoc [1] and `check-docstring-first` in `pre-commit` [2].\n# [1] https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-autoattribute\n# [2] https://github.com/pre-commit/pre-commit-hooks/issues/159#issuecomment-559886109\n\n#: Instance for system level settings.\nglobal_settings = GlobalSettings()\n\n#: Instance for project specific settings.\nsettings = Settings()\n"
  },
  {
    "path": "src/iptvtools/utils.py",
    "content": "#!/usr/bin/env python\n\"\"\"Relevant Utilities.\n\nFile: utils.py\nAuthor: huxuan\nEmail: i(at)huxuan.org\n\"\"\"\n\nimport json\nimport logging\nimport socket\nimport struct\nfrom subprocess import (\n    PIPE,\n    Popen,\n    TimeoutExpired,\n)\nfrom typing import Any\nfrom urllib.parse import urlparse\n\nimport requests\n\nfrom iptvtools.config import Config\n\nPROBE_COMMAND = (\n    \"ffprobe -hide_banner -show_streams -select_streams v -of json=c=1 -v quiet\"\n)\n\nUDP_SCHEME = (\n    \"udp\",\n    \"rtp\",\n)\n\n\ndef convert_url_with_udpxy(orig_url: str, udpxy: str) -> str:\n    \"\"\"Convert url with udpxy.\"\"\"\n    parsed_url = urlparse(orig_url)\n    if parsed_url.scheme in UDP_SCHEME:\n        return f\"{udpxy}/{parsed_url.scheme}/{parsed_url.netloc}\"\n    return orig_url\n\n\ndef unify_title_and_id(item: dict[str, Any]) -> dict[str, Any]:\n    \"\"\"Unify title and id.\"\"\"\n    for title_unifier in sorted(Config.title_unifiers):\n        if title_unifier in item[\"title\"]:\n            item[\"title\"] = item[\"title\"].replace(\n                title_unifier, Config.title_unifiers[title_unifier]\n            )\n\n    if \"tvg-name\" in item.get(\"params\", {}):\n        item[\"id\"] = item[\"params\"][\"tvg-name\"]\n    else:\n        item[\"id\"] = item[\"title\"]\n\n    for id_unifier in sorted(Config.id_unifiers):\n        if id_unifier in item[\"id\"]:\n            item[\"id\"] = item[\"id\"].replace(id_unifier, Config.id_unifiers[id_unifier])\n\n    return item\n\n\ndef probe(url: str, timeout: int | None = None) -> Any:\n    \"\"\"Invoke probe to get stream information.\"\"\"\n    outs = None\n    with Popen(  # noqa: S603\n        f\"{PROBE_COMMAND} {url}\".split(), stdout=PIPE, stderr=PIPE\n    ) as proc:\n        try:\n            outs, _ = proc.communicate(timeout=timeout)\n        except TimeoutExpired:\n            proc.kill()\n    if outs:\n        try:\n            return json.loads(outs.decode(\"utf-8\"))\n        
except json.JSONDecodeError as exc:\n            logging.error(exc)\n    return None\n\n\ndef check_stream(url: str, timeout: int | None = None) -> int:\n    \"\"\"Check stream information and return height.\"\"\"\n    stream_info = probe(url, timeout)\n    if stream_info and stream_info.get(\"streams\"):\n        return max([int(stream.get(\"height\", 0)) for stream in stream_info[\"streams\"]])\n    return 0\n\n\ndef check_connectivity(url: str, timeout: int | None = None) -> bool:\n    \"\"\"Check connectivity.\"\"\"\n    parsed_url = urlparse(url)\n    if parsed_url.scheme in UDP_SCHEME:\n        return check_udp_connectivity(parsed_url.netloc, timeout)\n    return check_http_connectivity(url, timeout)\n\n\ndef check_udp_connectivity(url: str, timeout: int | None = None) -> bool:\n    \"\"\"Check UDP connectivity.\"\"\"\n    ipaddr, port = url.rsplit(\":\", 1)\n    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)\n    sock.settimeout(timeout)\n    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)\n    sock.bind((\"\", int(port)))\n    mreq = struct.pack(\"4sl\", socket.inet_aton(ipaddr), socket.INADDR_ANY)\n    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)\n    try:\n        if sock.recv(10240):\n            return True\n    except TimeoutError:\n        pass\n    return False\n\n\ndef check_http_connectivity(url: str, timeout: int | None = None) -> bool:\n    \"\"\"Check HTTP connectivity.\"\"\"\n    try:\n        return requests.get(url, timeout=timeout, stream=True).ok\n    except requests.RequestException:\n        return False\n\n\ndef height_to_resolution(height: int) -> str:\n    \"\"\"Convert height to resolution.\"\"\"\n    if not height:\n        return \"\"\n    if height >= 4320:\n        return \"8K\"\n    if height >= 2160:\n        return \"4K\"\n    if height >= 1080:\n        return \"1080p\"\n    if height >= 720:\n        
return \"720p\"\n    return f\"{height}p\"\n"
  },
  {
    "path": "tests/__init__.py",
    "content": "\"\"\"Init for the test.\"\"\"\n"
  },
  {
    "path": "tests/cli_test.py",
    "content": "\"\"\"Test for cli.\"\"\"\n\nfrom click.testing import CliRunner\n\nfrom iptvtools.cli import cli\n\n\ndef test_cli() -> None:\n    \"\"\"Test for cli.\"\"\"\n    runner = CliRunner()\n    result = runner.invoke(cli)\n    assert result.exit_code == 0\n    assert \"Usage\" in result.output\n\n\ndef test_cli_filter_help() -> None:\n    \"\"\"Test the help for filter subcommand of the cli.\"\"\"\n    runner = CliRunner()\n    result = runner.invoke(cli, [\"filter\", \"--help\"])\n    assert result.exit_code == 0\n    assert \"Show this message and exit.\" in result.output\n"
  },
  {
    "path": "tests/pkg_test.py",
    "content": "\"\"\"Test for pkg.\"\"\"\n\nimport iptvtools\n\n\ndef test_pkg() -> None:\n    \"\"\"Test for pkg.\"\"\"\n    assert iptvtools.__package__ == \"iptvtools\"\n"
  },
  {
    "path": "tests/settings_test.py",
    "content": "\"\"\"Test for settings.\"\"\"\n\nimport os\n\nfrom iptvtools.settings import global_settings, settings\n\n\ndef test_settings() -> None:\n    \"\"\"Test for settings.\"\"\"\n    assert settings.logging_level == os.getenv(\n        \"IPTVTOOLS_LOGGING_LEVEL\",\n        \"INFO\",\n    )\n    assert str(global_settings.ci).lower() == os.getenv(\"CI\", \"False\").lower()\n"
  }
]