[
  {
    "path": ".coveragerc",
    "content": "[run]\nbranch = True\nsource = hitherdither\ninclude = */hitherdither/*\nomit =\n    */setup.py\n\n[report]\nexclude_lines =\n    except NameError\n    except ImportError\n\n\n\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE.md",
    "content": "* bleak version:\n* Python version:\n* Operating System:\n* BlueZ version (`bluetoothctl -v`) in case of Linux: \n\n### Description\n\nDescribe what you were trying to get done.\nTell us what happened, what went wrong, and what you expected to happen.\n\n### What I Did\n\n```\nPaste the command(s) you ran and the output.\nIf there was a crash, please include the traceback here.\n```\n"
  },
  {
    "path": ".github/workflows/build_and_test.yml",
    "content": "name: Build and Test\n\non:\n    push:\n        branches: [ master, develop ]\n    pull_request:\n        branches: [ master, develop ]\n\njobs:\n    build_linux:\n        name: \"Build and test\"\n        runs-on: ${{ matrix.os }}\n        strategy:\n            matrix:\n                os: [ubuntu-latest]\n                python-version: [3.7, 3.8, 3.9, '3.10', '3.11']\n        steps:\n            -   uses: actions/checkout@v4\n            -   name: Set up Python ${{ matrix.python-version }}\n                uses: actions/setup-python@v4\n                with:\n                    python-version: ${{ matrix.python-version }}\n            -   name: Upgrade pip. setuptools and wheel\n                run: python -m pip install --upgrade pip setuptools wheel\n\n            -   name: Install development dependencies\n                run: pip install flake8 pytest pytest-cov\n            -   name: Lint with flake8\n                run: |\n                    # stop the build if there are Python syntax errors or undefined names\n                    flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics\n                    # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide\n                    flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics\n            -   name: Install package as editable\n                run: pip install -e .\n            -   name: Test with pytest\n                run: |\n                    pytest tests --junitxml=junit/test-results-${{ matrix.os }}-${{ matrix.python-version }}.xml --cov=hitherdither --cov-report=xml --cov-report=html\n            -   name: Upload pytest test results\n                uses: actions/upload-artifact@v3\n                with:\n                    name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }}\n                    path: junit/test-results-${{ matrix.os }}-${{ matrix.python-version }}.xml\n                # Use always() to always run this step to publish test results when there are test failures\n                if: ${{ always() }}\n"
  },
  {
    "path": ".github/workflows/pypi-publish.yml",
    "content": "# This workflows will upload a Python Package using Twine when a release is created\n# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries\n\nname: Upload Python Package\n\non:\n  release:\n    types: [created]\n\njobs:\n  deploy:\n\n    runs-on: ubuntu-latest\n\n    steps:\n    - uses: actions/checkout@v2\n    - name: Set up Python\n      uses: actions/setup-python@v2\n      with:\n        python-version: '3.x'\n    - name: Install dependencies\n      run: |\n        python -m pip install --upgrade pip\n        pip install setuptools wheel twine\n    - name: Build and publish\n      env:\n        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}\n        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}\n      run: |\n        python setup.py sdist bdist_wheel\n        twine upload dist/*\n"
  },
  {
    "path": ".gitignore",
    "content": "hitherdither/data/*.png\ntests/astronaut.png\ntests/rocket.jpg\n\n# Created by .ignore support plugin (hsz.mobi)\n### VisualStudio template\n## Ignore Visual Studio temporary files, build results, and\n## files generated by popular Visual Studio add-ons.\n\n# User-specific files\n*.suo\n*.user\n*.userosscache\n*.sln.docstates\n\n# User-specific files (MonoDevelop/Xamarin Studio)\n*.userprefs\n\n# Build results\n[Dd]ebug/\n[Dd]ebugPublic/\n[Rr]elease/\n[Rr]eleases/\nx64/\nx86/\nbld/\n[Bb]in/\n[Oo]bj/\n[Ll]og/\n\n# Visual Studio 2015 cache/options directory\n.vs/\n# Uncomment if you have tasks that create the project's static files in wwwroot\n#wwwroot/\n\n# MSTest test Results\n[Tt]est[Rr]esult*/\n[Bb]uild[Ll]og.*\n\n# NUNIT\n*.VisualState.xml\nTestResult.xml\n\n# Build Results of an ATL Project\n[Dd]ebugPS/\n[Rr]eleasePS/\ndlldata.c\n\n# DNX\nproject.lock.json\nproject.fragment.lock.json\nartifacts/\n\n*_i.c\n*_p.c\n*_i.h\n*.ilk\n*.meta\n*.obj\n*.pch\n*.pdb\n*.pgc\n*.pgd\n*.rsp\n*.sbr\n*.tlb\n*.tli\n*.tlh\n*.tmp\n*.tmp_proj\n*.log\n*.vspscc\n*.vssscc\n.builds\n*.pidb\n*.svclog\n*.scc\n\n# Chutzpah Test files\n_Chutzpah*\n\n# Visual C++ cache files\nipch/\n*.aps\n*.ncb\n*.opendb\n*.opensdf\n*.sdf\n*.cachefile\n*.VC.db\n*.VC.VC.opendb\n\n# Visual Studio profiler\n*.psess\n*.vsp\n*.vspx\n*.sap\n\n# TFS 2012 Local Workspace\n$tf/\n\n# Guidance Automation Toolkit\n*.gpState\n\n# ReSharper is a .NET coding add-in\n_ReSharper*/\n*.[Rr]e[Ss]harper\n*.DotSettings.user\n\n# JustCode is a .NET coding add-in\n.JustCode\n\n# TeamCity is a build add-in\n_TeamCity*\n\n# DotCover is a Code Coverage Tool\n*.dotCover\n\n# NCrunch\n_NCrunch_*\n.*crunch*.local.xml\nnCrunchTemp_*\n\n# MightyMoose\n*.mm.*\nAutoTest.Net/\n\n# Web workbench (sass)\n.sass-cache/\n\n# Installshield output folder\n[Ee]xpress/\n\n# DocProject is a documentation generator 
add-in\nDocProject/buildhelp/\nDocProject/Help/*.HxT\nDocProject/Help/*.HxC\nDocProject/Help/*.hhc\nDocProject/Help/*.hhk\nDocProject/Help/*.hhp\nDocProject/Help/Html2\nDocProject/Help/html\n\n# Click-Once directory\npublish/\n\n# Publish Web Output\n*.[Pp]ublish.xml\n*.azurePubxml\n# TODO: Comment the next line if you want to checkin your web deploy settings\n# but database connection strings (with potential passwords) will be unencrypted\n*.pubxml\n*.publishproj\n\n# Microsoft Azure Web App publish settings. Comment the next line if you want to\n# checkin your Azure Web App publish settings, but sensitive information contained\n# in these scripts will be unencrypted\nPublishScripts/\n\n# NuGet Packages\n*.nupkg\n# The packages folder can be ignored because of Package Restore\n**/packages/*\n# except build/, which is used as an MSBuild target.\n!**/packages/build/\n# Uncomment if necessary however generally it will be regenerated when needed\n#!**/packages/repositories.config\n# NuGet v3's project.json files produces more ignoreable files\n*.nuget.props\n*.nuget.targets\n\n# Microsoft Azure Build Output\ncsx/\n*.build.csdef\n\n# Microsoft Azure Emulator\necf/\nrcf/\n\n# Windows Store app package directories and files\nAppPackages/\nBundleArtifacts/\nPackage.StoreAssociation.xml\n_pkginfo.txt\n\n# Visual Studio cache files\n# files ending in .cache can be ignored\n*.[Cc]ache\n# but keep track of directories ending in .cache\n!*.[Cc]ache/\n\n# Others\nClientBin/\n~$*\n*~\n*.dbmdl\n*.dbproj.schemaview\n*.pfx\n*.publishsettings\nnode_modules/\norleans.codegen.cs\n\n# Since there are multiple workflows, uncomment next line to ignore bower_components\n# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)\n#bower_components/\n\n# RIA/Silverlight projects\nGenerated_Code/\n\n# Backup & report files from converting an old project file\n# to a newer Visual Studio version. 
Backup files are not needed,\n# because we have git ;-)\n_UpgradeReport_Files/\nBackup*/\nUpgradeLog*.XML\nUpgradeLog*.htm\n\n# SQL Server files\n*.mdf\n*.ldf\n\n# Business Intelligence projects\n*.rdl.data\n*.bim.layout\n*.bim_*.settings\n\n# Microsoft Fakes\nFakesAssemblies/\n\n# GhostDoc plugin setting file\n*.GhostDoc.xml\n\n# Node.js Tools for Visual Studio\n.ntvs_analysis.dat\n\n# Visual Studio 6 build log\n*.plg\n\n# Visual Studio 6 workspace options file\n*.opt\n\n# Visual Studio LightSwitch build output\n**/*.HTMLClient/GeneratedArtifacts\n**/*.DesktopClient/GeneratedArtifacts\n**/*.DesktopClient/ModelManifest.xml\n**/*.Server/GeneratedArtifacts\n**/*.Server/ModelManifest.xml\n_Pvt_Extensions\n\n# Paket dependency manager\n.paket/paket.exe\npaket-files/\n\n# FAKE - F# Make\n.fake/\n\n# JetBrains Rider\n.idea/\n*.sln.iml\n### Python template\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nenv/\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\n*.egg-info/\n.installed.cfg\n*.egg\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*,cover\n.hypothesis/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\nlocal_settings.py\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# IPython Notebook\n.ipynb_checkpoints\n\n# pyenv\n.python-version\n\n# celery beat schedule file\ncelerybeat-schedule\n\n# dotenv\n.env\n\n# virtualenv\nvenv/\nENV/\n\n# Spyder project settings\n.spyderproject\n\n# Rope project settings\n.ropeproject\n### 
JetBrains template\n# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm\n# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839\n\n# User-specific stuff:\n.idea/workspace.xml\n.idea/tasks.xml\n.idea/dictionaries\n.idea/vcs.xml\n.idea/jsLibraryMappings.xml\n\n# Sensitive or high-churn files:\n.idea/dataSources.ids\n.idea/dataSources.xml\n.idea/dataSources.local.xml\n.idea/sqlDataSources.xml\n.idea/dynamic.xml\n.idea/uiDesigner.xml\n\n# Gradle:\n.idea/gradle.xml\n.idea/libraries\n\n# Mongo Explorer plugin:\n.idea/mongoSettings.xml\n\n## File-based project format:\n*.iws\n\n## Plugin-specific files:\n\n# IntelliJ\n/out/\n\n# mpeltonen/sbt-idea plugin\n.idea_modules/\n\n# JIRA plugin\natlassian-ide-plugin.xml\n\n# Crashlytics plugin (for Android Studio and IntelliJ)\ncom_crashlytics_export_strings.xml\ncrashlytics.properties\ncrashlytics-build.properties\nfabric.properties\nhitherdither/data/scenenodither.png\n"
  },
  {
    "path": "LICENSE",
    "content": "The MIT License\n\nCopyright (c) 2020 Henrik Blidh\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "include LICENSE README.rst\n"
  },
  {
    "path": "Pipfile",
    "content": "[[source]]\n\nurl = \"https://pypi.python.org/simple\"\nverify_ssl = true\nname = \"pypi\"\n\n\n[packages]\n\nPillow = \">=3.3.1\"\nnumpy = \">=1.9.0\"\n\n\n[dev-packages]\n\npytest = \"*\"\ntwine = \"*\"\n\n\n[requires]\n\npython_version = \"3.6\""
  },
  {
    "path": "README.rst",
    "content": "hitherdither\n============\n\n|Build Status| |Coverage Status|\n\nA package inspired by [1]_, implementing dithering algorithms that can be used with \n`PIL/Pillow <https://pillow.readthedocs.io/en/3.3.x/>`_. \n\nDescription\n-----------\n\nThis module is a small extension to `PIL/Pillow <https://pillow.readthedocs.io/en/3.3.x/>`_, adding\na more managable palette object and several dithering algorithms:\n\n* Error diffusion dithering\n    - Floyd-Steinberg\n    - Jarvis-Judice-Ninke\n    - Stucki\n    - Burkes\n    - Sierra3 \n    - Sierra2\n    - Sierra-2-4A\n    - Stevenson-Arce\n    - Atkinson\n* Standard ordered dithering\n    - Bayer matrix\n    - Cluster dot matrix\n    - Arbitrary square threshold matrix (not implemented yet)\n* Yliluoma's ordered dithering (see [1]_)\n    - Algorithm 1 \n    - Algorithm 2 (not implemented yet)\n    - Algorithm 3 (not implemented yet)\n\nThe dithering algorithms are applicable for arbitrary palettes and for both\nRGB and greyscale images.\n\nInstallation\n------------\n\n::\n\n    pip install git+https://www.github.com/hbldh/hitherdither\n\nUsage\n-----\n\nBayer dithering using a median cut palette:\n\n.. code:: python\n\n   from PIL import Image\n   import hitherdither\n\n   img = Image.open('image.jpg')\n   palette = hitherdither.palette.Palette.create_by_median_cut(img)\n   img_dithered = hitherdither.ordered.bayer.bayer_dithering(\n       img, palette, [256/4, 256/4, 256/4], order=8)\n\n`Yliluoma's Algorithm 1 <http://bisqwit.iki.fi/story/howto/dither/jy/#YliluomaSOrderedDitheringAlgorithm 1>`_\nusing a predefined palette:\n\n.. 
code:: python\n\n   from PIL import Image\n   import hitherdither\n\n   palette = hitherdither.palette.Palette(\n       [0x080000, 0x201A0B, 0x432817, 0x492910,\n        0x234309, 0x5D4F1E, 0x9C6B20, 0xA9220F,\n        0x2B347C, 0x2B7409, 0xD0CA40, 0xE8A077,\n        0x6A94AB, 0xD5C4B3, 0xFCE76E, 0xFCFAE2]\n   )\n\n   img = Image.open('image.jpg')\n   img_dithered = hitherdither.ordered.yliluoma.yliluomas_1_ordered_dithering(\n       img, palette, order=8)\n\nTests\n~~~~~\n\nTests can be run with `pytest <http://doc.pytest.org/en/latest/>`_:\n\n.. code:: sh\n\n    hbldh@devbox:~/Repos/hitherdither$ py.test tests\n    ============================= test session starts ==============================\n    platform linux -- Python 3.5.2, pytest-3.0.2, py-1.4.31, pluggy-0.3.1\n    rootdir: /home/hbldh/Repos/hitherdither, inifile: \n    collected 13 items \n\n    tests/test_bayer.py ...\n    tests/test_palette.py ..........\n\n    ========================== 13 passed in 0.11 seconds ===========================\n\nReferences\n----------\n\n.. [1] Joel Yliluoma's arbitrary-palette positional dithering algorithm (http://bisqwit.iki.fi/story/howto/dither/jy/)\n\n\n.. |Build Status| image:: https://github.com/hbldh/hitherdither/workflows/Build%20and%20Test/badge.svg\n   :target: https://github.com/hbldh/hitherdither/actions?query=workflow%3A%22Build+and+Test%22\n   :alt: Build and Test\n.. |Coverage Status| image:: https://coveralls.io/repos/github/hbldh/hitherdither/badge.svg?branch=master\n   :target: https://coveralls.io/github/hbldh/hitherdither?branch=master\n"
  },
  {
    "path": "hitherdither/__init__.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nfrom . import data\nfrom . import math\nfrom . import ordered\nfrom . import diffusion\nfrom . import palette\nfrom . import utils\nfrom .__version__ import __version__, version\n"
  },
  {
    "path": "hitherdither/__version__.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n__version__.py\n-----------\n\n:copyright: 2017-05-10 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import absolute_import\n\n# Version information.\n__version__ = \"0.1.7\"\nversion = __version__  # backwards compatibility name\n"
  },
  {
    "path": "hitherdither/data/__init__.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\ntry:\n    import pathlib2 as pathlib\nexcept ImportError:\n    import pathlib\n\ntry:\n    from urllib import urlopen\nexcept ImportError:\n    from urllib.request import urlopen\n\nfrom PIL import Image\n\n\ndef scene():\n    \"\"\"Chrono Cross PNG image used in Yliluoma's web page.\n\n    :return: The PIL image of the Chrono Cross scene.\n\n    \"\"\"\n    image_path = pathlib.Path(__file__).resolve().parent.joinpath(\"scene.png\")\n    image_url = \"http://bisqwit.iki.fi/jutut/kuvat/ordered_dither/scene.png\"\n    return _image(image_path, image_url)\n\n\ndef scene_undithered():\n    \"\"\"Chrono Cross PNG image rendered directly with specified palette.\n\n    :return: The PIL image of the undithered Chrono Cross scene.\n\n    \"\"\"\n    return _image(\n        pathlib.Path(__file__).resolve().parent.joinpath(\"scenenodither.png\"),\n        \"http://bisqwit.iki.fi/jutut/kuvat/ordered_dither/scenenodither.png\",\n    )\n\n\ndef scene_bayer0():\n    \"\"\"Chrono Cross PNG image dithered using ordered Bayer matrix method.\n\n    :return: The PIL image of the ordered Bayer matrix dithered\n        Chrono Cross scene.\n\n    \"\"\"\n    return _image(\n        pathlib.Path(__file__).resolve().parent.joinpath(\"scenebayer0.png\"),\n        \"http://bisqwit.iki.fi/jutut/kuvat/ordered_dither/scenebayer0.png\",\n    )\n\n\ndef _image(pth, url):\n    \"\"\"Load image specified in ``path``. 
If not present,\n    fetch it from ``url`` and store locally.\n\n    :param str or :class:`~pathlib.Path` pth:\n    :param str url: URL from where to fetch the image.\n    :return: The :class:`~PIL.Image` requested.\n\n    \"\"\"\n    if pth.exists():\n        return Image.open(str(pth))\n    else:\n        r = urlopen(url)\n        with open(str(pth), \"wb\") as f:\n            f.write(r.read())\n        return _image(pth, url)\n\n\ndef palette():\n    return [\n        0x080000,\n        0x201A0B,\n        0x432817,\n        0x492910,\n        0x234309,\n        0x5D4F1E,\n        0x9C6B20,\n        0xA9220F,\n        0x2B347C,\n        0x2B7409,\n        0xD0CA40,\n        0xE8A077,\n        0x6A94AB,\n        0xD5C4B3,\n        0xFCE76E,\n        0xFCFAE2,\n    ]\n"
  },
  {
    "path": "hitherdither/diffusion.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n:mod:`diffusion`\n=======================\n\n.. moduleauthor:: hbldh <henrik.blidh@swedwise.com>\nCreated on 2016-09-12, 11:34\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport numpy as np\n\n_DIFFUSION_MAPS = {\n    \"floyd-steinberg\": (\n        (1, 0, 7 / 16),\n        (-1, 1, 3 / 16),\n        (0, 1, 5 / 16),\n        (1, 1, 1 / 16),\n    ),\n    \"atkinson\": (\n        (1, 0, 1 / 8),\n        (2, 0, 1 / 8),\n        (-1, 1, 1 / 8),\n        (0, 1, 1 / 8),\n        (1, 1, 1 / 8),\n        (0, 2, 1 / 8),\n    ),\n    \"jarvis-judice-ninke\": (\n        (1, 0, 7 / 48),\n        (2, 0, 5 / 48),\n        (-2, 1, 3 / 48),\n        (-1, 1, 5 / 48),\n        (0, 1, 7 / 48),\n        (1, 1, 5 / 48),\n        (2, 1, 3 / 48),\n        (-2, 2, 1 / 48),\n        (-1, 2, 3 / 48),\n        (0, 2, 5 / 48),\n        (1, 2, 3 / 48),\n        (2, 2, 1 / 48),\n    ),\n    \"stucki\": (\n        (1, 0, 8 / 42),\n        (2, 0, 4 / 42),\n        (-2, 1, 2 / 42),\n        (-1, 1, 4 / 42),\n        (0, 1, 8 / 42),\n        (1, 1, 4 / 42),\n        (2, 1, 2 / 42),\n        (-2, 2, 1 / 42),\n        (-1, 2, 2 / 42),\n        (0, 2, 4 / 42),\n        (1, 2, 2 / 42),\n        (2, 2, 1 / 42),\n    ),\n    \"burkes\": (\n        (1, 0, 8 / 32),\n        (2, 0, 4 / 32),\n        (-2, 1, 2 / 32),\n        (-1, 1, 4 / 32),\n        (0, 1, 8 / 32),\n        (1, 1, 4 / 32),\n        (2, 1, 2 / 32),\n    ),\n    \"sierra3\": (\n        (1, 0, 5 / 32),\n        (2, 0, 3 / 32),\n        (-2, 1, 2 / 32),\n        (-1, 1, 4 / 32),\n        (0, 1, 5 / 32),\n        (1, 1, 4 / 32),\n        (2, 1, 2 / 32),\n        (-1, 2, 2 / 32),\n        (0, 2, 3 / 32),\n        (1, 2, 2 / 32),\n    ),\n    \"sierra2\": (\n        (1, 0, 4 / 16),\n        (2, 0, 3 / 16),\n        (-2, 1, 1 / 16),\n        (-1, 1, 2 / 
16),\n        (0, 1, 3 / 16),\n        (1, 1, 2 / 16),\n        (2, 1, 1 / 16),\n    ),\n    \"sierra-2-4a\": (\n        (1, 0, 2 / 4),\n        (-1, 1, 1 / 4),\n        (0, 1, 1 / 4),\n    ),\n}\n\n\ndef error_diffusion_dithering(image, palette, method=\"floyd-steinberg\", order=2):\n    \"\"\"Perform image dithering by error diffusion method.\n\n    .. note:: Error diffusion is totally unoptimized and therefore very slow.\n        It is included more as a reference implementation than as a useful\n        method.\n\n    Reference:\n        http://bisqwit.iki.fi/jutut/kuvat/ordered_dither/error_diffusion.txt\n\n    Quantization error of *current* pixel is added to the pixels\n    on the right and below according to the formulas below.\n    This works nicely for most static pictures, but causes\n    an avalanche of jittering artifacts if used in animation.\n\n    Floyd-Steinberg:\n\n              *  7\n           3  5  1      / 16\n\n    Jarvis-Judice-Ninke:\n\n              *  7  5\n        3  5  7  5  3\n        1  3  5  3  1   / 48\n\n    Stucki:\n\n              *  8  4\n        2  4  8  4  2\n        1  2  4  2  1   / 42\n\n    Burkes:\n\n              *  8  4\n        2  4  8  4  2   / 32\n\n\n    Sierra3:\n\n              *  5  3\n        2  4  5  4  2\n           2  3  2      / 32\n\n    Sierra2:\n\n              *  4  3\n        1  2  3  2  1   / 16\n\n    Sierra-2-4A:\n\n              *  2\n           1  1         / 4\n\n    Stevenson-Arce:\n\n                      *   .  32\n        12   .   26   .  30   .  16\n        .   12    .  26   .  12   .\n        5    .   12   .  12   .   
5    / 200\n\n    Atkinson:\n\n              *   1   1    / 8\n          1   1   1\n              1\n\n    :param :class:`PIL.Image` image: The image to apply error\n        diffusion dithering to.\n    :param :class:`~hitherdither.colour.Palette` palette: The palette to use.\n    :param str method: The error diffusion map to use.\n    :param int order: Metric parameter ``ord`` to send to\n        :method:`numpy.linalg.norm`.\n    :return: The error diffusion dithered PIL image of type\n        \"P\" using the input palette.\n\n    \"\"\"\n    ni = np.array(image, \"float\")\n\n    diff_map = _DIFFUSION_MAPS.get(method.lower())\n\n    for y in range(ni.shape[0]):\n        for x in range(ni.shape[1]):\n            old_pixel = ni[y, x]\n            old_pixel[old_pixel < 0.0] = 0.0\n            old_pixel[old_pixel > 255.0] = 255.0\n            new_pixel = palette.pixel_closest_colour(old_pixel, order)\n            quantization_error = old_pixel - new_pixel\n            ni[y, x] = new_pixel\n            for dx, dy, diffusion_coefficient in diff_map:\n                xn, yn = x + dx, y + dy\n                if (0 <= xn < ni.shape[1]) and (0 <= yn < ni.shape[0]):\n                    ni[yn, xn] += quantization_error * diffusion_coefficient\n    return palette.create_PIL_png_from_rgb_array(np.array(ni, \"uint8\"))\n"
  },
  {
    "path": "hitherdither/exceptions.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nexceptions\n-----------\n\n:copyright: 2017-05-10 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import absolute_import\n\n\nclass HitherDitherError(Exception):\n    pass\n\n\nclass PaletteCouldNotBeCreatedError(Exception):\n    pass\n"
  },
  {
    "path": "hitherdither/math/__init__.py",
    "content": ""
  },
  {
    "path": "hitherdither/ordered/__init__.py",
    "content": "from . import bayer\nfrom . import yliluoma\nfrom . import cluster\n"
  },
  {
    "path": "hitherdither/ordered/bayer.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nbayer_dithering\n-----------\n\n:copyright: 2016-09-09 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport numpy as np\n\n\ndef B(n, transposed=False):\n    \"\"\"Get the Bayer matrix with side of length ``n``.\n\n    Will only work if ``n`` is a power of 2.\n\n    Reference: http://caca.zoy.org/study/part2.html\n\n    :param int n: Power of 2 side length of matrix.\n    :return: The Bayer matrix.\n\n    \"\"\"\n    return (1 + I(n, transposed)) / (1 + (n * n))\n\n\ndef I(n, transposed=False):\n    \"\"\"Get the index matrix with side of length ``n``.\n\n    Will only work if ``n`` is a power of 2.\n\n    Reference: http://caca.zoy.org/study/part2.html\n\n    :param int n: Power of 2 side length of matrix.\n    :param bool transposed:\n    :return: The index matrix.\n\n    \"\"\"\n    if n == 0:\n        return np.array([[0, 0], [0, 0]], \"int\")\n    if n == 2:\n        if transposed:\n            return np.array([[0, 3], [2, 1]], \"int\")\n        else:\n            return np.array([[0, 2], [3, 1]], \"int\")\n    else:\n        smaller_I = I(n >> 1, transposed)\n        if transposed:\n            return np.bmat(\n                [\n                    [4 * smaller_I, 4 * smaller_I + 3],\n                    [4 * smaller_I + 2, 4 * smaller_I + 1],\n                ]\n            )\n        else:\n            return np.bmat(\n                [\n                    [4 * smaller_I, 4 * smaller_I + 2],\n                    [4 * smaller_I + 3, 4 * smaller_I + 1],\n                ]\n            )\n\n\ndef bayer_dithering(image, palette, thresholds, order=8):\n    \"\"\"Render the image using the ordered Bayer matrix dithering pattern.\n\n    :param :class:`PIL.Image` image: The image to apply\n        Bayer ordered dithering to.\n    
:param :class:`~hitherdither.colour.Palette` palette: The palette to use.\n    :param thresholds: Thresholds to apply dithering at.\n    :param int order: The size of the Bayer matrix.\n    :return:  The Bayer matrix dithered PIL image of type \"P\"\n        using the input palette.\n\n    \"\"\"\n    bayer_matrix = B(order)\n    ni = np.array(image, \"uint8\")\n    thresholds = np.array(thresholds, \"uint8\")\n    xx, yy = np.meshgrid(range(ni.shape[1]), range(ni.shape[0]))\n    xx %= order\n    yy %= order\n    factor_threshold_matrix = np.expand_dims(bayer_matrix[yy, xx], axis=2) * thresholds\n    new_image = ni + factor_threshold_matrix\n    return palette.create_PIL_png_from_rgb_array(new_image)\n"
  },
  {
    "path": "hitherdither/ordered/cluster.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nbayer_dithering\n-----------\n\n:copyright: 2016-09-09 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport numpy as np\n\n_CLUSTER_DOT_MATRICES = {\n    4: np.array([[12, 5, 6, 13], [4, 0, 1, 7], [11, 3, 2, 8], [15, 10, 9, 14]], \"float\")\n    / 16.0,\n    8: np.array(\n        [\n            [24, 10, 12, 26, 35, 47, 49, 37],\n            [8, 0, 2, 14, 45, 59, 61, 51],\n            [22, 6, 4, 16, 43, 57, 63, 53],\n            [30, 20, 18, 28, 33, 41, 55, 39],\n            [34, 46, 48, 36, 25, 11, 13, 27],\n            [44, 57, 60, 50, 9, 1, 3, 15],\n            [42, 56, 62, 52, 23, 7, 5, 17],\n            [32, 40, 54, 38, 31, 21, 19, 29],\n        ],\n        \"float\",\n    )\n    / 64.0,\n    (5, 3): np.array([[9, 3, 0, 6, 12], [10, 4, 1, 7, 13], [11, 5, 2, 8, 14]], \"float\")\n    / 15.0,\n}\n\n\ndef cluster_dot_dithering(image, palette, thresholds, order=4):\n    \"\"\"Render the image using the ordered Bayer matrix dithering pattern.\n\n    Reference: http://caca.zoy.org/study/part2.html\n\n    :param :class:`PIL.Image` image: The image to apply the\n        ordered dithering to.\n    :param :class:`~hitherdither.colour.Palette` palette: The palette to use.\n    :param thresholds: Thresholds to apply dithering at.\n    :param int order: The size of the Bayer matrix.\n    :return:  The Bayer matrix dithered PIL image of type \"P\"\n        using the input palette.\n\n    \"\"\"\n\n    cluster_dot_matrix = _CLUSTER_DOT_MATRICES.get(order)\n    if cluster_dot_matrix is None:\n        raise NotImplementedError(\"Only order 4 and 8 is implemented as of yet.\")\n    ni = np.array(image, \"uint8\")\n    thresholds = np.array(thresholds, \"uint8\")\n    xx, yy = np.meshgrid(range(ni.shape[1]), range(ni.shape[0]))\n    xx %= order\n    yy 
%= order\n    factor_threshold_matrix = (\n        np.expand_dims(cluster_dot_matrix[yy, xx], axis=2) * thresholds\n    )\n    new_image = ni + factor_threshold_matrix\n    return palette.create_PIL_png_from_rgb_array(new_image)\n"
  },
  {
    "path": "hitherdither/ordered/yliluoma/__init__.py",
    "content": "from ._algorithm_one import yliluomas_1_ordered_dithering\n"
  },
  {
    "path": "hitherdither/ordered/yliluoma/_algorithm_one.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nalgorithm_one\n-----------\n\n:copyright: 2016-09-12 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport numpy as np\n\nfrom ._utils import color_compare, CCIR_LUMINOSITY\nfrom ..bayer import I\n\n\ndef _get_mixing_plan_matrix(palette, order=8):\n    mixing_matrix = []\n    colours = {}\n    colour_component_distances = []\n\n    nn = order * order\n    for i in range(len(palette)):\n        for j in range(i, len(palette)):\n            for ratio in range(0, nn):\n                if i == j and ratio != 0:\n                    break\n                # Determine the two component colors.\n                c_mix = _colour_combine(palette, i, j, ratio / nn)\n                hex_colour = palette.rgb2hex(*c_mix.tolist())\n                colours[hex_colour] = (i, j, ratio / nn)\n                mixing_matrix.append(c_mix)\n\n                c1 = np.array(palette[i], \"int\")\n                c2 = np.array(palette[j], \"int\")\n                cmpval = (\n                    color_compare(c1, c2)\n                    * 0.1\n                    * (np.abs((ratio / float(nn)) - 0.5) + 0.5)\n                )\n                colour_component_distances.append(cmpval)\n\n    mixing_matrix = np.array(mixing_matrix)\n    colour_component_distances = np.array(colour_component_distances)\n\n    for c in mixing_matrix:\n        assert palette.rgb2hex(*c.tolist()) in colours\n\n    return mixing_matrix, colours, colour_component_distances\n\n\ndef _colour_combine(palette, i, j, ratio):\n    c1, c2 = np.array(palette[i], \"int\"), np.array(palette[j], \"int\")\n    return np.array(c1 + ratio * (c2 - c1), \"uint8\")\n\n\ndef _improved_mixing_error_fcn(\n    colour, mixing_matrix, colour_component_distances, luma_mat=None\n):\n    \"\"\"Compares two colours 
using the Psychovisual model.\n\n    The simplest way to adjust the psychovisual model is to\n    add some code that considers the difference between the\n    two pixel values that are being mixed in the dithering\n    process, and penalizes combinations that differ too much.\n\n    Wikipedia has an entire article about the topic of comparing\n    two color values. Most of the improved color comparison\n    functions are based on the CIE colorspace, but simple\n    improvements can be done in the RGB space too. Such a simple\n    improvement is shown below. We might call this RGBL, for\n    luminance-weighted RGB.\n\n    :param :class:`numpy.ndarray` colour: The colour to estimate error to.\n    :param :class:`numpy.ndarray` mixing_matrix: The rgb\n        values of mixed colours.\n    :param :class:`numpy.ndarray` colour_component_distances: The colour\n        distance of the mixed colours.\n    :return: :class:`numpy.ndarray`\n\n    \"\"\"\n    colour = np.array(colour, \"int\")\n    if luma_mat is None:\n        luma_mat = mixing_matrix.dot(CCIR_LUMINOSITY / 1000.0 / 255.0)\n    luma_colour = colour.dot(CCIR_LUMINOSITY) / (255.0 * 1000.0)\n    luma_diff_squared = (luma_mat - luma_colour) ** 2\n    diff_colour_squared = ((colour - mixing_matrix) / 255.0) ** 2\n    cmpvals = diff_colour_squared.dot(CCIR_LUMINOSITY) / 1000.0\n    cmpvals *= 0.75\n    cmpvals += luma_diff_squared\n    cmpvals += colour_component_distances\n    return cmpvals\n\n\ndef yliluomas_1_ordered_dithering(image, palette, order=8):\n    \"\"\"A dithering method that weighs in color combinations of palette.\n\n    N.B. 
tri-tone dithering is not implemented.\n\n    :param :class:`PIL.Image` image: The image to apply\n        Bayer ordered dithering to.\n    :param :class:`~hitherdither.colour.Palette` palette: The palette to use.\n    :param int order: The Bayer matrix size to use.\n    :return:  The dithered PIL image of type \"P\" using the input palette.\n\n    \"\"\"\n    bayer_matrix = I(order, transposed=True) / 64.0\n    ni = np.array(image, \"uint8\")\n    xx, yy = np.meshgrid(range(ni.shape[1]), range(ni.shape[0]))\n    factor_matrix = bayer_matrix[yy % order, xx % order]\n\n    # Prepare all precalculated mixed colours and their respective\n    mixing_matrix, colour_map, colour_component_distances = _get_mixing_plan_matrix(\n        palette\n    )\n    mixing_matrix = np.array(mixing_matrix, \"int\")\n    luma_mat = mixing_matrix.dot(CCIR_LUMINOSITY / 1000.0 / 255.0)\n\n    color_matrix = np.zeros(ni.shape[:2], dtype=\"uint8\")\n    for x, y in zip(np.nditer(xx), np.nditer(yy)):\n\n        min_index = np.argmin(\n            _improved_mixing_error_fcn(\n                ni[y, x, :], mixing_matrix, colour_component_distances, luma_mat\n            )\n        )\n        closest_mix_colour = mixing_matrix[min_index, :].tolist()\n        closest_mix_hexcolour = palette.rgb2hex(*closest_mix_colour)\n        plan = colour_map.get(closest_mix_hexcolour)\n        color_matrix[y, x] = plan[1] if (factor_matrix[y, x] < plan[-1]) else plan[0]\n\n    return palette.create_PIL_png_from_closest_colour(color_matrix)\n\n\ndef _evaluate_mixing_error(\n    desired_colour,\n    mixed_colour,\n    component_colour_1,\n    component_colour_2,\n    ratio,\n    component_colour_compare_value=None,\n):\n    \"\"\"Compare colours and weigh in component difference.\n\n    double EvaluateMixingError(int r,int g,int b,\n                               int r0,int g0,int b0,\n                               int r1,int g1,int b1,\n                               int r2,int g2,int b2,\n                     
          double ratio)\n    {\n        return ColorCompare(r,g,b, r0,g0,b0)\n             + ColorCompare(r1,g1,b1, r2,g2,b2) * 0.1\n             * (fabs(ratio-0.5)+0.5);\n    }\n\n\n    :param desired_colour:\n    :param mixed_colour:\n    :param component_colour_1:\n    :param component_colour_2:\n    :param ratio:\n    :param component_colour_compare_value:\n    :return:\n\n    \"\"\"\n    if component_colour_compare_value is None:\n        return color_compare(desired_colour, mixed_colour) + (\n            color_compare(component_colour_1, component_colour_2)\n            * 0.1\n            * (np.abs(ratio - 0.5) + 0.5)\n        )\n    else:\n        return (\n            color_compare(desired_colour, mixed_colour) + component_colour_compare_value\n        )\n"
  },
  {
    "path": "hitherdither/ordered/yliluoma/_utils.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n_utils\n-----------\n\n:copyright: 2016-09-23 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport numpy as np\n\n# CCIR 601 luminosity\nCCIR_LUMINOSITY = np.array([299.0, 587.0, 114.0])\n\n\ndef color_compare(c1, c2):\n    \"\"\"Compare the difference of two RGB values, weigh by CCIR 601 luminosity\n\n    double ColorCompare(int r1,int g1,int b1, int r2,int g2,int b2)\n    {\n        double luma1 = (r1*299 + g1*587 + b1*114) / (255.0*1000);\n        double luma2 = (r2*299 + g2*587 + b2*114) / (255.0*1000);\n        double lumadiff = luma1-luma2;\n        double diffR = (r1-r2)/255.0, diffG = (g1-g2)/255.0, diffB = (b1-b2)/255.0;\n        return (diffR*diffR*0.299 + diffG*diffG*0.587 + diffB*diffB*0.114)*0.75\n             + lumadiff*lumadiff;\n    }\n\n    :return: float\n\n    \"\"\"\n    luma_diff = c1.dot(CCIR_LUMINOSITY) / (255.0 * 1000.0) - c2.dot(CCIR_LUMINOSITY) / (\n        255.0 * 1000.0\n    )\n    diff_col = (c1 - c2) / 255.0\n    return ((diff_col ** 2).dot(CCIR_LUMINOSITY / 1000.0) * 0.75) + (luma_diff ** 2)\n"
  },
  {
    "path": "hitherdither/palette.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\npalette\n-----------\n\n:copyright: 2016-09-09 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport numpy as np\nfrom PIL import Image\nfrom PIL.ImagePalette import ImagePalette\n\nfrom hitherdither.exceptions import PaletteCouldNotBeCreatedError\n\ntry:\n    string_type = basestring\nexcept NameError:\n    string_type = str\n\n\ndef hex2rgb(h):\n    if isinstance(h, string_type):\n        return hex2rgb(int(h[1:] if h.startswith(\"#\") else h, 16))\n    return (h >> 16) & 0xFF, (h >> 8) & 0xFF, h & 0xFF\n\n\ndef rgb2hex(r, g, b):\n    return (r << 16) + (g << 8) + b\n\n\ndef _get_all_present_colours(im):\n    \"\"\"Returns a dict of RGB colours present.\n\n    N.B. Do not use this except for testing purposes.\n\n    Reference: http://stackoverflow.com/a/4643911\n\n    :param im: The image to get number of colours in.\n    :type im: :class:`~PIL.Image.Image`\n    :return: A dict of contained RGB colours as keys.\n    :rtype: dict\n\n    \"\"\"\n    from collections import defaultdict\n\n    by_color = defaultdict(int)\n    for pixel in im.getdata():\n        by_color[pixel] += 1\n    return by_color\n\n\nclass Palette(object):\n    \"\"\"The :mod:`~hitherdither` implementation of a colour palette.\n\n    Can be instantiated in from colour specifications in the following forms:\n\n    - ``uint8`` numpy array of size ``[N x 3]``\n    - ``uint8`` numpy array of size ``[3N]``\n    - :class:`~PIL.ImagePalette.ImagePalette`\n    - :class:`~PIL.Image.Image`\n    - list of hex values\n    - list of RGB tuples\n\n    \"\"\"\n\n    def __init__(self, data):\n        if isinstance(data, np.ndarray):\n            if data.ndim == 1:\n                self.colours = data.reshape((3, len(data) // 3))\n            else:\n                self.colours = 
data\n            self.hex = [rgb2hex(*colour) for colour in data]\n        elif isinstance(data, ImagePalette):\n            _tmp = np.frombuffer(data.palette, \"uint8\")\n            self.colours = _tmp.reshape((3, len(_tmp) // 3))\n            self.hex = [rgb2hex(*colour) for colour in data]\n        elif isinstance(data, Image.Image):\n            if data.palette is None:\n                raise PaletteCouldNotBeCreatedError(\n                    \"Image of mode {0} has no PIL palette. \"\n                    \"Make sure it is of mode P.\".format(data.mode)\n                )\n            _colours = data.getcolors()\n            _n_colours = len(_colours)\n            _tmp = np.array(data.getpalette())[: 3 * _n_colours]\n            self.colours = _tmp.reshape((3, len(_tmp) // 3)).T\n            self.hex = [rgb2hex(*colour) for colour in self]\n        elif isinstance(data, (list, tuple)):\n            if isinstance(data[0], string_type):\n                # Assume hex strings\n                self.hex = data\n                self.colours = np.array([hex2rgb(c) for c in data])\n            elif isinstance(data[0], int):\n                # Assume hex values\n                self.hex = data  # TODO: Convert to hex string.\n                self.colours = np.array([hex2rgb(c) for c in data])\n            else:\n                # Assume RGB tuples\n                self.colours = np.array(data)\n                self.hex = [rgb2hex(*colour) for colour in data]\n\n    def __iter__(self):\n        for colour in self.colours:\n            yield colour\n\n    def __len__(self):\n        return self.colours.shape[0]\n\n    def __getitem__(self, item):\n        if isinstance(item, int):\n            return self.colours[item, :]\n        else:\n            raise IndexError(\"Can only reference colours by integer values.\")\n\n    def render(self, colours):\n        return np.array(np.take(self.colours, colours, axis=0), \"uint8\")\n\n    def image_distance(self, image, 
order=2):\n        ni = np.array(image, \"float\")\n        distances = np.zeros((ni.shape[0], ni.shape[1], len(self)), \"float\")\n        for i, colour in enumerate(self):\n            distances[:, :, i] = np.linalg.norm(ni - colour, ord=order, axis=2)\n        return distances\n\n    def image_closest_colour(self, image, order=2):\n        return np.argmin(self.image_distance(image, order=order), axis=2)\n\n    def pixel_distance(self, pixel, order=2):\n        return np.array([np.linalg.norm(pixel - colour, ord=order) for colour in self])\n\n    def pixel_closest_colour(self, pixel, order=2):\n        return self.colours[\n            np.argmin(self.pixel_distance(pixel, order=order)), :\n        ].copy()\n\n    @classmethod\n    def create_by_kmeans(cls, image):\n        raise NotImplementedError()\n\n    @classmethod\n    def create_by_median_cut(cls, image, n=16, dim=None):\n        img = np.array(image)\n        # Create pixel buckets to simplify sorting and splitting.\n        if img.ndim == 3:\n            pixels = img.reshape((img.shape[0] * img.shape[1], img.shape[2]))\n        elif img.ndim == 2:\n            pixels = img.reshape((img.shape[0] * img.shape[1], 1))\n\n        def median_cut(p, dim=None):\n            \"\"\"Median cut method.\n\n            Reference:\n            https://en.wikipedia.org/wiki/Median_cut\n\n            :param p: The pixel array to split in two.\n            :return: Two numpy arrays, split by median cut method.\n            \"\"\"\n            if dim is not None:\n                sort_dim = dim\n            else:\n                mins = p.min(axis=0)\n                maxs = p.max(axis=0)\n                sort_dim = np.argmax(maxs - mins)\n\n            argument = np.argsort(p[:, sort_dim])\n            p = p[argument, :]\n            m = np.median(p[:, sort_dim])\n            split_mask = p[:, sort_dim] >= m\n            return [p[~split_mask, :].copy(), p[split_mask, :].copy()]\n\n        # Do actual splitting loop.\n    
    bins = [\n            pixels,\n        ]\n        while len(bins) < n:\n            new_bins = []\n            for bin in bins:\n                if len(bin) != 0:\n                    new_bins += median_cut(bin, dim)\n            bins = new_bins\n\n        # Average over pixels in each bin to create\n        colours = np.array(\n            [np.array(bin.mean(axis=0).round(), \"uint8\") for bin in bins], \"uint8\"\n        )\n        return cls(colours)\n\n    def create_PIL_png_from_closest_colour(self, cc):\n        \"\"\"Create a ``P`` PIL image with this palette.\n\n        Avoids the PIL dithering in favour of our own.\n\n        Reference: http://stackoverflow.com/a/29438149\n\n        :param :class:`numpy.ndarray` cc: A ``[M x N]`` array with integer\n            values representing palette colour indices to build image from.\n        :return: A :class:`PIL.Image.Image` image of mode ``P``.\n\n        \"\"\"\n        pa_image = Image.new(\"P\", cc.shape[::-1])\n        pa_image.putpalette(self.colours.flatten().tolist())\n        im = Image.fromarray(np.array(cc, \"uint8\")).im.convert(\"P\", 0, pa_image.im)\n        try:\n            # Pillow >= 4\n            return pa_image._new(im)\n        except AttributeError:\n            # Pillow < 4\n            return pa_image._makeself(im)\n\n    def create_PIL_png_from_rgb_array(self, img_array):\n        \"\"\"Create a ``P`` PIL image from a RGB image with this palette.\n\n        Avoids the PIL dithering in favour of our own.\n\n        Reference: http://stackoverflow.com/a/29438149\n\n        :param :class:`numpy.ndarray` img_array: A ``[M x N x 3]`` uint8\n            array representing RGB colours.\n        :return: A :class:`PIL.Image.Image` image of mode ``P`` with colours\n            available in this palette.\n\n        \"\"\"\n        cc = self.image_closest_colour(img_array, order=2)\n        pa_image = Image.new(\"P\", cc.shape[::-1])\n        
pa_image.putpalette(self.colours.flatten().tolist())\n        im = Image.fromarray(np.array(cc, \"uint8\")).im.convert(\"P\", 0, pa_image.im)\n        try:\n            # Pillow >= 4\n            return pa_image._new(im)\n        except AttributeError:\n            # Pillow < 4\n            return pa_image._makeself(im)\n\n    @staticmethod\n    def hex2rgb(x):\n        return hex2rgb(x)\n\n    @staticmethod\n    def rgb2hex(r, g, b):\n        return rgb2hex(r, g, b)\n"
  },
  {
    "path": "hitherdither/utils.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n:mod:`utils`\n=======================\n\n.. moduleauthor:: hbldh <henrik.blidh@swedwise.com>\nCreated on 2016-09-12, 09:50\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\n\nimport numpy as np\nfrom PIL import Image\n\n\ndef np2pil(img):\n    return Image.fromarray(np.array(img, \"uint8\"))\n\n\ndef pil2np(img):\n    return np.array(img, \"uint8\")\n"
  },
  {
    "path": "requirements.txt",
    "content": "Pillow>=3.3.1\nnumpy>=1.9.0\n"
  },
  {
    "path": "run.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n:mod:`run`\n=======================\n\n.. moduleauthor:: hbldh <henrik.blidh@nedomkull.com>\nCreated on 2016-09-12, 09:44\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport numpy as np\n\nfrom hitherdither import data\nfrom hitherdither.palette import Palette\nfrom hitherdither.diffusion import error_diffusion_dithering\nfrom hitherdither.ordered import yliluoma\nimport hitherdither.utils\n\n# Fetch the example image and the palette from Yliluoma's page.\ns = data.scene()\np = Palette(hitherdither.data.palette())\n\np2 = Palette.create_by_median_cut(s)\n\n# Map raw image to the palette\nclosest_colour = p.image_closest_colour(s, order=2)\n# Render the undithered image with only colours in\n# the palette as a RGB numpy array.\nundithered_image = p.render(closest_colour)\n# Create a PIL Image of mode \"P\" from the palette colour index matrix.\ns_png = p.create_PIL_png_from_closest_colour(closest_colour)\ns_png.show()\n\n#print(np.linalg.norm(undithered_image - np.array(s_png.convert(\"RGB\"))))\n\n# Render an Yliluoma algorithm 1 image.\nyliluoma1_image = yliluoma.yliluomas_1_ordered_dithering(\n    s, p, order=8)\nyliluoma1_image.resize(np.array(yliluoma1_image.size) * 4).show()\n#yliluoma1_image.show()\n\n"
  },
  {
    "path": "setup.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# Note: To use the 'upload' functionality of this file, you must:\n#   $ pip install twine\n\nimport io\nimport os\nimport sys\nfrom shutil import rmtree\n\nfrom setuptools import find_packages, setup, Command\n\n# Package meta-data.\nNAME = 'hitherdither'\nDESCRIPTION = 'Dithering algorithms for arbitrary palettes in PIL'\nURL = 'https://github.com/hbldh/hitherdither'\nEMAIL = 'henrik.blidh@nedomkull.com'\nAUTHOR = 'Henrik Blidh'\n\n# What packages are required for this module to be executed?\nREQUIRED = [\n   'Pillow>=3.3.1',\n   'numpy>=1.9.0',\n   'pathlib2;python_version<\"3\"'\n],\n\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\nwith io.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:\n    long_description = '\\n' + f.read()\n\n# Load the package's __version__.py module as a dictionary.\nabout = {}\nwith open(os.path.join(here, NAME, '__version__.py')) as f:\n    exec(f.read(), about)\n\n\nclass UploadCommand(Command):\n    \"\"\"Support setup.py upload.\"\"\"\n\n    description = 'Build and publish the package.'\n    user_options = []\n\n    @staticmethod\n    def status(s):\n        \"\"\"Prints things in bold.\"\"\"\n        print('\\033[1m{0}\\033[0m'.format(s))\n\n    def initialize_options(self):\n        pass\n\n    def finalize_options(self):\n        pass\n\n    def run(self):\n        try:\n            self.status('Removing previous builds…')\n            rmtree(os.path.join(here, 'dist'))\n        except OSError:\n            pass\n\n        self.status('Building Source and Wheel (universal) distribution…')\n        os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))\n\n        self.status('Uploading the package to PyPi via Twine…')\n        os.system('twine upload dist/*')\n\n        sys.exit()\n\n\n# Where the magic happens:\nsetup(\n    name=NAME,\n    version=about['__version__'],\n    description=DESCRIPTION,\n    
long_description=long_description,\n    author=AUTHOR,\n    author_email=EMAIL,\n    url=URL,\n    packages=find_packages(exclude=('tests',)),\n    install_requires=REQUIRED,\n    include_package_data=True,\n    license='MIT',\n    classifiers=[\n        'License :: OSI Approved :: MIT License',\n        'Programming Language :: Python',\n        'Programming Language :: Python :: 2',\n        'Programming Language :: Python :: 2.6',\n        'Programming Language :: Python :: 2.7',\n        'Programming Language :: Python :: 3',\n        'Programming Language :: Python :: 3.3',\n        'Programming Language :: Python :: 3.4',\n        'Programming Language :: Python :: 3.5',\n        'Programming Language :: Python :: 3.6',\n        'Operating System :: OS Independent',\n        'Development Status :: 4 - Beta',\n        'Intended Audience :: Developers',\n    ],\n    # $ setup.py publish support.\n    cmdclass={\n        'upload': UploadCommand,\n    },\n)\n"
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/conftest.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\ntools\n-----------\n\n:copyright: 2017-05-10 by hbldh <henrik.blidh@nedomkull.com>\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import absolute_import\n\nimport pytest\ntry:\n    import pathlib2 as pathlib\nexcept ImportError:\n    import pathlib\n\nfrom hitherdither.data import _image\n\n\n@pytest.fixture(scope='session')\ndef test_png():\n    p = pathlib.Path(__file__).parent.joinpath('astronaut.png')\n    url = 'https://raw.githubusercontent.com/scikit-image/scikit-image/master/skimage/data/astronaut.png'\n    i = _image(p, url)\n    return i\n\n\n@pytest.fixture(scope='session')\ndef test_jpeg():\n    p = pathlib.Path(__file__).parent.joinpath('rocket.jpg')\n    url = 'https://raw.githubusercontent.com/scikit-image/scikit-image/master/skimage/data/rocket.jpg'\n    i = _image(p, url)\n    return i\n"
  },
  {
    "path": "tests/test_bayer.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n:mod:`test_bayer`\n=======================\n\n.. moduleauthor:: hbldh <henrik.blidh@nedomkull.com>\nCreated on 2016-09-12, 13:35\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport pytest\nimport numpy as np\n\nfrom hitherdither.ordered import bayer\n\n\n_BAYER_MATRICES = {\n    2: (1 / 5.) * np.array([\n        [1, 3],\n        [4, 2]]\n    ),\n    3: (1 / 10.) * np.array([\n        [1, 8, 4],\n        [7, 6, 3],\n        [5, 2, 9]]\n    ),\n    4: (1 / 17.) * np.array(\n        [[1, 9, 3, 11],\n         [13, 5, 15, 7],\n         [4, 12, 2, 10],\n         [16, 8, 14, 6]]\n    ),\n    8: 1 / 65. * np.array([\n        [1, 49, 13, 61, 4, 52, 16, 64],\n        [33, 17, 45, 29, 36, 20, 48, 32],\n        [9, 57, 5, 53, 12, 60, 8, 56],\n        [41, 25, 37, 21, 44, 28, 40, 24],\n        [3, 51, 15, 63, 2, 50, 14, 62],\n        [35, 19, 47, 31, 34, 18, 46, 30],\n        [11, 59, 7, 55, 10, 58, 6, 54],\n        [43, 27, 39, 23, 42, 26, 38, 22]]\n    ).T\n}\n\n@pytest.mark.parametrize(\"order\", [2,4,8])\ndef test_bayer(order):\n    np.testing.assert_allclose(bayer.B(order, False), _BAYER_MATRICES.get(order))\n\n\n"
  },
  {
    "path": "tests/test_palette.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n:mod:`test_palette`\n=======================\n\n.. moduleauthor:: hbldh <henrik.blidh@nedomkull.com>\nCreated on 2016-09-13, 09:38\n\n\"\"\"\n\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import absolute_import\n\nimport pytest\nimport numpy as np\n\nfrom hitherdither import palette\nfrom hitherdither.exceptions import PaletteCouldNotBeCreatedError\nfrom hitherdither.data import scene, scene_bayer0, scene_undithered\n\n\n@pytest.mark.parametrize(\n    \"hex_colour, rgb_colour\",\n    (\n        (\"#ffffff\", (255, 255, 255)),\n        (\"#abcdef\", (171, 205, 239)),\n        (\"#012345\", (1, 35, 69)),\n        (0x82F698, (130, 246, 152)),\n        (\"0x82f698\", (130, 246, 152)),\n    ),\n)\ndef test_hex2rgb(hex_colour, rgb_colour):\n    assert palette.hex2rgb(hex_colour) == rgb_colour\n\n\n@pytest.mark.parametrize(\n    \"hex_colour, rgb_colour\",\n    (\n        (\"#ffffff\", (255, 255, 255)),\n        (\"#abcdef\", (171, 205, 239)),\n        (\"#012345\", (1, 35, 69)),\n        (0x82F698, (130, 246, 152)),\n        (\"0x82f698\", (130, 246, 152)),\n    ),\n)\ndef test_rgb2hex(hex_colour, rgb_colour):\n    try:\n        if isinstance(hex_colour, int):\n            hc = hex_colour\n        else:\n            hc = int(hex_colour, 16)\n    except:\n        hc = int(hex_colour[1:], 16)\n    assert palette.rgb2hex(*rgb_colour) == hc\n\n\n@pytest.mark.parametrize(\n    \"input_data, n_colours\",\n    (\n        (\n            [\n                np.array((255, 255, 255)),\n                np.array((171, 205, 239)),\n                np.array((1, 35, 69)),\n                np.array((130, 246, 152)),\n            ],\n            4,\n        ),\n        ([(255, 255, 255), (171, 205, 239), (1, 35, 69), (130, 246, 152)], 4),\n        (\n            np.array(\n                [\n                    (255, 255, 255),\n      
              (171, 205, 239),\n                    (1, 35, 69),\n                    (130, 246, 152),\n                    (0, 0, 0),\n                ]\n            ),\n            5,\n        ),\n        (\n            [\n                \"#ff21ee\",\n                \"#123456\",\n                \"#abcdef\",\n                \"#000000\",\n            ],\n            4,\n        ),\n        (\n            [\n                0xFF21EE,\n                0x123456,\n                0xABCDEF,\n                0x000000,\n            ],\n            4,\n        ),\n    ),\n)\ndef test_create(input_data, n_colours):\n    p = palette.Palette(input_data)\n    if isinstance(n_colours, tuple):\n        # JPEG gets 80 colours in Python 2.7.9 and 3.4,\n        # 82 in Python 2.7.12 and 3.5, 3.6...\n        assert len(p) in n_colours\n        assert len([c for c in p]) in n_colours\n    else:\n        assert len(p) == n_colours\n        assert len([c for c in p]) == n_colours\n\n\ndef test_create_png(test_png):\n    n_colours = 104\n    p = palette.Palette(test_png.convert(\"P\"))\n    if isinstance(n_colours, tuple):\n        # JPEG gets 80 colours in Python 2.7.9 and 3.4,\n        # 82 in Python 2.7.12 and 3.5, 3.6...\n        assert len(p) in n_colours\n        assert len([c for c in p]) in n_colours\n    else:\n        assert len(p) == n_colours\n        assert len([c for c in p]) == n_colours\n\n\ndef test_create_jpg(test_jpeg):\n    n_colours = (80, 82)\n    p = palette.Palette(test_jpeg.convert(\"P\"))\n    if isinstance(n_colours, tuple):\n        # JPEG gets 80 colours in Python 2.7.9 and 3.4,\n        # 82 in Python 2.7.12 and 3.5, 3.6...\n        assert len(p) in n_colours\n        assert len([c for c in p]) in n_colours\n    else:\n        assert len(p) == n_colours\n        assert len([c for c in p]) == n_colours\n\n\ndef test_create_bayer0():\n    n_colours = 16\n    p = palette.Palette(scene_bayer0())\n    if isinstance(n_colours, tuple):\n        # JPEG gets 80 
colours in Python 2.7.9 and 3.4,\n        # 82 in Python 2.7.12 and 3.5, 3.6...\n        assert len(p) in n_colours\n        assert len([c for c in p]) in n_colours\n    else:\n        assert len(p) == n_colours\n        assert len([c for c in p]) == n_colours\n\n\ndef test_create_bayer0():\n    n_colours = 16\n    p = palette.Palette(scene_undithered())\n    if isinstance(n_colours, tuple):\n        # JPEG gets 80 colours in Python 2.7.9 and 3.4,\n        # 82 in Python 2.7.12 and 3.5, 3.6...\n        assert len(p) in n_colours\n        assert len([c for c in p]) in n_colours\n    else:\n        assert len(p) == n_colours\n        assert len([c for c in p]) == n_colours\n\n\ndef test_create_fails_1(test_png):\n    with pytest.raises(PaletteCouldNotBeCreatedError):\n        p = palette.Palette(test_png)\n\n\ndef test_create_fails_2(test_jpeg):\n    with pytest.raises(PaletteCouldNotBeCreatedError):\n        p = palette.Palette(test_jpeg)\n\n\ndef test_create_fails_3(test_jpeg):\n    with pytest.raises(PaletteCouldNotBeCreatedError):\n        p = palette.Palette(test_jpeg.convert(\"L\"))\n\n\ndef test_create_fails_4(test_jpeg):\n    with pytest.raises(PaletteCouldNotBeCreatedError):\n        p = palette.Palette(scene())\n"
  }
]