Repository: BradenM/micropy-cli
Branch: master
Commit: f361a0465c15
Files: 165
Total size: 418.7 KB
Directory structure:
gitextract_7ldyguvi/
├── .chglog/
│ ├── CHANGELOG.tpl.md
│ └── config.yml
├── .editorconfig
├── .git-blame-ignore-revs
├── .github/
│ ├── ISSUE_TEMPLATE/
│ │ ├── bug_report.md
│ │ ├── config.yml
│ │ ├── feature_request.md
│ │ └── question.md
│ ├── actions/
│ │ └── setup-micropy/
│ │ └── action.yml
│ ├── codeql/
│ │ └── codeql-config.yml
│ ├── renovate.json5
│ └── workflows/
│ ├── changelog.yml
│ ├── codeql-analysis.yml
│ ├── main.yml
│ ├── publish.yml
│ └── release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .prettierignore
├── .readthedocs.yml
├── .release-please-manifest.json
├── .tool-versions
├── CHANGELOG.md
├── LICENSE
├── Makefile
├── README.md
├── docs/
│ ├── Makefile
│ ├── _autosummary/
│ │ ├── micropy.config.config_source.rst
│ │ ├── micropy.config.rst
│ │ ├── micropy.exceptions.rst
│ │ ├── micropy.main.rst
│ │ ├── micropy.packages.rst
│ │ ├── micropy.project.modules.rst
│ │ ├── micropy.project.rst
│ │ ├── micropy.rst
│ │ ├── micropy.stubs.rst
│ │ ├── micropy.stubs.source.rst
│ │ └── micropy.utils.rst
│ ├── base.md
│ ├── cli.rst
│ ├── conf.py
│ ├── header.rst
│ ├── index.rst
│ └── modules.rst
├── micropy/
│ ├── __init__.py
│ ├── __main__.py
│ ├── app/
│ │ ├── __init__.py
│ │ ├── main.py
│ │ └── stubs.py
│ ├── config/
│ │ ├── __init__.py
│ │ ├── config.py
│ │ ├── config_dict.py
│ │ ├── config_json.py
│ │ └── config_source.py
│ ├── data/
│ │ ├── __init__.py
│ │ ├── schemas/
│ │ │ ├── firmware.json
│ │ │ └── stubs.json
│ │ └── sources.json
│ ├── exceptions.py
│ ├── logger.py
│ ├── main.py
│ ├── packages/
│ │ ├── __init__.py
│ │ ├── package.py
│ │ ├── source.py
│ │ ├── source_package.py
│ │ └── source_path.py
│ ├── project/
│ │ ├── __init__.py
│ │ ├── checks.py
│ │ ├── modules/
│ │ │ ├── __init__.py
│ │ │ ├── modules.py
│ │ │ ├── packages.py
│ │ │ ├── stubs.py
│ │ │ └── templates.py
│ │ ├── project.py
│ │ ├── template/
│ │ │ ├── .gitignore
│ │ │ ├── .pylintrc
│ │ │ ├── .vscode/
│ │ │ │ ├── extensions.json
│ │ │ │ └── settings.json
│ │ │ ├── pymakr.conf
│ │ │ └── src/
│ │ │ ├── boot.py
│ │ │ └── main.py
│ │ └── template.py
│ ├── py.typed
│ ├── pyd/
│ │ ├── __init__.py
│ │ ├── abc.py
│ │ ├── backend_rshell.py
│ │ ├── backend_upydevice.py
│ │ ├── consumers.py
│ │ └── pydevice.py
│ ├── stubs/
│ │ ├── __init__.py
│ │ ├── manifest.py
│ │ ├── package.py
│ │ ├── repo.py
│ │ ├── repo_package.py
│ │ ├── repositories/
│ │ │ ├── __init__.py
│ │ │ ├── micropy.py
│ │ │ └── micropython.py
│ │ ├── repository_info.py
│ │ ├── source.py
│ │ └── stubs.py
│ └── utils/
│ ├── __init__.py
│ ├── _compat.py
│ ├── decorators.py
│ ├── helpers.py
│ ├── stub.py
│ ├── types.py
│ └── validate.py
├── pyproject.toml
├── release-please-config.json
├── scripts/
│ └── export-docs-reqs.sh
└── tests/
├── __init__.py
├── app/
│ ├── conftest.py
│ ├── test_main.py
│ └── test_stubs.py
├── conftest.py
├── data/
│ ├── esp32_test_stub/
│ │ ├── frozen/
│ │ │ ├── ntptime.py
│ │ │ └── ntptime.pyi
│ │ ├── info.json
│ │ └── stubs/
│ │ ├── machine.py
│ │ └── modules.json
│ ├── esp8266_invalid_stub/
│ │ └── info.json
│ ├── esp8266_test_stub/
│ │ ├── frozen/
│ │ │ ├── ntptime.py
│ │ │ └── ntptime.pyi
│ │ ├── info.json
│ │ └── stubs/
│ │ ├── machine.py
│ │ └── modules.json
│ ├── fware_test_stub/
│ │ ├── frozen/
│ │ │ ├── utarfile.py
│ │ │ ├── utarfile.pyi
│ │ │ ├── utokenize.py
│ │ │ └── utokenize.pyi
│ │ └── info.json
│ ├── project_test/
│ │ ├── .pylintrc
│ │ ├── .vscode/
│ │ │ └── settings.json
│ │ └── micropy.json
│ ├── stubber_test_stub/
│ │ ├── micropython.py
│ │ └── modules.json
│ ├── test_repo.json
│ ├── test_source.xml
│ └── test_sources.json
├── test_checks.py
├── test_config.py
├── test_highlevel.py
├── test_main.py
├── test_packages.py
├── test_project.py
├── test_pyd.py
├── test_stub_source.py
├── test_stubs/
│ ├── bad_test_stub/
│ │ └── modules.json
│ ├── esp32_test_stub/
│ │ ├── frozen/
│ │ │ ├── ntptime.py
│ │ │ └── ntptime.pyi
│ │ ├── info.json
│ │ └── stubs/
│ │ ├── machine.py
│ │ └── modules.json
│ └── esp8266_test_stub/
│ ├── frozen/
│ │ ├── ntptime.py
│ │ └── ntptime.pyi
│ ├── info.json
│ └── stubs/
│ ├── machine.py
│ └── modules.json
├── test_stubs.py
├── test_stubs_repo.py
├── test_template.py
├── test_utils/
│ ├── fail.json
│ ├── pass.json
│ └── schema.json
└── test_utils.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .chglog/CHANGELOG.tpl.md
================================================
{{ if .Versions -}}
## [Unreleased]
{{ if .Unreleased.CommitGroups -}}
{{ range .Unreleased.CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
{{ range .Versions }}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]{{ else }}{{ .Tag.Name }}{{ end }} - {{ datetime "2006-01-02" .Tag.Date }}
{{ range .CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}
{{- if .RevertCommits -}}
### Reverts
{{ range .RevertCommits -}}
- {{ .Revert.Header }}
{{ end }}
{{ end -}}
{{- if .MergeCommits -}}
### Pull Requests
{{ range .MergeCommits -}}
- {{ .Header }}
{{ end }}
{{ end -}}
{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}
{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
{{- if .Versions }}
[Unreleased]: {{ .Info.RepositoryURL }}/compare/{{ $latest := index .Versions 0 }}{{ $latest.Tag.Name }}...HEAD
{{ range .Versions -}}
{{ if .Tag.Previous -}}
[{{ .Tag.Name }}]: {{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}
{{ end -}}
{{ end -}}
{{ end -}}
================================================
FILE: .chglog/config.yml
================================================
style: github
template: CHANGELOG.tpl.md
info:
title: CHANGELOG
repository_url: https://github.com/BradenM/micropy-cli
options:
commits:
filters:
Type:
- feat
- fix
- perf
- refactor
commit_groups:
title_maps:
feat: Features
fix: Bug Fixes
perf: Performance Improvements
refactor: Code Refactoring
header:
pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
pattern_maps:
- Type
- Scope
- Subject
issues:
prefix:
- #
refs:
actions:
- Closes
- Fixes
merges:
pattern: "^Merge branch '(\\w+)'$"
pattern_maps:
- Source
reverts:
pattern: "^Revert \"([\\s\\S]*)\"$"
pattern_maps:
- Header
notes:
keywords:
- BREAKING CHANGE
================================================
FILE: .editorconfig
================================================
# http://editorconfig.org
root = true
[*]
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true
charset = utf-8
end_of_line = lf
[LICENSE]
insert_final_newline = false
[Makefile]
indent_style = tab
[*.py]
profile = black
[*.y{,a}ml]
indent_size = 2
================================================
FILE: .git-blame-ignore-revs
================================================
# Format all files with pre-commit->black, isort, autoflake and other hooks
80755a39f3fbd9983a07ff4571197b8f6dcae1cb
# style(pyupgrade): format all files
402f8e68ee9c211e1f0238b8531b3ee0a9ce6806
# style: format all with new pre-commit rules.
0d679f27331c2ba851541c5d0ab34fe04923e660
================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Create a report to help us improve Micropy Cli
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
**Expected Behavior**
**Current Behavior**
**Steps to Reproduce**
1.
2.
3.
**Possible Solution**
**Logs**
**Context (Environment)**
* OS:
* Micropy Version:
* Python Version:
* VSCode Version:
================================================
FILE: .github/ISSUE_TEMPLATE/config.yml
================================================
blank_issues_enabled: true
contact_links:
- name: Micropy Stubs
url: https://github.com/BradenM/micropy-stubs
about: Module missing or can't find your device? Make an issue on micropy-stubs.
================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.md
================================================
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
**Describe the solution you'd like**
**Describe alternatives you've considered**
**Additional context**
================================================
FILE: .github/ISSUE_TEMPLATE/question.md
================================================
---
name: Question
about: Got a question? Ask it!
title: ''
labels: question
assignees: ''
---
================================================
FILE: .github/actions/setup-micropy/action.yml
================================================
name: "Setup Micropy"
description: "Setup micropy CI env."
inputs:
poetry-version:
description: Poetry version to use.
required: true
poetry-install-url:
description: Poetry install url to use.
required: false
default: "https://install.python-poetry.org"
poetry-home:
description: Path to use as POETRY_HOME
required: false
default: "/tmp/opt/poetry"
python-version:
description: Python version to use.
required: true
runner:
description: Explicit runner cache to use.
required: false
default: ""
runs:
using: composite
steps:
# See: https://github.com/actions/cache/blob/main/workarounds.md#improving-cache-restore-performance-on-windowsusing-cross-os-caching
- name: Use GNU tar
if: runner.os == 'Windows'
shell: cmd
run: |
echo "Adding GNU tar to PATH"
echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%"
- name: Setup Poetry env.
shell: bash
run: |
echo 'POETRY_HOME=${{ inputs.poetry-home }}' >> $GITHUB_ENV
echo 'POETRY_VIRTUALENVS_IN_PROJECT=true' >> $GITHUB_ENV
echo 'POETRY_NO_INTERACTION=true' >> $GITHUB_ENV
echo 'POETRY_VERSION=${{ inputs.poetry-version }}' >> $GITHUB_ENV
echo "${{ inputs.poetry-home }}/bin" >> $GITHUB_PATH
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Setup Win Path
if: runner.os == 'Windows'
shell: pwsh
run: |
echo "C:\Users\runneradmin\AppData\Local\Temp\opt\poetry\bin" | Out-File -FilePath $env:GITHUB_PATH -Append
echo "C:\Users\runneradmin\AppData\Local\Temp\opt\poetry\venv\Scripts" | Out-File -FilePath $env:GITHUB_PATH -Append
- name: Workaround Poetry v1.4.0 Windows issues.
if: runner.os == 'Windows'
shell: bash
run: |
# have not looked into why this occurs.
# just disable new installer for windows.
echo 'POETRY_INSTALLER_MODERN_INSTALLATION=false' >> $GITHUB_ENV
- name: Cache poetry install.
uses: actions/cache@v3
id: poetry-install-cache
with:
path: ${{ inputs.poetry-home }}/install-poetry.py
key: poetry-install-${{ inputs.runner || matrix.os || runner.os }}-${{ inputs.poetry-version }}
- name: Fetch Poetry Installer
shell: bash
if: steps.poetry-install-cache.outputs.cache-hit != 'true'
run: |
mkdir -p "${{ inputs.poetry-home }}"
curl -sSL -o ${{ inputs.poetry-home }}/install-poetry.py ${{ inputs.poetry-install-url }}
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
id: python-setup
with:
python-version: ${{ inputs.python-version }}
- name: Install Poetry
shell: bash
run: |
python ${{ inputs.poetry-home }}/install-poetry.py --version ${{ inputs.poetry-version }}
${{ inputs.poetry-home }}/bin/poetry --version
- name: Get poetry cache dir.
id: poetry-config
shell: bash
run: |
POETRY="${{ inputs.poetry-home }}/bin/poetry"
CACHE_DIR="$($POETRY config cache-dir)"
echo "Poetry cache: $CACHE_DIR"
echo "cache-dir=$CACHE_DIR" >> $GITHUB_OUTPUT
- name: Cache poetry cache.
uses: actions/cache@v3
with:
path: ${{ steps.poetry-config.outputs.cache-dir }}
key: poetry-cache-${{ inputs.runner || matrix.os || runner.os }}-${{ inputs.python-version }}-${{ inputs.poetry-version }}
- name: Cache virtual env.
uses: actions/cache@v3
id: venv-cache
with:
path: .venv
key: poetry-venv-${{ inputs.runner || matrix.os || runner.os }}-${{ inputs.python-version }}-${{ inputs.poetry-version }}-${{ hashFiles('poetry.lock') }}
restore-keys: |
poetry-venv-${{ inputs.runner || matrix.os || runner.os }}-${{ inputs.python-version }}-${{ inputs.poetry-version }}-
- name: Install dependencies.
shell: bash
if: steps.venv-cache.outputs.cache-hit != 'true'
run: |
${{ inputs.poetry-home }}/bin/poetry install --with docs --with test -v
${{ inputs.poetry-home }}/bin/poetry env info
================================================
FILE: .github/codeql/codeql-config.yml
================================================
# do not scan entire .venv
# security alters stemming from dependencies are handled elsewhere (dependabot alerts+github).
paths-ignore:
- .venv
- test
================================================
FILE: .github/renovate.json5
================================================
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: [
"config:base",
":rebaseStalePrs",
":prConcurrentLimit20",
":prHourlyLimitNone",
":pinDependencies",
":automergeMinor",
":automergeDigest",
],
addLabels: ["dependencies"],
major: {
automerge: false,
},
ignorePaths: ["docs/requirements.txt"],
dependencyDashboard: true,
packageRules: [
{
matchDepTypes: ["devDependencies"],
matchUpdateTypes: ["minor", "patch"],
automerge: true,
groupName: "devDependencies (non-major)"
},
{
matchDepTypes: ["devDependencies"],
matchUpdateTypes: ["major"],
automerge: true
}
],
}
================================================
FILE: .github/workflows/changelog.yml
================================================
name: Changelog
on:
push:
branches:
- master
jobs:
changelog:
name: Generate Changelog
runs-on: ubuntu-latest
continue-on-error: true
if: github.actor != 'github-actions[bot]' || github.event.pusher.email != 'github-actions[bot]@users.noreply.github.com'
steps:
- uses: actions/checkout@v3
with:
token: ${{ secrets.GH_PAT }}
fetch-depth: 0
- name: Setup go
uses: actions/setup-go@v3
- name: Generate Changelog
run: go run github.com/git-chglog/git-chglog/cmd/git-chglog@latest --output CHANGELOG.md
- name: Commit Changes
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git commit -m "chore(chglog): update changelog." -a || true
- name: Push Changes
uses: ad-m/github-push-action@77c5b412c50b723d2a4fbc6d71fb5723bcd439aa
with:
github_token: ${{ secrets.GH_PAT }}
branch: ${{ github.ref }}
================================================
FILE: .github/workflows/codeql-analysis.yml
================================================
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
schedule:
- cron: "24 9 * * 4"
env:
POETRY_VERSION: 1.8.3
PYTHON_VERSION: 3.11
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ["python"]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://git.io/codeql-language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Setup environment.
uses: ./.github/actions/setup-micropy
with:
poetry-version: ${{ env.POETRY_VERSION }}
python-version: ${{ env.PYTHON_VERSION }}
runner: ubuntu-latest
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
env:
CODEQL_PYTHON: ./.venv/bin/python
with:
languages: ${{ matrix.language }}
setup-python-dependencies: false
config-file: ./.github/codeql/codeql-config.yml
source-root: micropy
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
- name: Perform CodeQL Analysis
env:
CODEQL_PYTHON: ./.venv/bin/python
uses: github/codeql-action/analyze@v2
================================================
FILE: .github/workflows/main.yml
================================================
name: Test MicropyCli
on:
pull_request: ~
push:
branches:
- master
env:
POETRY_VERSION: 1.8.3
concurrency:
group: main-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
test:
name: ${{ matrix.os }} @ Py v${{ matrix.python }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
# explicitly use macOS-12 to avoid
# macOS-11 cached wheels failing to install on a 12 runner.
# this is due to an active transition by github.
# see: https://github.blog/changelog/2022-10-03-github-actions-jobs-running-on-macos-latest-are-now-running-on-macos-12/
os: [windows-latest, macOS-12, ubuntu-latest]
python: ["3.9", "3.10", "3.11"]
steps:
- uses: actions/checkout@v3
with:
submodules: true
- name: Setup environment.
uses: ./.github/actions/setup-micropy
with:
poetry-version: ${{ env.POETRY_VERSION }}
python-version: ${{ matrix.python }}
- name: Run Tests
run: poetry run pytest --cov --cov-config=pyproject.toml --junit-xml=test_log.xml --cov-report=xml:cov.xml -vv -ra -n'auto'
- name: Upload Codecov
uses: codecov/codecov-action@v3
env:
OS: ${{ matrix.os }}
PYTHON: ${{ matrix.python }}
with:
files: ./cov.xml
fail_ci_if_error: false
flags: unittests,py-${{ matrix.python }},os-${{ matrix.os }}
env_vars: OS,PYTHON
================================================
FILE: .github/workflows/publish.yml
================================================
name: Publish Release
on:
release:
types:
- created
env:
POETRY_VERSION: 1.8.3
PYTHON_VERSION: 3.11
jobs:
publish:
name: Publish
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: true
- name: Setup environment.
uses: ./.github/actions/setup-micropy
with:
poetry-version: ${{ env.POETRY_VERSION }}
python-version: ${{ env.PYTHON_VERSION }}
- name: Build
run: poetry build
- name: Publish to PyPi
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
run: poetry publish
================================================
FILE: .github/workflows/release.yml
================================================
name: Release
on:
workflow_dispatch:
push:
branches:
- master
jobs:
release-please:
name: Release Please
runs-on: ubuntu-latest
steps:
- name: Release Please
id: release-please
# see: googleapis/release-please#1837
uses: BradenM/release-please-action@d0fa220390843191f01153795b2e5dce67410563
with:
token: ${{ secrets.GH_PAT }}
command: manifest
================================================
FILE: .gitignore
================================================
# Created by https://www.gitignore.io/api/python
# Edit at https://www.gitignore.io/?templates=python
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
cov.xml
test_log.xml
.testmondata
.tmontmp/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don’t work, or not
# install all needed dependencies.
Pipfile.lock
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
.envrc
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# End of https://www.gitignore.io/api/python
.vscode
!.vscode/tasks.json
!micropy/project/template/**/*
!micropy/lib
!tests/data/project_test/**/*
# Created by https://www.toptal.com/developers/gitignore/api/pycharm+all
# Edit at https://www.toptal.com/developers/gitignore?templates=pycharm+all
### PyCharm+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### PyCharm+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
.idea/
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
*.iml
modules.xml
.idea/misc.xml
*.ipr
# Sonarlint plugin
.idea/sonarlint
# End of https://www.toptal.com/developers/gitignore/api/pycharm+all
temp/*
# Created by https://www.toptal.com/developers/gitignore/api/direnv
# Edit at https://www.toptal.com/developers/gitignore?templates=direnv
### direnv ###
.direnv
.envrc
# End of https://www.toptal.com/developers/gitignore/api/direnv
================================================
FILE: .pre-commit-config.yaml
================================================
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
/(
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| build
| dist
| micropy/lib
)/
| foo.py
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: debug-statements
- id: detect-private-key
- id: end-of-file-fixer
- id: check-executables-have-shebangs
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.0.255"
hooks:
- id: ruff
args:
- --fix
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
- repo: https://github.com/python-poetry/poetry
rev: 1.4.1
hooks:
- id: poetry-check
files: "^(pyproject.toml|poetry.lock)$"
ci:
autofix_commit_msg: "ci(pre-commit.ci): 🎨 Auto format from pre-commit.com hooks"
autoupdate_commit_msg: "ci(pre-commit.ci): ⬆ pre-commit autoupdate"
================================================
FILE: .prettierignore
================================================
micropy/template/**
================================================
FILE: .readthedocs.yml
================================================
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/conf.py
# Fetch Submodules
submodules:
include: all
# Optionally build your docs in additional formats such as PDF and ePub
formats: all
build:
os: ubuntu-22.04
tools:
python: "3.11"
jobs:
# see: https://docs.readthedocs.io/en/stable/build-customization.html#install-dependencies-with-poetry
post_create_environment:
- pip install poetry==1.4.1
- poetry config virtualenvs.create false
post_install:
- poetry install --with docs
================================================
FILE: .release-please-manifest.json
================================================
{
".": "4.2.2"
}
================================================
FILE: .tool-versions
================================================
python 3.11.5
poetry 1.8.3
git-chglog 0.15.2
================================================
FILE: CHANGELOG.md
================================================
## [4.2.2](https://github.com/BradenM/micropy-cli/compare/v4.2.1...v4.2.2) (2023-06-14)
### Bug Fixes
* **cli:** Re-add version as command ([6632c5b](https://github.com/BradenM/micropy-cli/commit/6632c5bbe82f5ebf6f306c5fbe500213d208a0fb))
* **deps:** Update dependency libcst to v0.4.10 ([5b33886](https://github.com/BradenM/micropy-cli/commit/5b33886eb6210231a7afc4be5920f7b7c92fd762))
* **deps:** Update dependency markupsafe to v2.1.3 ([2b5e7a3](https://github.com/BradenM/micropy-cli/commit/2b5e7a38f53cc7605a310187817a9a85a11b7624))
* **deps:** Update dependency pydantic to v1.10.8 ([3672fcf](https://github.com/BradenM/micropy-cli/commit/3672fcf6997647366abea9672c102dc24bc5bf77))
* **deps:** Update dependency pydantic to v1.10.9 ([66fa6e5](https://github.com/BradenM/micropy-cli/commit/66fa6e5147b29183c09dadfd4dd65e7956111596))
* **deps:** Update dependency requests to v2.31.0 ([352d3b3](https://github.com/BradenM/micropy-cli/commit/352d3b3ad09c0723c7b9572245c336ccac22afb7))
* **deps:** Update dependency typing-extensions to v4.6.0 ([4858007](https://github.com/BradenM/micropy-cli/commit/4858007c327a8a258196ecbca4fe4507da90332b))
* **deps:** Update dependency typing-extensions to v4.6.1 ([24c16f2](https://github.com/BradenM/micropy-cli/commit/24c16f2978f08a6e2ac4387354ed7e309ff2158c))
* **deps:** Update dependency typing-extensions to v4.6.2 ([98ae691](https://github.com/BradenM/micropy-cli/commit/98ae691da95de484e9dd68e0a13f79400755168c))
* **deps:** Update dependency typing-extensions to v4.6.3 ([dd993e5](https://github.com/BradenM/micropy-cli/commit/dd993e517bab238d01aa11d03bb79aac2fe50020))
## [4.2.1](https://github.com/BradenM/micropy-cli/compare/v4.2.0...v4.2.1) (2023-05-18)
### Bug Fixes
* **deps:** Update dependency attrs to v23 ([3422a84](https://github.com/BradenM/micropy-cli/commit/3422a84f576e2db7e39331512e71e3d72ed85d55))
* **deps:** Update dependency python-minifier to v2.9.0 ([2a7464c](https://github.com/BradenM/micropy-cli/commit/2a7464cf9be34dc4766083fab6570fe0c507e5d7))
* **deps:** Update dependency requests to v2.29.0 ([8361e7c](https://github.com/BradenM/micropy-cli/commit/8361e7cc15b0bdcefa07048814f30c0d4bb531b3))
* **deps:** Update dependency requests to v2.30.0 ([9e389b1](https://github.com/BradenM/micropy-cli/commit/9e389b11c171d16af26b28217993e38bbbf4fa4f))
* **deps:** Update dependency typer to v0.8.0 ([d2bf4e6](https://github.com/BradenM/micropy-cli/commit/d2bf4e69e83da62b589658ed3210742152547db6))
* **deps:** Update dependency typer to v0.9.0 ([f8f4105](https://github.com/BradenM/micropy-cli/commit/f8f4105e65370eb275114f1e30ce5c3361e61dd5))
* **stubs:** Always ensure correct pyi stub root paths. ([27f87a1](https://github.com/BradenM/micropy-cli/commit/27f87a1d697e2ee5e18d178eb46c7532b3c2fe29))
* **stubs:** Do not drop firmware name when parsing from dist metadata. ([be269ee](https://github.com/BradenM/micropy-cli/commit/be269ee5e241d9274d100c5dc1c9f28278b0d1d6))
* **template:** Resolve pylance type-checking / import errors. ([179c29d](https://github.com/BradenM/micropy-cli/commit/179c29d68bf025c1b3436d53dd3f9168d93285cf))
* **template:** Update pylint config to use `MAIN` and `INFERENCE` confidence level. ([f5f5c98](https://github.com/BradenM/micropy-cli/commit/f5f5c983cbeeb6730c85750ac78115d86135d44b))
## [4.2.0](https://github.com/BradenM/micropy-cli/compare/v4.2.0-beta.3...v4.2.0) (2023-04-22)
### Features
* **app:** Add exclude_defaults, improve help in stubs create command. ([efa3263](https://github.com/BradenM/micropy-cli/commit/efa3263121704e56d6e4dc0aa4975228b4731184))
### Documentation
* **app:** Improve stubs create help. ([9738ac7](https://github.com/BradenM/micropy-cli/commit/9738ac71256e8375ee190f3114ef5cb7e46c3506))
### Miscellaneous Chores
* **release:** Release as 4.2.0 ([817c583](https://github.com/BradenM/micropy-cli/commit/817c583bc460e642ddad0b383d88483945ee234b))
## [4.2.0-beta.3](https://github.com/BradenM/micropy-cli/compare/v4.2.0-beta.2...v4.2.0-beta.3) (2023-04-17)
### Features
* **app:** Expose compile, module-defaults flags, integrity in stubs create command. ([885e9d3](https://github.com/BradenM/micropy-cli/commit/885e9d369cad98345e6132315f6fb76694379339))
* **exc:** `PyDeviceFileIntegrityError` exception. ([a2187c8](https://github.com/BradenM/micropy-cli/commit/a2187c898c9aeb641a63a9237d2469faaf929de5))
* **pyd:** `NoOpConsumer` implementation. ([5a21ecd](https://github.com/BradenM/micropy-cli/commit/5a21ecd1e85f74a5976aac209457d8273d8596cb))
* **pydevice:** File integrity support, simple run in pydevice. ([8b7a255](https://github.com/BradenM/micropy-cli/commit/8b7a2557d42cb70430430ad04280dffebd45aeb6))
* **pyd:** File integrity check support in upydevice backend. ([a3cbf31](https://github.com/BradenM/micropy-cli/commit/a3cbf31f8ff5fd5f0da24661a911f030672d93b7))
* **pyd:** Implement `remove` across backends. ([f0bba8d](https://github.com/BradenM/micropy-cli/commit/f0bba8d25cd7eb6bdb2567bfde97c22a02088c16))
* **pyd:** Use noop consumer default for upy `eval`/`eval_script` ([720ace4](https://github.com/BradenM/micropy-cli/commit/720ace4355d5e7a794e6d204a6bccfe0249d5f5d))
* **utils:** Support compiling createstubs with mpy-cross ([1819197](https://github.com/BradenM/micropy-cli/commit/181919714c6d5d9a5a382e673bd81334164cbb53))
### Bug Fixes
* **deps:** Pin dependency typer to 0.7.0 ([30ab97f](https://github.com/BradenM/micropy-cli/commit/30ab97fc450ae5af9f58cf3e5770ce609cba9745))
* **pyd:** Remove usedforsecurity hash flag for py3.8 ([522a0c3](https://github.com/BradenM/micropy-cli/commit/522a0c3537634950178178628d097944ce554cb3))
* **pyd:** Support pushing binary files to pydevice in upydevice backend. ([e58c1f2](https://github.com/BradenM/micropy-cli/commit/e58c1f2f90f6e901298dfe54d0b5234818107140))
### Miscellaneous Chores
* **release:** Set release to v4.2.0-beta3 ([9ee81a2](https://github.com/BradenM/micropy-cli/commit/9ee81a2f4f63de78716b73b2bc4224a50e58a2c4))
## [4.2.0-beta.2](https://github.com/BradenM/micropy-cli/compare/v4.2.0-beta.1...v4.2.0-beta.2) (2023-03-27)
### Bug Fixes
* **deps:** Use latest stable typer. ([ec3082e](https://github.com/BradenM/micropy-cli/commit/ec3082ed541b724743f8bc7b5bb19ecce9c9e2bc))
## [4.2.0-beta.1](https://github.com/BradenM/micropy-cli/compare/v4.2.0-beta...v4.2.0-beta.1) (2023-03-27)
### Features
* **app:** Extract main cli logic out to command/option callbacks. ([e3f6401](https://github.com/BradenM/micropy-cli/commit/e3f6401f99faebaec13946b371b966d22edd681b)), closes [#338](https://github.com/BradenM/micropy-cli/issues/338)
* **app:** Implement main app cli entries with typer. ([d2b22a5](https://github.com/BradenM/micropy-cli/commit/d2b22a567a0e0e084378f0279aa23f5802fa8bc6))
* **app:** Implement stubs subcommand with typer. ([7733f24](https://github.com/BradenM/micropy-cli/commit/7733f243c62065500be46b5995819076c41fae32))
* **app:** Link to Josverl/micropython-stubber in create help ([88fb8dd](https://github.com/BradenM/micropy-cli/commit/88fb8dd8a90d120483ae008e1d46e553293032c6))
* **cli:** Remove old cli module. ([8e14fa6](https://github.com/BradenM/micropy-cli/commit/8e14fa691158a4c537cd89d39db1b92cd208bc42))
* **deps:** Add rich, typer, shellingham ([62f7c72](https://github.com/BradenM/micropy-cli/commit/62f7c729442399038b326628fa5c96c26b0e84a3))
* **deps:** Remove pytest-clarity ([d712580](https://github.com/BradenM/micropy-cli/commit/d71258076ed85343a5a145418e5dab11f446b85e))
* **dev:** Add and utilize external mock with pytest-mock ([79b3d96](https://github.com/BradenM/micropy-cli/commit/79b3d965f46a4ffd1e9bb972267f80ae77b68fda))
* **dev:** Add pdbpp to dev deps. ([239cb3e](https://github.com/BradenM/micropy-cli/commit/239cb3ebc690fd83583d4126cecae7268aea7c4e))
* **main:** Allow override of primary data directories. ([1689bfd](https://github.com/BradenM/micropy-cli/commit/1689bfdca7c8dcdb3b4cf137978c51a2ee33506e))
* **main:** Remove stub creation logic from micropy main state. ([1f8d9ba](https://github.com/BradenM/micropy-cli/commit/1f8d9baf32e742757f8bef348e90b289317cf9b4))
* **utils:** Remove import catch for stubber. ([91103e6](https://github.com/BradenM/micropy-cli/commit/91103e6e790954648ccf6d7f50d5049bfc477bfa))
### Bug Fixes
* **app:** Use future annotations. ([450c27d](https://github.com/BradenM/micropy-cli/commit/450c27d9f1b6748464715e888144eca9d0ac2c62))
* **compat:** Typer list type errors on py3.8 ([3483302](https://github.com/BradenM/micropy-cli/commit/34833023e9c80e74b02cea1d51447a6b203064b7))
* **compat:** Use typing.Type in app.stubs ([00858c2](https://github.com/BradenM/micropy-cli/commit/00858c292d4a098b4414138842a97761384cbaef))
* **deps:** Exclude pdbpp on windows ([445455e](https://github.com/BradenM/micropy-cli/commit/445455e07917cc1b7f647bc7cb89a65050a2ade9))
* **deps:** Update dependency pydantic to v1.10.7 ([36e4727](https://github.com/BradenM/micropy-cli/commit/36e47271a1da50a9c1a35646b0f82011781bf721))
### Miscellaneous Chores
* **release:** Set release to v4.2.0-beta.1 ([614d3aa](https://github.com/BradenM/micropy-cli/commit/614d3aac359373d929b113874d7d73976a2cbb3f))
## [4.2.0-beta](https://github.com/BradenM/micropy-cli/compare/v4.1.0...v4.2.0-beta) (2023-03-20)
### Features
* **cli:** Expose backend option to select pydevice backend. ([43f3751](https://github.com/BradenM/micropy-cli/commit/43f3751620acd6fcb49d0e4cc63d9afd4b998857))
* **deps:** Add libcst as dependency, remove py38 constraint from ([9bdb811](https://github.com/BradenM/micropy-cli/commit/9bdb811a2be6c2258f9e3619b9a9265565fe5ab7))
* **deps:** Add lint dependency group, remove unused/replaced with ([f820baa](https://github.com/BradenM/micropy-cli/commit/f820baae961b41d3386f1823565709be67436df0))
* **deps:** Add micropython-stubber as proper library. ([d82f5fc](https://github.com/BradenM/micropy-cli/commit/d82f5fcc49f3adc3c25a1eea010091f48ebbe056))
* **dx:** Replace pyupgrade/autoflake hooks with ruff ([3c47ebd](https://github.com/BradenM/micropy-cli/commit/3c47ebdee44547c2f3370fb18f6b6ef0ba5b7617))
* **lib:** Remove old micropython-stubber submodule. ([ea1ee8c](https://github.com/BradenM/micropy-cli/commit/ea1ee8cdebc8ed2e7be593412e069fd1c1a2fd39))
* **main:** Support create stubs backend parameter, utilize create stub variant. ([5cb26c3](https://github.com/BradenM/micropy-cli/commit/5cb26c38e06606b84f515ac7f8f0506ddf057e8b))
* **pkg:** Drop support for python 3.7 ([87eb790](https://github.com/BradenM/micropy-cli/commit/87eb7901fe7d0ba5cd974629fadbb82524603af7))
* **utils:** Prepare create stubs with codemod variants/modules, update stubmaker imports. ([2b8de82](https://github.com/BradenM/micropy-cli/commit/2b8de8298d5220f301b792f0718e74be5429904c))
### Bug Fixes
* **deps:** Pin dependencies ([9a7f407](https://github.com/BradenM/micropy-cli/commit/9a7f407e39be33a639c48de3527d6482ece82f26))
* **deps:** Remove pypi-test sourced from pyproject ([b3fc9e3](https://github.com/BradenM/micropy-cli/commit/b3fc9e3b09002c5d5c17fe28173055cc3d3d830e))
* **deps:** Target isort <5.12.0 when on py3.7 ([f936752](https://github.com/BradenM/micropy-cli/commit/f936752a73898e9dce3ef5f3f4763384db0eab79))
* **deps:** Target pylint <2.13 when on py3.7 ([80dc833](https://github.com/BradenM/micropy-cli/commit/80dc833bb6128c033e1278ba5f3a93afe70ff2e8))
* **deps:** Update dependency boltons to v23 ([27238ba](https://github.com/BradenM/micropy-cli/commit/27238ba47d817e3286b4a1b4fa0ccc1155985e6a))
* **deps:** Update dependency mypy to v1.1.1 ([a88668c](https://github.com/BradenM/micropy-cli/commit/a88668c39019ee466a388c0ca868305687537002))
* **deps:** Update dependency pydantic to v1.10.6 ([8c600f2](https://github.com/BradenM/micropy-cli/commit/8c600f24f03ae2b1349b9c483f46c21a76de3e51))
* **deps:** Update dependency python-minifier to v2.8.1 ([24ce47d](https://github.com/BradenM/micropy-cli/commit/24ce47d875e9bc5a0c4f20bfd582a313642aac78))
### Documentation
* **cfg:** Remove requirements ([2b54413](https://github.com/BradenM/micropy-cli/commit/2b54413c2e4923aeb4ed7da2e3b78d92407b3db0))
* **cfg:** Update rtd config to setup env w/ poetry. ([82c4da0](https://github.com/BradenM/micropy-cli/commit/82c4da02955080e5202bb1630a69daadce8323f4))
### Miscellaneous Chores
* **release:** Set release v4.2.0-beta ([0e2d138](https://github.com/BradenM/micropy-cli/commit/0e2d13883f7c45c36a75ee3d51e6dd63057b0b96))
## [4.1.0](https://github.com/BradenM/micropy-cli/compare/v4.1.0-beta...v4.1.0) (2023-03-05)
### Bug Fixes
* **deps:** Update dependency cachier to v2 ([956cce8](https://github.com/BradenM/micropy-cli/commit/956cce8ca8c594659fd3e57a77c65d0d65036ff9))
* **deps:** Update dependency gitpython to v3.1.31 ([65b3e83](https://github.com/BradenM/micropy-cli/commit/65b3e83fa7136765c10f268409d002f3c784c4dc))
* **deps:** Update dependency packaging to v23 ([b81b513](https://github.com/BradenM/micropy-cli/commit/b81b513248e82bb14e7d24e9b528f37fb278942b))
* **deps:** Update dependency pydantic to v1.10.5 ([0fb9624](https://github.com/BradenM/micropy-cli/commit/0fb96244da1f42207aa09cb2d1230f0c5278a6f6))
* **deps:** Update dependency tqdm to v4.65.0 ([9fb64dd](https://github.com/BradenM/micropy-cli/commit/9fb64ddb893a48b60ba41d5b25b422a7542c9b09))
* **deps:** Update dependency typing-extensions to v4.5.0 ([e6c57c3](https://github.com/BradenM/micropy-cli/commit/e6c57c30bf7f73b887437950d72593594f6f08ac))
* **pyd:** Backend rshell excess consumer kwarg, can't union with supported py versions. ([de8da4e](https://github.com/BradenM/micropy-cli/commit/de8da4e46d3033d4a1d8c25452d975a26ed1892d))
### Miscellaneous Chores
* **release:** Update release. ([7b6d9bb](https://github.com/BradenM/micropy-cli/commit/7b6d9bb90f8d003627e57e5a8d3e70bba2e104d9))
## [4.1.0-beta](https://github.com/BradenM/micropy-cli/compare/v4.0.0...v4.1.0-beta) (2023-01-30)
### Features
* **cli:** Add flag to show outdated stub packages in search + group output by repo. ([e2cdff7](https://github.com/BradenM/micropy-cli/commit/e2cdff7b0642e461182704835a6c826693c0deca))
* **cli:** Format repo as title in stubs search output. ([eaf0543](https://github.com/BradenM/micropy-cli/commit/eaf054307f4669ab1578b8bc090fbd9c1b42ab7a))
* **cli:** Improve stub search output. ([4c127ac](https://github.com/BradenM/micropy-cli/commit/4c127ac5dab299e91a56fa2d675b771b45a79cdd))
* **cli:** Utilize stub source locators during add. ([d24b409](https://github.com/BradenM/micropy-cli/commit/d24b4095c168184025be5736aee4fbc69427c6df))
* **data:** Add display names for current stub sources. ([7f6b2cd](https://github.com/BradenM/micropy-cli/commit/7f6b2cd4afe9bcea1b76029a2f94d96bc3d18de8))
* **data:** Add micropython-stubs source ([de9c2e2](https://github.com/BradenM/micropy-cli/commit/de9c2e2c987422d4871b60ab0cd7ade624c387d8))
* **deps:** Add attrs/pydantic ([06660f0](https://github.com/BradenM/micropy-cli/commit/06660f0bdaf6c06e3787dd170e7d47bd036c3722))
* **deps:** Add distlib. ([fab22ba](https://github.com/BradenM/micropy-cli/commit/fab22ba2e2fbccf791e64f825dc3545ef2c5606d))
* **deps:** Add importlib_metadata as dep. ([6acf3ca](https://github.com/BradenM/micropy-cli/commit/6acf3ca11f9dd7444aa2e39fd0a17fe52cb3226c))
* **deps:** Add pytest-clarity+better-exceptions to dev deps. ([dc9d958](https://github.com/BradenM/micropy-cli/commit/dc9d9587c5acab63fe5d6deb6b1e9e29716ee9e0))
* **main:** Drop in new StubRepository impl in place of StubRepo. ([25f0402](https://github.com/BradenM/micropy-cli/commit/25f0402fe65420337d9dffcc1a7abdc3962f2f1f))
* **main:** Init `StubRepository` as attr. ([c17be65](https://github.com/BradenM/micropy-cli/commit/c17be65e532c04bbaddd00f1d38c01abb8f1e2ff))
* **pkg:** Add __main__ module entry. ([2388858](https://github.com/BradenM/micropy-cli/commit/238885848cf47d4dadab907130a3c715341bb9d9))
* **pkg:** Cleanup package entry, dynamically resolve version. ([72ea665](https://github.com/BradenM/micropy-cli/commit/72ea66584387bc3f0db85d9042d14b9ad676884e))
* **project:** Add pylance settings to vscode template. ([bbdc936](https://github.com/BradenM/micropy-cli/commit/bbdc936c5885dfe80fde7676344ce97cada6807f))
* **project:** Assume pylance until proper refactorings can be done. ([2610c2a](https://github.com/BradenM/micropy-cli/commit/2610c2a38b83c0d525e3a31c4ff0d40fc88a7ea7))
* **stubs:** `RepoStubLocator` locate strategy. ([08f8f86](https://github.com/BradenM/micropy-cli/commit/08f8f863b2fe8b5eb75aa88374935299d03ba6c5))
* **stubs:** Accept generic package type in stub manifest ([9d17331](https://github.com/BradenM/micropy-cli/commit/9d173316f0dc7da5479523b82159d461ee990b46))
* **stubs:** Add `display_name` field to stub repository. ([a3ef03f](https://github.com/BradenM/micropy-cli/commit/a3ef03f50b79cb0139c4b0423c86f32d3c1acd30))
* **stubs:** Add `resolve_package_(absolute,)_versioned_name` to manifest. ([37bbfa6](https://github.com/BradenM/micropy-cli/commit/37bbfa678bb29cc86f1a202ec994f9e40fb6e0fa))
* **stubs:** Add method for resolving absolute stub package name from manifest. ([ad55507](https://github.com/BradenM/micropy-cli/commit/ad55507a6b19b5273ef0f9c44d1112faa7b151c9))
* **stubs:** Add MicropyStubs package/manifest models. ([021c279](https://github.com/BradenM/micropy-cli/commit/021c279fdc2b50a55a641761ecfc32bf54398b9b))
* **stubs:** Add Micropython stubs package/manifest models. ([a9297dc](https://github.com/BradenM/micropy-cli/commit/a9297dcfacbd5a31e7207b43300cd4376d9ce089))
* **stubs:** Add RepositoryInfo model. ([109aed3](https://github.com/BradenM/micropy-cli/commit/109aed30e7e588dcbeae405ead174c6b1c26d745))
* **stubs:** Add resolve package url abstract meth to stubs manifest ([8737f52](https://github.com/BradenM/micropy-cli/commit/8737f529365f556a0d3d1e4ccde1da2e2beae7aa))
* **stubs:** Add StubPackage model. ([9664111](https://github.com/BradenM/micropy-cli/commit/9664111e9304fa3149da992adbf247ec97587f1b))
* **stubs:** Add StubRepository for managing stub manifests. ([781f7cd](https://github.com/BradenM/micropy-cli/commit/781f7cddcf3912c8da89e790b717b34f04366737))
* **stubs:** Add StubRepositoryPackage model. ([e0dda9f](https://github.com/BradenM/micropy-cli/commit/e0dda9f193eca1499f3d42e0df8fb8f2e8ecc0f8))
* **stubs:** Add StubsManifest model. ([3ae9456](https://github.com/BradenM/micropy-cli/commit/3ae9456a3277e16161f0c8f29616377338afce4c))
* **stubs:** Assume latest version by default, optionally show latest only in search, general improvements in stub repo. ([b55b483](https://github.com/BradenM/micropy-cli/commit/b55b483d591990f6cdf264e2ad1fcc9f190dddb0))
* **stubs:** Build progressive package indexes in `StubRepository`, utilize in search/resolve. ([318ec13](https://github.com/BradenM/micropy-cli/commit/318ec13e1fb35e6f757ec426c1f32c712de00f9b))
* **stubs:** Check absolute name for stub resolve matching. ([142648d](https://github.com/BradenM/micropy-cli/commit/142648d4e2edabd6956723e781f6d7608a5f6030))
* **stubs:** Enforce faux immutability in StubRepository. ([a17cc5e](https://github.com/BradenM/micropy-cli/commit/a17cc5e66e938edd093ba1e4c7c283ab6d64d074))
* **stubs:** Expose `repo_name`,`versioned_name`,`absolute_versioned_name` on `StubRepositoryPackage` ([e257aa5](https://github.com/BradenM/micropy-cli/commit/e257aa56d14a98e3b81ef26c5e8fc80615036d93))
* **stubs:** Expose name/version/absolute_name fields from stub repo package. ([d88dcae](https://github.com/BradenM/micropy-cli/commit/d88dcaea98c30b41ccacd18ae7f4c1b474fa9730))
* **stubs:** Expose url via StubRepositoryPackage descriptor. ([4fd1b12](https://github.com/BradenM/micropy-cli/commit/4fd1b1202503bed22a34e861df8ff815e5a5363e))
* **stubs:** Impl `resolve_package_url` for micropython-stubs repo. ([4bd70aa](https://github.com/BradenM/micropy-cli/commit/4bd70aaea7c78ef2ae81bf432f53cef8971d5870))
* **stubs:** Impl resolve package method in StubRepository. ([c28d988](https://github.com/BradenM/micropy-cli/commit/c28d98815a31cb0c790ea4fdcaa7371f202545b0))
* **stubs:** Implement dirty metadata adapter for dist-based stubs until proper refactorings. ([a60138f](https://github.com/BradenM/micropy-cli/commit/a60138f73863b3b93e8465a38b3805aec99f88fe))
* **stubs:** Make `StubPackage` immutable. ([2fab17d](https://github.com/BradenM/micropy-cli/commit/2fab17d46445448c93bce1ca532f112507480f40))
* **stubs:** Make `StubRepository.resolve_package` return `StubRepositoryPackage` ([24ef2fa](https://github.com/BradenM/micropy-cli/commit/24ef2fa51eafcfc25b7f468288908fef736c4830))
* **stubs:** Make `StubRepositoryPackage` immutable, iterate matchers. ([ae71f91](https://github.com/BradenM/micropy-cli/commit/ae71f9136aeae81e37f03cfe7d8ea564e64f85cf))
* **stubs:** Make `StubsManifest` immutable. ([2a6ffa3](https://github.com/BradenM/micropy-cli/commit/2a6ffa3fb57e304cda88cc22c742b454a800db7a))
* **stubs:** Make `StubSource` proper abstract, add prepare abstractmethod + impls. ([d690e71](https://github.com/BradenM/micropy-cli/commit/d690e71c6c1e9d90dc4414323c6488f7fbd7d540))
* **stubs:** Make micropython stubs package sortable. ([49b6df0](https://github.com/BradenM/micropy-cli/commit/49b6df03d4b0aeb2ef124a3f2150bb3417019e80))
* **stubs:** Micropy-stubs resolve package url impl, stub micropython for now. ([c832cb0](https://github.com/BradenM/micropy-cli/commit/c832cb0c82ce3533ae18e90ada0359e375de2e1e))
* **stubs:** Rename `StubRepositoryPackage.repository` -> `manifest`. ([a8b3ec8](https://github.com/BradenM/micropy-cli/commit/a8b3ec8b72094654cf665bc330e75a55bdf96b72))
* **stubs:** Support reuse of `StubSource` instances, improvements. ([b873a62](https://github.com/BradenM/micropy-cli/commit/b873a62970264928d772575c84e6c8ead305c6dc))
* **stubs:** Utilize `StubRepositoryPackage.match_exact` ([3ec08dd](https://github.com/BradenM/micropy-cli/commit/3ec08ddc5aad0bfbd8b98da6388989368d674790))
* **stubs:** Utilize locators in `StubManager`, resolve requirements from metadata. ([5c19624](https://github.com/BradenM/micropy-cli/commit/5c196249704a14d95ebfc99aa75ae77054cd8c48))
* **stubs:** Validate RepoInfo source, add method for fetching contents. ([0f7487f](https://github.com/BradenM/micropy-cli/commit/0f7487fa45beb62d149539967b6671c78cc41c60))
* **utils:** Add SupportsLessThan protocol to types util. ([489a9b0](https://github.com/BradenM/micropy-cli/commit/489a9b041082af18adf929471d7cc8f1f0bd39d5))
* **utils:** Add types to `ensure_existing_dir` ([e8e6ea8](https://github.com/BradenM/micropy-cli/commit/e8e6ea886098bd7bfceaf084e1d67673b5290177))
* **utils:** Add utils._compat module, add importlib metadata ([5722504](https://github.com/BradenM/micropy-cli/commit/572250447091e721623e562420c80295da999525))
* **utils:** Add utils.types, PathStr alias. ([63f65b9](https://github.com/BradenM/micropy-cli/commit/63f65b9b649f8de7b3d244c01a5fe17c201c4ca6))
* **utils:** Defer updating stale cache with `utils.get_cached_data` ([afd2ba5](https://github.com/BradenM/micropy-cli/commit/afd2ba5932d375a89401d1c314a3d6447a56e53f))
### Bug Fixes
* **cli:** Click fails to resolve package version. ([65ef13b](https://github.com/BradenM/micropy-cli/commit/65ef13b3939ffa65beca8eb74fd580cfd31d3382))
* **compat:** <=3.8 python typing compat issues. ([e7600b4](https://github.com/BradenM/micropy-cli/commit/e7600b42a9f08035187b8644eecc66315619c753))
* **deps:** Only install import-metadata when py version <3.10 ([ac1356d](https://github.com/BradenM/micropy-cli/commit/ac1356da308cfae94d16cd9935b9733ed0fad67a))
* **deps:** Pin dependencies ([84aa3c3](https://github.com/BradenM/micropy-cli/commit/84aa3c32db009121fbdd868b9664b648087186d3))
* **deps:** Pin dependencies ([1b6a46a](https://github.com/BradenM/micropy-cli/commit/1b6a46a828081d31153428e467780216737723a9))
* **deps:** Update dependency attrs to v22.2.0 ([9435223](https://github.com/BradenM/micropy-cli/commit/9435223ebe70554b6fc8d45ad68a798809ce55e2))
* **deps:** Update dependency boltons to v21 ([52bd39c](https://github.com/BradenM/micropy-cli/commit/52bd39c989dbbd1c7130ab8702d1c8caa0b50133))
* **deps:** Update dependency gitpython to v3.1.30 ([f5bb503](https://github.com/BradenM/micropy-cli/commit/f5bb5037aa1965be85198269b2ab8166f660f228))
* **deps:** Update dependency importlib-metadata to v5.2.0 ([42ab466](https://github.com/BradenM/micropy-cli/commit/42ab46681b5597eeb77d78573adcdabcd6a10bd0))
* **deps:** Update dependency markupsafe to v2.1.2 ([4239f9b](https://github.com/BradenM/micropy-cli/commit/4239f9bee88681f357b01f09b55b032ce3a5d39c))
* **deps:** Update dependency pydantic to v1.10.3 ([8d4d64d](https://github.com/BradenM/micropy-cli/commit/8d4d64ddc4ffcb64b578bd04f3f91092f6fc73a4))
* **deps:** Update dependency pydantic to v1.10.4 ([22dfef1](https://github.com/BradenM/micropy-cli/commit/22dfef18fc60db43564ee7379b319a6bb3a200e4))
* **deps:** Update dependency python-minifier to v2.8.0 ([9b0b2ef](https://github.com/BradenM/micropy-cli/commit/9b0b2efcfd2f3bc3c3ff6a8ee596329471733bd9))
* **deps:** Update dependency requests to v2.28.2 ([8e8d259](https://github.com/BradenM/micropy-cli/commit/8e8d259065b3d226aeee7be4ee0ed81cc9c7643d))
* **deps:** Update dependency requirements-parser to v0.5.0 ([26a8931](https://github.com/BradenM/micropy-cli/commit/26a8931d76121416b06fd8da90ee93e040963594))
* **main:** Add types to `MicroPy.stubs` ([2340184](https://github.com/BradenM/micropy-cli/commit/2340184db4a6e811377eab8f86f99333b8c16ab6))
* **main:** StubRepository has faux immutability. ([71feed2](https://github.com/BradenM/micropy-cli/commit/71feed28fa9a94108629784e0ba5f0de3e42ce70))
* **project:** Bad type union. ([3d32e5c](https://github.com/BradenM/micropy-cli/commit/3d32e5cb3f7802d867a7a9ac7788d33ea7c6f2cd))
* **stubs:** Ensure src path is path type in log. ([881a6a6](https://github.com/BradenM/micropy-cli/commit/881a6a6401166b621bd5eef0e76cf0b905d0b1dc))
* **stubs:** Perform repo lookups prior to adding stub ([5410a13](https://github.com/BradenM/micropy-cli/commit/5410a1369d6f693b69e36fc42b25f4a9a23c222a))
* **stubs:** Remove mutating subclass hook from `StubsManifest`. ([d3fcd7e](https://github.com/BradenM/micropy-cli/commit/d3fcd7eaef30da7e1621a507ba179a52d80ee1cf))
* **stubs:** Use `typing.Type` for sub py3.7 compat. ([1350263](https://github.com/BradenM/micropy-cli/commit/1350263bcd8b73368d99e849ead50d61b9e479b8))
* **stubs:** Utilize absolute names in stub search results. ([6c81a93](https://github.com/BradenM/micropy-cli/commit/6c81a93e8596836d13cbf5a8c0554495d5873b6b))
* **utils:** Add annotations future in type utils. ([d2d0ed8](https://github.com/BradenM/micropy-cli/commit/d2d0ed815cb46c2d7fbca6d9c1408813aa3c8565))
* **utils:** Remove PathLike GenericAlias subscript for py <3.8 ([e22343a](https://github.com/BradenM/micropy-cli/commit/e22343af79972c90c68de51249f4bd48c43372b1))
* **utils:** Use importlib metadata to check micropy version in utils. ([dbeb0a9](https://github.com/BradenM/micropy-cli/commit/dbeb0a90ebe3fbe1168968198d799514382682c3))
### Documentation
* **chglog:** Remove unreleased for release-please. ([22d7be0](https://github.com/BradenM/micropy-cli/commit/22d7be04ac806653962187aa5e67f28fd07726b9))
* **conf:** Dynamically determine docs release version ([fc8ab96](https://github.com/BradenM/micropy-cli/commit/fc8ab966f8f4a44031eba42145afc244c521c896))
### Code Refactoring
* **stubs:** Remove old `StubRepo` class. ([b9de35a](https://github.com/BradenM/micropy-cli/commit/b9de35ab082b9c2e69d5b130e9e11dc45f843320))
* **stubs:** Remove search remote from stub manager. ([95d42f0](https://github.com/BradenM/micropy-cli/commit/95d42f0f643e030f4f6e3f3e6770260a445e2014))
* **stubs:** Update repository impls to retain immutability. ([b44b335](https://github.com/BradenM/micropy-cli/commit/b44b335d1a70421058fdb715fc9d95cf2bd84fae))
* **stubs:** Utilize locator strategies over stub source factory method. ([e81ac84](https://github.com/BradenM/micropy-cli/commit/e81ac8467c744f8d4462d6dd18d877c906d97773))
* **utils:** Update usage of importlib metadata. ([a09aaf9](https://github.com/BradenM/micropy-cli/commit/a09aaf9d7e15c61ad58b2a500514e58bedbb8741))
## [v4.0.0] - 2022-11-13
### Bug Fixes
- **deps:** update dependency python-minifier to v2.7.0
- **deps:** update dependency markupsafe to v2.1.1
- **deps:** update dependency jinja2 to v3.1.2
- **deps:** update dependency gitpython to v3.1.29
- **deps:** update dependency colorama to v0.4.6
- **deps:** pin dependencies
### Code Refactoring
- **stubs:** utilize helper method during remote stub unpack.
- **utils:** extract helper methods, add types.
### Features
- **deps:** update python constraint to include v3.11, update lockfile.
- **deps:** upgrade to click v8
- **deps:** update all deps in-range
## [v4.0.0-rc.2] - 2022-04-17
### Bug Fixes
- **pyd:** remove dict union operator till py3.9 min support
- **pyd:** only type-cast rshell if type checking is enabled
- **pyd:** capture module not found error during rshell import attempt
- **pyd:** upydevice connect proper attr error if before established
- **pyd:** use host path suffix check only as fallback in copy_to
- **pyd:** consumer handler protocol methods should not be writable
### Features
- **deps:** add upydevice+deps, missing type-stubs to dev, update mypy config
- **deps:** upgrade upydevice and remove prev missing deps
- **exc:** add PyDeviceError, PyDeviceConnectionError exceptions
- **main:** update to utilize new pyd module
- **pkg:** add pypi-test source to pyproject
- **pkg:** regenerate changelog
- **pkg:** add poetry+local pre-commit hook for docs export
- **pkg:** export pyd from pkg root
- **pkg:** add git-chlog config
- **pyb:** add abcs for PyDevice, MetaPyDevice, Consumer/StreamConsumer
- **pyd:** establish should return pyd instance, update consumer types
- **pyd:** use/pass consumer handlers via delegate, expose connect/dc
- **pyd:** add ConsumerDelegate, StreamHandlers, MessageHandlers
- **pyd:** update rshell backend to implement MetaPyDeviceBackend
- **pyd:** add PyDeviceConsumer protocol
- **pyd:** add PyDevice implementation
- **pyd:** update upyd backend to interfaces + cleanup
- **pyd:** move tqdm-progress consumer to pyd.consumers
- **pyd:** add Stream/Message consumer protocols+handler protos, Split MetaPyDevice/MetaPyDeviceBackend
- **pyd:** add upydevice-based pyd backend
- **pyd:** add rshell-based pydevice backend
- **pyd:** rename pyb module -> pyd
- **pyd:** add pyd module explicit exports
- **pyd:** allow delegate_cls to be injected to pydevice via init
- **scripts:** add script for exporting docs requirements
- **utils:** remove pybwrapper
## [v4.0.0.rc.1] - 2022-03-14
### Bug Fixes
- **dev-deps:** update pytest to ^7.0 to resolve py10+win pyreadline crash
- **pkg:** rshell markers for win32
- **pkg:** fix mistake in rshell marker
- **pkg:** do not install rshell when py>=3.10 and on windows due to pyreadline.
- **pkg:** win32 rshell python marker
- **pkg:** upgrade too and pin jinja2 @ 3.0.3
- **project:** report exception on install failure to stdout
- **stubber:** replace pyminifer with python-minifer
- **utils:** capture attribute err that occurs on py310 win32 rshell import
- **utils:** utilize mp-stubbers new logic for generating stubs
### Features
- **deps:** update dependencies scoped
- **deps:** update micropython-stubber to latest master commit
- **pkg:** move pytest+coverage cfg to pyproject
- **pkg:** add missing packaging dep
- **pkg:** update includes to be more strict
- **pkg:** restructure and cleanup pyproject with dependency groups
- **pkg:** merge create_stubs group into default
## [v3.6.0] - 2021-05-17
### Bug Fixes
- **data:** update stubs schema for compat with latest stubber
### Features
- **deps:** update rshell dependency
- **deps:** update deps, add micropy-cli w/ extras as dev-dep
- **deps:** setup black, pre-commit
- **pkg:** update setup file
- **pre-commit:** add pre-commit config
- **stubber:** update micropython-stubber submodule to latest
- **utils:** remove dynamic
- **utils:** refactor stub-gen to stubs, dynamically create stubber module for import
## [v3.5.0] - 2020-11-17
## [v3.5.0.rc.1] - 2020-11-17
### Bug Fixes
- full name case mismatch for pypi packages
- package installation failures were silent
- **pkg:** constrain questionary version to <1.8.0
- **pkg:** setuptools editable installation issues
### Features
- **package:** detect and return VCSDependencySource when needed in create dep source factory
- **package:** add VCSDependencySource class for supporting VCS requirements
- **package:** add attributes and logic for VCS packages
- **pkg:** bump questionary dependency to ^1.8.1
- **pkg:** add GitPython dependency
### Reverts
- chore(deps): update setup.py
## [v3.4.0] - 2020-07-25
### Bug Fixes
- **deps:** update dpath constraint to >=1.4,<2.0
## [v3.3.0] - 2019-12-23
### Bug Fixes
- ensure any values to be extended in config are of type list ([#94](https://github.com/BradenM/micropy-cli/issues/94))
- **utils:** ignore candidate releases when checking for update
### Features
- **project:** generate recommended extensions with vscode integration ([#95](https://github.com/BradenM/micropy-cli/issues/95))
## [v3.2.0] - 2019-12-14
## [v3.2.0.rc.2] - 2019-12-13
### Bug Fixes
- Handle Invalid Requirements
- **cli:** Handle errors when reading requirements from path
- **cli:** Handle and Report Invalid Package Name Error
- **deps:** Fix loading requirements from path
- **utils:** Follow redirects when testing for valid url
### Code Refactoring
- **deps:** Remove Exception handling from Packages Module
### Features
- Add Base and Requirement Exceptions
- **poetry:** Update Poetry to Stable
## [v3.2.0.rc.1] - 2019-12-09
### Bug Fixes
- Make rshell and pyminifier requirements optional ([#82](https://github.com/BradenM/micropy-cli/issues/82))
- Colorama Version Constraint
- Colorama Broken Release, Style
- VSCode Settings failed to populate on reload ([#81](https://github.com/BradenM/micropy-cli/issues/81))
- **config:** Remove concrete path from ConfigSource
- **config:** Remove cache method for better implementation later
- **deps:** Temporary Directory would be removed before it was ready
- **logger:** Exception formatting
- **project:** Context not being updated when needed
- **project:** Add empty dict to config on create
### Code Refactoring
- Cleanup Stubs Module Context Handling
- **packages:** Use new Dependency Api in Packages Module
### Features
- **cli:** Basic install from path option implementation
- **config:** Manage sync via callback
- **config:** New Interface with file/memory autosync and dot notation
- **config:** Dictionary Config Source
- **config:** Cache and Root Key Context Manager for Config Items
- **config:** Use dpath to handle Config paths and merging
- **config:** Improved handling of collection data types
- **config:** Add pop method to config
- **config:** New and Improved Config File Interface
- **context:** Use DictConfig for Project Context
- **deps:** Package Class for representing a requirement
- **deps:** Address Package Source Uniformly
- **deps:** Allow local deps to be sourced from anywhere
- **project:** Add local-lib-path config option.
- **project:** Load Project Modules by individual Priority
- **project:** Update Projects to use Priority Queue
- **project:** Implement Dependencies in Project Module
- **project:** Render Local Deps in Project Settings
- **project:** Update Config/Context automatically
- **project:** Update modules to use new, more flexible config
- **project:** Use new Config Interface in Projects
- **project:** Try to add local deps as relative to project, fallback...
- **project:** Replace Project Cache with Config Instance
- **template:** Update TemplateModule
### Performance Improvements
- **size:** Slimmed Package Size
## [v3.1.1] - 2019-12-03
### Bug Fixes
- HookProxy failed to resolve with kwargs
- **checks:** VSCode check failing on py36
- **logger:** Exception formatting
- **package:** Add metadata to pyproject.toml
- **package:** Update Makefile and bump2version to use pyproject
- **package:** Use Dephell to generate setup.py, Remove Manifiest.in
- **project:** Exception Raised if no Templates are used in Project
- **project:** VSCode check always failed silently
### Features
- Cleanup Log File Formatting
- Use Poetry for Dependency Management
### Performance Improvements
- **size:** Slimmed Package Size
## [v3.1.0] - 2019-11-12
### Bug Fixes
- Handle Errors when adding Packages
- Project Context Stub Path Ordering
- HookProxy failed to work with descriptors.
- PackagesModule Dev, Project Context
- Move Template Check flag to TemplatesModule
- Active Project Resolve, Cli Templates List
### Code Refactoring
- Add Packages from File
- Import MicroPy and Modules to Package Root
- Restructure Project Module
### Features
- Report Ready on Project Load, Code Cleanup
- Write .gitignore file in generated .micropy folder
- Proxy Project Hooks to allow hooks with the same name, Split De...
- Resolve Project Hooks via attrs, Fix Stub List
- **project:** Project Method Hook Decorator
### Performance Improvements
- Lazy Load Project Stubs
## [v3.0.1] - 2019-10-13
### Bug Fixes
- Auto Update Check's Cache not expiring after update
- VSCode Template Check always Fails on Linux ([#65](https://github.com/BradenM/micropy-cli/issues/65))
- **upstream:** Fails to Generate Stub Files
## [v3.0.0] - 2019-10-13
### Bug Fixes
- Project Fails to Init due to Checks on Windows
- Stub Package Url fails to resolve on Windows
- Handle Chunked Content Length on Package Download
- Package urls not resolving correctly
- Fails to load Project if Template Files are Missing ([#55](https://github.com/BradenM/micropy-cli/issues/55))
### Code Refactoring
- **data:** Move all Data Paths to Data Module
### Features
- Add Flag for Skipping Template Checks
- Search/Retrieve Stubs Directly from micropy-stubs
- Update MicropyCli Stub Sources
- Refactor MicroPy Class for Better State Management
### Performance Improvements
- Lazy Load Stubs when Needed
- **project:** Lazy Load Current Active Project
### BREAKING CHANGE
micropy.STUBS renamed to micropy.stubs
## [v2.2.0] - 2019-09-28
### Features
- Template Checks, MS-Python Check ([#52](https://github.com/BradenM/micropy-cli/issues/52))
- **cli:** Automatic Update Checks ([#54](https://github.com/BradenM/micropy-cli/issues/54))
- **vscode:** Ensure Jedi is Disabled in VSCode Template
### Performance Improvements
- **stubs:** Cache Available Stubs for Searching
## [v2.1.1] - 2019-09-22
### Bug Fixes
- **hotfix:** Remove workspaceRoot var from VSCode Settings ([#51](https://github.com/BradenM/micropy-cli/issues/51))
### Features
- Relicensed under MIT
### BREAKING CHANGE
No longer compatible with <=ms-python.python[@2019](https://github.com/2019).8.30787 VSCode Extension
## [v2.1.0] - 2019-09-01
### Bug Fixes
- **project:** Requirement Files skipped on First Init
- **windows:** Support User Level Directory Linking ([#45](https://github.com/BradenM/micropy-cli/issues/45))
### Features
- **log:** Cap Log File at 2MB
- **project:** Init Project with Micropy Dev Dependency
- **project:** Git Ignore Template Option
## [v2.0.2] - 2019-08-21
### Bug Fixes
- **dep:** Require appropriate Click version
- **windows:** Warn User if MicroPy Lacks Admin Privs
## [v2.0.1] - 2019-07-26
### Bug Fixes
- **stubs:** Reduce Schema Strictness
## [v2.0.0] - 2019-07-25
### Bug Fixes
- **dep:** Broken Docutils Dependency
- **project:** Only modules install correctly
### Features
- Add Optional Pyminifier Dep for Stub Creation
- **cli:** Install Python Packages for Project
- **cli:** Verbosity Flag for Stub Creation
- **dep:** Update Tox to latest
- **dep:** Packaging Module Requirement
- **lib:** Update Stubber to Process Branch
- **project:** Update requirements.txt Files on Install
- **project:** Template Update Functionality
- **project:** Install from Requirements.txt
- **project:** Retrieve and Stub Project Requirements
- **project:** Project Config in Info File
- **project:** Make Templates Optional via CLI ([#30](https://github.com/BradenM/micropy-cli/issues/30))
- **pyb:** Handle Pyboard Output and Errors
- **stubs:** Minify Stubber Before Executing
- **util:** Generate Stub from File Utility
## [v1.1.3] - 2019-07-20
### Bug Fixes
- ValueError raised after Creating Project in Windows ([#33](https://github.com/BradenM/micropy-cli/issues/33))
- Unicode Error raised when logging on Windows ([#32](https://github.com/BradenM/micropy-cli/issues/32))
## [v1.1.2] - 2019-07-19
### Bug Fixes
- **stubs:** Ensure Firmware Stubs Load First
## [v1.1.1] - 2019-07-17
### Bug Fixes
- Temp Hotfix for False Stub Duplication
## [v1.1.0] - 2019-07-16
### Bug Fixes
- **cli:** Stub List always prints Unknown
- **cli:** Made Stub Search Case Insensitive
- **stubs:** FileExistsError when adding existing Stub
### Features
- **cli:** List Project Stubs if in Project Directory
- **cli:** Stubs now list by Firmware
- **cli:** Create Formatted Strings from Logger
- **cli:** Added --force flag when adding stubs
- **project:** Micropy Project Info File ([#29](https://github.com/BradenM/micropy-cli/issues/29))
- **project:** Micropy Project Folder ([#28](https://github.com/BradenM/micropy-cli/issues/28))
## [v1.0.0] - 2019-07-11
### Bug Fixes
- **cli:** Init Crashes if no Stubs are Loaded
- **cli:** Create Stubs Help Formatting
- **log:** Output Highlight Bug, Cleanup
- **stub:** Stub Name without Firmware
- **stubs:** Firmware not showing as Installed in Stub Search
- **stubs:** Fix Existing Firmware Reinstall
### Features
- Implemented Local and Remote Stub Sources ([#18](https://github.com/BradenM/micropy-cli/issues/18))
- **cli:** Minified Cli Output Style
- **cli:** Search Available Stubs ([#27](https://github.com/BradenM/micropy-cli/issues/27))
- **cli:** Stream Downloads with Progress Bar
- **stub:** Update Stubs to Use New Stubber Schema ([#23](https://github.com/BradenM/micropy-cli/issues/23))
- **stubs:** Updated micropython-stubber to latest
- **stubs:** Add Firmware Frozen Modules to Templates
- **stubs:** Device Stubs Firmware Resolution ([#25](https://github.com/BradenM/micropy-cli/issues/25))
- **stubs:** Add Device Frozen Modules to Templates ([#24](https://github.com/BradenM/micropy-cli/issues/24))
- **stubs:** Added Stub Stdout Verbosity
- **stubs:** Add Stubs from Repositories ([#21](https://github.com/BradenM/micropy-cli/issues/21))
- **stubs:** Replaced Stubs with Stub "Packages"
- **stubs:** Stub Repositories ([#20](https://github.com/BradenM/micropy-cli/issues/20))
- **stubs:** Update Stub Creation ([#26](https://github.com/BradenM/micropy-cli/issues/26))
- **util:** Generic Utility Functions and Module Cleanup
### Performance Improvements
- **cli:** Only Instantiate MicroPy when needed
## [v0.3.0] - 2019-06-25
### Code Refactoring
- MicroPy to use new Stub and Utility Features ([#14](https://github.com/BradenM/micropy-cli/issues/14))
### Features
- **cli:** Version Flag
- **log:** New Cli Output Style, Log Class Methods
- **pyb:** PyboardWrapper Utility ([#13](https://github.com/BradenM/micropy-cli/issues/13))
- **stubs:** Stub Manager ([#5](https://github.com/BradenM/micropy-cli/issues/5))
- **utils:** Utils Module and Validator Utility ([#4](https://github.com/BradenM/micropy-cli/issues/4))
## [v0.2.0] - 2019-06-14
### Features
- **log:** Added Proper Log Formatting, cleaned messages before write.
- **log:** Added Logging to Template Module
- **project:** Drop Cookiecutter for Purely Jinja2 ([#3](https://github.com/BradenM/micropy-cli/issues/3))
## [v0.1.1] - 2019-06-10
### Bug Fixes
- **setup:** Fixed missing cookiecutter package requirement
- **setup:** Fixed Pypi misinformation, cleaned up dist-management files
- **setup:** Fix Missing .vscode Template Files
## v0.1.0 - 2019-06-09
### Bug Fixes
- Fails First Time Setup Failed to init on first run if the stubs folder didn't exist
- Removed old command
- Fix Project Init
- Added rshell to setup.py
- Quick Fix before Project Class Restructure
- Packaging Fixes
- **package:** Allow multiple versions of python, Update Reqs
- **setup:** Included Template in Manifest
- **stub:** Fixed Refresh Stubs
- **stubs:** Cleaned Stub Names before Adding
- **stubs:** Removed Old Stub Command
- **stubs:** Fixed missing logging.py
- **template:** Fixed src template
### Code Refactoring
- Setup as proper package
### Features
- Project Init and Template Serialization
- Finished Package Setup and Structure
- Let Stub class handle validation and files
- Setup Template Files
- Initial commit
- Add Josverl Stubs on First Setup, Restructured MicroPy
- Added MicroPy Parent Class
- Added stubber as submodule over pulling files with requests
- **log:** Added Silet Stdout Context Manager to Logger
- **log:** Setup ServiceLog to work as a single parent Logger with ch...
- **log:** Added Logging
- **log:** Setup Logger as Borg for easy access
- **log:** Added file logging to ServiceLog, Added docs
- **project:** Project Module Rewrite to use Cookiecutter and JSON
- **pylint:** Added checkbox to choose stubs for pylint
- **stub:** Pass Multiple Stubs to .pylintrc
- **stub:** Added stub add, refresh commands
- **stub:** Added createstub.py download
- **stub:** Added Stub Class, Moved Stub logic to MicroPy/Stub
- **stubs:** Added Automated Stub Creation on PyBoard
- **stubs:** Added Stub Validation, Stub Class Restructure
- **stubs:** Added Basic Stub Exceptions
- **template:** Setup Template in Cookiecutter Fashion
[v4.0.0]: https://github.com/BradenM/micropy-cli/compare/v4.0.0-rc.2...v4.0.0
[v4.0.0-rc.2]: https://github.com/BradenM/micropy-cli/compare/v4.0.0.rc.1...v4.0.0-rc.2
[v4.0.0.rc.1]: https://github.com/BradenM/micropy-cli/compare/v3.6.0...v4.0.0.rc.1
[v3.6.0]: https://github.com/BradenM/micropy-cli/compare/v3.5.0...v3.6.0
[v3.5.0]: https://github.com/BradenM/micropy-cli/compare/v3.5.0.rc.1...v3.5.0
[v3.5.0.rc.1]: https://github.com/BradenM/micropy-cli/compare/v3.4.0...v3.5.0.rc.1
[v3.4.0]: https://github.com/BradenM/micropy-cli/compare/v3.3.0...v3.4.0
[v3.3.0]: https://github.com/BradenM/micropy-cli/compare/v3.2.0...v3.3.0
[v3.2.0]: https://github.com/BradenM/micropy-cli/compare/v3.2.0.rc.2...v3.2.0
[v3.2.0.rc.2]: https://github.com/BradenM/micropy-cli/compare/v3.2.0.rc.1...v3.2.0.rc.2
[v3.2.0.rc.1]: https://github.com/BradenM/micropy-cli/compare/v3.1.1...v3.2.0.rc.1
[v3.1.1]: https://github.com/BradenM/micropy-cli/compare/v3.1.0...v3.1.1
[v3.1.0]: https://github.com/BradenM/micropy-cli/compare/v3.0.1...v3.1.0
[v3.0.1]: https://github.com/BradenM/micropy-cli/compare/v3.0.0...v3.0.1
[v3.0.0]: https://github.com/BradenM/micropy-cli/compare/v2.2.0...v3.0.0
[v2.2.0]: https://github.com/BradenM/micropy-cli/compare/v2.1.1...v2.2.0
[v2.1.1]: https://github.com/BradenM/micropy-cli/compare/v2.1.0...v2.1.1
[v2.1.0]: https://github.com/BradenM/micropy-cli/compare/v2.0.2...v2.1.0
[v2.0.2]: https://github.com/BradenM/micropy-cli/compare/v2.0.1...v2.0.2
[v2.0.1]: https://github.com/BradenM/micropy-cli/compare/v2.0.0...v2.0.1
[v2.0.0]: https://github.com/BradenM/micropy-cli/compare/v1.1.3...v2.0.0
[v1.1.3]: https://github.com/BradenM/micropy-cli/compare/v1.1.2...v1.1.3
[v1.1.2]: https://github.com/BradenM/micropy-cli/compare/v1.1.1...v1.1.2
[v1.1.1]: https://github.com/BradenM/micropy-cli/compare/v1.1.0...v1.1.1
[v1.1.0]: https://github.com/BradenM/micropy-cli/compare/v1.0.0...v1.1.0
[v1.0.0]: https://github.com/BradenM/micropy-cli/compare/v0.3.0...v1.0.0
[v0.3.0]: https://github.com/BradenM/micropy-cli/compare/v0.2.0...v0.3.0
[v0.2.0]: https://github.com/BradenM/micropy-cli/compare/v0.1.1...v0.2.0
[v0.1.1]: https://github.com/BradenM/micropy-cli/compare/v0.1.0...v0.1.1
================================================
FILE: LICENSE
================================================
MIT License
Copyright (c) 2019 Braden Mars
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: Makefile
================================================
.PHONY: clean clean-test clean-pyc clean-build
bold := $(shell tput bold)
rsttxt := $(shell tput sgr0)
clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
clean-build: ## remove build artifacts
@printf '$(bold)Cleaning Artifacts...\n$(rsttxt)'
rm -fr build/
rm -fr dist/
rm -fr .eggs/
rm -fr pip-wheel-metadata/
find . -name '*.egg-info' -exec rm -fr {} +
find . -name '*.egg' -exec rm -f {} +
clean-pyc: ## remove Python file artifacts
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
clean-test: ## remove test and coverage artifacts
- rm -fr .tox/
- rm -f .coverage
- rm -fr htmlcov/
find . -name '.pytest_cache' -exec rm -fr {} +
- rm -f .testmondata
- rm -rf .tmontmp
- rm cov.xml test_log.xml
test: ## run tests quickly with the default Python
pytest
watch-build: clean ## build pytest-testmon db
pytest --testmon -c pyproject.toml
watch: clean ## watch tests
- pytest --testmon
ptw --spool 2000 --onpass "make watch-cov" --clear -- --testmon -vv -c pyproject.toml
watch-cov: ## watch test coverage
pytest -n'auto' --forked --cov --cov-append --cov-config=pyproject.toml --cov-report=xml:cov.xml --cov-report term
coverage: ## generate coverage
pytest -n'auto' --cov --cov-config=pyproject.toml
coverage-html: ## generate coverage html
pytest -n'auto' --cov --cov-config=pyproject.toml --cov-report html
gendoc: ## Generate Docs
$(MAKE) -C docs clean
$(MAKE) -C docs html
@printf '$(bold)Docs Generated!\n$(rsttxt)'
test-release: dist ## release on pypi-test repo
@printf '$(bold)Uploading Test Release to TestPyPi...\n$(rsttxt)'
poetry publish -r test
@printf '$(bold)Test Released published!\n$(rsttxt)'
release: dist ## package and release
@printf '$(bold)Uploading package to PyPi...\n$(rsttxt)'
poetry publish
git push --tags
@printf '$(bold)Done! Tags Pushed!\n$(rsttxt)'
dist: clean ## builds package
@printf '$(bold)Building Source and Wheel...\n$(rsttxt)'
- rm README.rst
poetry build
ls -l dist
install: clean ## install pkg
python setup.py install
================================================
FILE: README.md
================================================
# Micropy Cli [![PyPI][pypi-img]][pypi-url] [![PyPI - Python Version][pypiv-img]][pypi-url] [![Github - Test Micropy Cli][build-img]][build-url] [![Coverage Status][cover-img]][cover-url]
Micropy Cli is a project management/generation tool for writing [Micropython](https://micropython.org/) code in modern IDEs such as VSCode.
Its primary goal is to automate the process of creating a workspace complete with:
* **Linting** compatible with Micropython
* VSCode **Intellisense**
* **Autocompletion**
* Dependency Management
* VCS Compatibility
[pypi-img]: https://img.shields.io/pypi/v/micropy-cli?logo=pypi&logoColor=white&style=flat-square
[pypi-url]: https://pypi.org/project/micropy-cli/
[pypiv-img]: https://img.shields.io/pypi/pyversions/micropy-cli.svg?style=flat-square&logo=python&logoColor=green
[build-img]: https://img.shields.io/github/workflow/status/BradenM/micropy-cli/Test%20MicropyCli/master?logo=github&style=flat-square
[build-url]: https://github.com/BradenM/micropy-cli/actions
[cover-img]: https://img.shields.io/coveralls/github/BradenM/micropy-cli/master?style=flat-square&logo=coveralls
[cover-url]: https://coveralls.io/github/BradenM/micropy-cli
# Getting Started
## Installation
You can download and install the latest version of this software from the Python package index (PyPI) as follows:
`pip install --upgrade micropy-cli`
If applicable, you can test out a pre-release by executing:
`pip install --upgrade --pre micropy-cli`
## Creating a Project
Creating a new project folder is as simple as:
1. Executing `micropy init `
2. Selecting which features to enable
3. Selecting your target device/firmware
4. Boom. Your workspace is ready.
## Micropy Project Environment
When creating a project with `micropy-cli`, two special items are added:
* A `.micropy/` folder
* A `micropy.json` file
The `.micropy/` contains symlinks from your project to your `$HOME/.micropy/stubs` folder. By doing this, micropy can reference the required stub files for your project as relative to it, rather than using absolute paths to `$HOME/.micropy`. How does this benefit you? Thanks to this feature, you can feel free to push common setting files such as `settings.json` and `.pylint.rc` to your remote git repository. This way, others who clone your repo can achieve a matching workspace in their local environment.
> Note: The generated `.micropy/` folder should be *IGNORED* by your VCS. It is created locally for each environment via the `micropy.json` file.
The `micropy.json` file contains information micropy needs in order to resolve your projects required files when other clone your repo. Think of it as a `package.json` for micropython.
## Cloning a Micropy Environment
To setup a Micropy environment locally, simply:
* Install `micropy-cli`
* Navigate to the project directory
* Execute `micropy`
Micropy will automatically configure and install any stubs required by a project thanks to its `micropy.json` file.
## Project Dependencies
While all modules that are included in your targeted micropython firmware are available with autocompletion, intellisense, and linting, most projects require external dependencies.
Currently, handling dependencies with micropython is a bit tricky. Maybe you can install a cpython version of your requirement? Maybe you could just copy and paste it? What if it needs to be frozen?
Micropy handles all these issues for you automatically. Not only does it track your project's dependencies, it keeps both `requirements.txt` and `dev-requirements.txt` updated, enables autocompletion/intellisense for each dep, and allows you to import them just as you would on your device.
This allows you to include your requirement however you want, whether that be as a frozen module in your custom built firmware, or simply in the `/lib` folder on your device.
#### Installing Packages
To add a package as a requirement for your project, run:
`micropy install `
while in your project's root directory.
This will automatically execute the following:
* Source `PACKAGE_NAMES` from pypi, as a url, or a local path
* Retrieve the module/package and stub it, adding it to your local `.micropy` folder.
* Add requirement to your `micropy.json`
* Update `requirements.txt`
To install dev packages that are not needed on your device, but are needed for local development, add the `--dev` flag. This will do everything above **except** stub the requirement.
You can also install all requirements found in `micropy.json`/`requirements.txt`/`dev-requirements.txt` by executing `micropy install` without passing any packages. Micropy will automatically do this when setting up a local environment of an existing micropy project.
#### Example
Lets say your new project will depend on [picoweb](https://pypi.org/project/picoweb/) and [blynklib](https://pypi.org/project/blynklib/). Plus, you'd like to use [rshell](https://pypi.org/project/rshell/) to communicate directly with your device. After creating your project via `micropy init`, you can install your requirements as so:
Now you or anybody cloning your project can import those requirements normally, and have the benefits of all the features micropy brings:
## Stub Management
Stub files are the magic behind how micropy allows features such as linting, Intellisense, and autocompletion to work. To achieve the best results with MicropyCli, its important that you first add the appropriate stubs for the device/firmware your project uses.
> Note: When working in a micropy project, all stub related commands will also be executed on the active project. (i.e if in a project and you run `micropy stubs add `, then that stub retrieved AND added to the active project.)
### Adding Stubs
Adding stubs to Micropy is a breeze. Simply run: `micropy stubs add `
By sourcing [micropy-stubs](https://github.com/BradenM/micropy-stubs), MicroPy has several premade stub packages to choose from.
These packages generally use the following naming schema:
`--`
For example, running `micropy stubs add esp32-micropython-1.11.0` will install the following:
* Micropython Specific Stubs
* ESP32 Micropython v1.11 Device Specific Stubs
* Frozen Modules for both device and firmware
You can search stubs that are made available to Micropy via `micropy stubs search `
Alternatively, using `micropy stubs add `, you can manually add stubs to Micropy.
For manual stub generation, please see [Josvel/micropython-stubber](https://github.com/Josverl/micropython-stubber).
### Creating Stubs
Using `micropy stubs create `, MicropyCli can automatically generate and add stubs from any Micropython device you have on hand. This can be done over both USB and WiFi.
> Note: For stub creation, micropy-cli has additional dependencies.
>
> These can be installed by executing: `pip install micropy-cli[create_stubs]`
### Viewing Stubs
To list stubs you have installed, simply run `micropy stubs list`.
To search for stubs for your device, use `micropy stubs search `.
# See Also
* [VSCode IntelliSense, Autocompletion & Linting capabilities][lemariva-blog]
- An awesome article written by [lemariva](https://github.com/lemariva). It covers creating a micropython project environment from scratch using `micropy-cli` and [pymakr-vsc](pymakr-vsc). Great place to start if you're new to this!
* [Developing for the Raspberry Pi Pico in VS Code][cpwood-medium]
- A getting started guide for developing in micropython on the Raspberry Pi Pico by [cpwood][cpwood-git].
- Also see: [Pico-Go: Micropy-Cli][cpwood-picogo]
* [Awesome MicroPython][awesome-micropy]
- Collection of awesome micropython libraries / resources.
- Features `micropy-cli` along with several other great development tools under the [Development][awesome-micropy-develop] category.
[lemariva-blog]: https://lemariva.com/blog/2019/08/micropython-vsc-ide-intellisense
[lemariva-git]: https://github.com/lemariva
[cpwood-medium]: https://medium.com/all-geek-to-me/developing-for-the-raspberry-pi-pico-in-vs-code-getting-started-6dbb3da5ba97
[cpwood-picogo]: http://pico-go.net/docs/help/micropy/
[cpwood-git]: https://github.com/cpwood/
[awesome-micropy]: https://awesome-micropython.com/
[awesome-micropy-develop]: https://awesome-micropython.com/#development
# Acknowledgements
## Micropython-Stubber
[Josvel/micropython-stubber](https://github.com/Josverl/micropython-stubber)
Josverl's Repo is full of information regarding Micropython compatibility with VSCode and more. To find out more about how this process works, take a look at it.
micropy-cli and [micropy-stubs](https://github.com/BradenM/micropy-stubs) depend on micropython-stubber for its ability to generate frozen modules, create stubs on a pyboard, and more.
================================================
FILE: docs/Makefile
================================================
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
================================================
FILE: docs/_autosummary/micropy.config.config_source.rst
================================================
micropy.config.config\_source
=============================
.. automodule:: micropy.config.config_source
.. rubric:: Classes
.. autosummary::
ConfigSource
================================================
FILE: docs/_autosummary/micropy.config.rst
================================================
micropy.config
==============
.. automodule:: micropy.config
.. rubric:: Classes
.. autosummary::
Config
JSONConfigSource
DictConfigSource
================================================
FILE: docs/_autosummary/micropy.exceptions.rst
================================================
micropy.exceptions
==================
.. automodule:: micropy.exceptions
.. rubric:: Exceptions
.. autosummary::
StubError
StubNotFound
StubValidationError
================================================
FILE: docs/_autosummary/micropy.main.rst
================================================
micropy.main
============
.. automodule:: micropy.main
.. rubric:: Classes
.. autosummary::
MicroPy
================================================
FILE: docs/_autosummary/micropy.packages.rst
================================================
micropy.packages
================
.. automodule:: micropy.packages
.. rubric:: Functions
.. autosummary::
create_dependency_source
.. rubric:: Classes
.. autosummary::
LocalDependencySource
Package
PackageDependencySource
================================================
FILE: docs/_autosummary/micropy.project.modules.rst
================================================
micropy.project.modules
=======================
.. automodule:: micropy.project.modules
.. rubric:: Classes
.. autosummary::
DevPackagesModule
PackagesModule
ProjectModule
StubsModule
TemplatesModule
HookProxy
================================================
FILE: docs/_autosummary/micropy.project.rst
================================================
micropy.project
===============
.. automodule:: micropy.project
.. rubric:: Classes
.. autosummary::
Project
================================================
FILE: docs/_autosummary/micropy.rst
================================================
micropy
=======
.. automodule:: micropy
================================================
FILE: docs/_autosummary/micropy.stubs.rst
================================================
micropy.stubs
=============
.. automodule:: micropy.stubs
.. rubric:: Classes
.. autosummary::
StubManager
source
================================================
FILE: docs/_autosummary/micropy.stubs.source.rst
================================================
micropy.stubs.source
====================
.. automodule:: micropy.stubs.source
.. rubric:: Functions
.. autosummary::
get_source
.. rubric:: Classes
.. autosummary::
LocalStubSource
RemoteStubSource
StubRepo
StubSource
================================================
FILE: docs/_autosummary/micropy.utils.rst
================================================
micropy.utils
=============
.. automodule:: micropy.utils
.. rubric:: Functions
.. autosummary::
create_dir_link
ensure_existing_dir
ensure_valid_url
extract_tarbytes
generate_stub
get_package_meta
get_url_filename
is_dir_link
is_downloadable
is_existing_dir
is_url
iter_requirements
search_xml
stream_download
is_update_available
get_cached_data
get_class_that_defined_method
.. rubric:: Classes
.. autosummary::
PyboardWrapper
Validator
================================================
FILE: docs/base.md
================================================
## Installation
You can download and install the latest version of this software from the Python package index (PyPI) as follows:
`pip install --upgrade micropy-cli`
If applicable, you can test out a pre-release by executing:
`pip install --upgrade --pre micropy-cli`
# Getting Started
## Creating a Project
Creating a new project folder is as simple as:
1. Executing `micropy init `
2. Selecting which features to enable
3. Selecting your target device/firmware
4. Boom. Your workspace is ready.
## Micropy Project Environment
When creating a project with `micropy-cli`, two special items are added:
* A `.micropy/` folder
* A `micropy.json` file
The `.micropy/` contains symlinks from your project to your `$HOME/.micropy/stubs` folder. By doing this, micropy can reference the required stub files for your project as relative to it, rather than using absolute paths to `$HOME/.micropy`. How does this benefit you? Thanks to this feature, you can feel free to push common setting files such as `settings.json` and `.pylint.rc` to your remote git repository. This way, others who clone your repo can achieve a matching workspace in their local environment.
> Note: The generated `.micropy/` folder should be *IGNORED* by your VCS. It is created locally for each environment via the `micropy.json` file.
The `micropy.json` file contains information micropy needs in order to resolve your projects required files when other clone your repo. Think of it as a `package.json` for micropython.
## Cloning a Micropy Environment
To setup a Micropy environment locally, simply:
* Install `micropy-cli`
* Navigate to the project directory
* Execute `micropy`
Micropy will automatically configure and install any stubs required by a project thanks to its `micropy.json` file.
## Project Dependencies
While all modules that are included in your targeted micropython firmware are available with autocompletion, intellisense, and linting, most projects require external dependencies.
Currently, handling dependencies with micropython is a bit tricky. Maybe you can install a cpython version of your requirement? Maybe you could just copy and paste it? What if it needs to be frozen?
Micropy handles all these issues for you automatically. Not only does it track your project's dependencies, it keeps both `requirements.txt` and `dev-requirements.txt` updated, enables autocompletion/intellisense for each dep, and allows you to import them just as you would on your device.
This allows you to include your requirement however you want, whether that be as a frozen module in your custom built firmware, or simply in the `/lib` folder on your device.
#### Installing Packages
To add a package as a requirement for your project, run:
`micropy install `
while in your project's root directory.
This will automatically execute the following:
* Source `PACKAGE_NAMES` from pypi, as a url, or a local path
* Retrieve the module/package and stub it, adding it to your local `.micropy` folder.
* Add requirement to your `micropy.json`
* Update `requirements.txt`
To install dev packages that are not needed on your device, but are needed for local development, add the `--dev` flag. This will do everything above **except** stub the requirement.
You can also install all requirements found in `micropy.json`/`requirements.txt`/`dev-requirements.txt` by executing `micropy install` without passing any packages. Micropy will automatically do this when setting up a local environment of an existing micropy project.
#### Example
Lets say your new project will depend on [picoweb](https://pypi.org/project/picoweb/) and [blynklib](https://pypi.org/project/blynklib/). Plus, you'd like to use [rshell](https://pypi.org/project/rshell/) to communicate directly with your device. After creating your project via `micropy init`, you can install your requirements as so:
Now you or anybody cloning your project can import those requirements normally, and have the benefits of all the features micropy brings:
## Stub Management
Stub files are the magic behind how micropy allows features such as linting, Intellisense, and autocompletion to work. To achieve the best results with MicropyCli, its important that you first add the appropriate stubs for the device/firmware your project uses.
> Note: When working in a micropy project, all stub related commands will also be executed on the active project. (i.e if in a project and you run `micropy stubs add `, then that stub retrieved AND added to the active project.)
### Adding Stubs
Adding stubs to Micropy is a breeze. Simply run: `micropy stubs add `
By sourcing [micropy-stubs](https://github.com/BradenM/micropy-stubs), MicroPy has several premade stub packages to choose from.
These packages generally use the following naming schema:
`--`
For example, running `micropy stubs add esp32-micropython-1.11.0` will install the following:
* Micropython Specific Stubs
* ESP32 Micropython v1.11 Device Specific Stubs
* Frozen Modules for both device and firmware
You can search stubs that are made available to Micropy via `micropy stubs search `
Alternatively, using `micropy stubs add `, you can manually add stubs to Micropy.
For manual stub generation, please see [Josvel/micropython-stubber](https://github.com/Josverl/micropython-stubber).
### Creating Stubs
Using `micropy stubs create `, MicropyCli can automatically generate and add stubs from any Micropython device you have on hand. This can be done over both USB and WiFi.
> Note: For stub creation, micropy-cli has additional dependencies.
>
> These can be installed by executing: `pip install micropy-cli[create_stubs]`
### Viewing Stubs
To list stubs you have installed, simply run `micropy stubs list`.
To search for stubs for your device, use `micropy stubs search `.
# See Also
* [VSCode IntelliSense, Autocompletion & Linting capabilities](https://lemariva.com/blog/2019/08/micropython-vsc-ide-intellisense)
- An awesome article written by [lemariva](https://github.com/lemariva). It covers creating a micropython project environment from scratch using `micropy-cli` and [pymakr-vsc](pymakr-vsc). Great place to start if you're new to this!
# Acknowledgements
## Micropython-Stubber
[Josvel/micropython-stubber](https://github.com/Josverl/micropython-stubber)
Josverl's Repo is full of information regarding Micropython compatibility with VSCode and more. To find out more about how this process works, take a look at it.
micropy-cli and [micropy-stubs](https://github.com/BradenM/micropy-stubs) depend on micropython-stubber for its ability to generate frozen modules, create stubs on a pyboard, and more.
================================================
FILE: docs/cli.rst
================================================
CLI Usage
=====
.. click:: micropy.cli:cli
:prog: micropy
.. click:: micropy.cli:init
:prog: micropy init
:show-nested:
.. click:: micropy.cli:stubs
:prog: micropy stubs
:show-nested:
.. click:: micropy.cli:install
:prog: micropy install
:show-nested:
================================================
FILE: docs/conf.py
================================================
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from recommonmark.transform import AutoStructify
try:
import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
import importlib_metadata
sys.path.insert(0, os.path.abspath(".."))
source_suffix = [".rst", ".md"]
# -- Project information -----------------------------------------------------
project = "micropy-cli"
copyright = "2021, Braden Mars"
author = "Braden Mars"
github_doc_root = "https://github.com/BradenM/micropy-cli/tree/master/docs/"
# The full version, including alpha/beta/rc tags
release = importlib_metadata.version("micropy-cli")
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
"sphinx_autodoc_typehints",
"sphinx.ext.autosummary",
"sphinx.ext.autosectionlabel",
"sphinx_click.ext",
"recommonmark",
]
autodoc_default_flags = ["members", "show-inheritance"] # Defaults
autosummary_generate = True # Enable Autosummary
autosummary_imported_members = True
autosectionlabel_prefix_document = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# At the bottom of conf.py
def setup(app):
app.add_config_value(
"recommonmark_config",
{
"url_resolver": lambda url: github_doc_root + url,
"auto_toc_tree_section": "Contents",
"enable_eval_rst": True,
},
True,
)
app.add_transform(AutoStructify)
================================================
FILE: docs/header.rst
================================================
Micropy Cli |PyPI| |PyPI - Python Version| |Github - Test Micropy Cli| |Coverage Status|
========================================================================================
Micropy Cli is a project management/generation tool for writing
`Micropython`_ code in modern IDEs such as VSCode. Its primary goal is
to automate the process of creating a workspace complete with:
- **Linting** compatible with Micropython
- VSCode **Intellisense**
- **Autocompletion**
- Dependency Management
- VCS Compatibility
.. figure:: ../.github/img/micropy.svg
:width: 100%
Installation
------------
You can download and install the latest version of this software from
the Python package index (PyPI) as follows:
``pip install --upgrade micropy-cli``
.. _Micropython: https://micropython.org/
.. |PyPI| image:: https://img.shields.io/pypi/v/micropy-cli?logo=pypi&logoColor=white&style=flat-square
:target: https://pypi.org/project/micropy-cli/
.. |PyPI - Python Version| image:: https://img.shields.io/pypi/pyversions/micropy-cli.svg?style=flat-square&logo=python&logoColor=green
:target: https://pypi.org/project/micropy-cli/
.. |Github - Test Micropy Cli| image:: https://img.shields.io/github/workflow/status/BradenM/micropy-cli/Test%20MicropyCli/master?logo=github&style=flat-square
:target: https://github.com/BradenM/micropy-cli/actions
.. |Coverage Status| image:: https://img.shields.io/coveralls/github/BradenM/micropy-cli/master?style=flat-square&logo=coveralls
:target: https://coveralls.io/github/BradenM/micropy-cli
================================================
FILE: docs/index.rst
================================================
.. include:: header.rst
.. toctree::
:caption: Documentation
:maxdepth: 2
base.md
cli
modules
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
================================================
FILE: docs/modules.rst
================================================
API Reference
=============
.. autosummary::
:toctree: _autosummary
micropy
micropy.main
micropy.exceptions
micropy.stubs
micropy.stubs.source
micropy.project
micropy.project.modules
micropy.utils
micropy.config
micropy.config.config_source
micropy.packages
================================================
FILE: micropy/__init__.py
================================================
"""Micropy Cli.
Micropy Cli is a project management/generation tool for writing Micropython
code in modern IDEs such as VSCode. Its primary goal is to automate the
process of creating a workspace complete with:
Linting compatible with Micropython,
VSCode Intellisense,
Autocompletion,
Dependency Management,
VCS Compatibility
and more.
"""
from __future__ import annotations
from micropy.main import MicroPy
from micropy.utils._compat import metadata
__version__ = metadata.version("micropy-cli")
================================================
FILE: micropy/__main__.py
================================================
from __future__ import annotations
import sys
if __name__ == "__main__":
from micropy.cli import cli
sys.exit(cli())
================================================
FILE: micropy/app/__init__.py
================================================
from .main import app
__all__ = ["app"]
================================================
FILE: micropy/app/main.py
================================================
from __future__ import annotations
from enum import Enum
from pathlib import Path
from typing import List, Optional, cast
import micropy.exceptions as exc
import questionary as prompt
import typer
from micropy import logger, utils
from micropy.main import MicroPy
from micropy.project import Project, modules
from micropy.stubs.stubs import Stub
from micropy.utils._compat import metadata
from questionary import Choice
from .stubs import stubs_app
app = typer.Typer(name="micropy-cli", no_args_is_help=True, rich_markup_mode="markdown")
app.add_typer(stubs_app)
@app.callback()
def main_callback(ctx: typer.Context):
"""
**Micropy CLI** is a project management/generation tool for writing [Micropython](https://micropython.org/) code in modern IDEs such as VSCode.
Its primary goal is to automate the process of creating a workspace complete with:
* **Linting** compatible with Micropython
* IDE **Intellisense**
* **Autocompletion**
* Dependency Management
* VCS Compatibility
"""
if ctx.resilient_parsing:
return
micropy = ctx.ensure_object(MicroPy)
if not micropy.project.exists:
return
latest = utils.is_update_available()
if latest:
log = logger.Log.get_logger("MicroPy")
log.title("Update Available!")
log.info(f"Version $B[v{latest}] is now available")
log.info("You can update via: $[pip install --upgrade micropy-cli]\n")
@app.command(name="version")
def main_version():
"""Print Micropy CLI Version."""
vers = metadata.version("micropy-cli")
print(f"Micropy Version: {vers}")
raise typer.Exit()
TemplateEnum = Enum(
"TemplateEnum", {t: t for t in list(modules.TemplatesModule.TEMPLATES.keys())}, type=str
)
def template_callback(
ctx: typer.Context, value: Optional[List[TemplateEnum]]
) -> Optional[List[TemplateEnum]]:
if ctx.resilient_parsing:
return
if not value:
templates = modules.TemplatesModule.TEMPLATES.items()
templ_choices = [Choice(str(val[1]), value=t) for t, val in templates]
value = prompt.checkbox("Choose any Templates to Generate", choices=templ_choices).ask()
if not value:
if not prompt.confirm(
"You have chosen to use NO templates. Are you sure you want to continue?",
default=False,
).ask():
raise typer.Abort()
return []
value = [TemplateEnum(k) for k in value]
return value
def path_callback(ctx: typer.Context, value: Optional[Path]) -> Optional[Path]:
if ctx.resilient_parsing:
return
return value if value else Path.cwd()
def name_callback(ctx: typer.Context, value: Optional[str]) -> Optional[str]:
if ctx.resilient_parsing:
return
if not value:
path = ctx.params.get("path", Path.cwd())
default_name = path.name
prompt_name = prompt.text("Project Name", default=default_name).ask()
if prompt_name is None:
raise typer.Abort("You must provide a project name via prompt or --name option.")
return prompt_name.strip()
return value
def stubs_callback(ctx: typer.Context, value: Optional[List[str]]) -> Optional[List[Stub]]:
if ctx.resilient_parsing:
return
mpy = ctx.ensure_object(MicroPy)
stub_values = (
[i for i in [mpy.stubs.add(s) for s in value] if i is not None]
if value
else list(mpy.stubs)
)
if not stub_values:
mpy.log.error("You don't have any stubs!")
mpy.log.title("To add stubs to micropy, use $[micropy stubs add ]")
mpy.log.info("See: $[micropy stubs --help] for more information.")
raise typer.Abort(1)
if not value:
# if value was not explicitly provided, ask for selections.
stubs = [Choice(str(s), value=s) for s in stub_values]
stub_choices = prompt.checkbox("Which stubs would you like to use?", choices=stubs).ask()
if not stub_choices:
# mpy.log.error("You must choose at least one stub!")
raise typer.BadParameter(
"You must choose at least one stub!",
ctx,
)
return stub_choices
return stub_values
@app.command(name="init")
def main_init(
ctx: typer.Context,
path: Optional[Path] = typer.Argument(
None,
help="Path to project. Defaults to current working directory.",
callback=path_callback,
dir_okay=True,
file_okay=False,
show_default=False,
),
name: Optional[str] = typer.Option(
None,
"--name",
"-n",
help="Project Name. Defaults to path name.",
show_default=False,
callback=name_callback,
),
template: Optional[List[TemplateEnum]] = typer.Option(
None,
"--template",
"-t",
help="Templates to generate for project. Can be specified multiple times. Skips interactive prompt.",
show_default=False,
callback=template_callback,
),
stubs: Optional[List[str]] = typer.Option(
None,
"--stubs",
"-s",
help="Name of stubs to add to project. Can be specified multiple times. Skips interactive prompt.",
callback=stubs_callback,
show_default=False,
),
):
"""Create new Micropython Project.
\b When creating a new project, all files will be placed under the
generated folder.
"""
mpy: MicroPy = ctx.find_object(MicroPy)
mpy.log.title("Creating New Project")
# weird issue where "template" from args
# gets set a [None,None], but its correct in params.
template = ctx.params.get("template", template)
project = Project(path, name=name)
project.add(modules.StubsModule, mpy.stubs, stubs=stubs)
project.add(modules.PackagesModule, "requirements.txt")
project.add(modules.DevPackagesModule, "dev-requirements.txt")
project.add(
modules.TemplatesModule,
templates=[t.value for t in template if t],
run_checks=mpy.RUN_CHECKS,
)
proj_path = project.create()
try:
rel_path = f"./{proj_path.relative_to(Path.cwd())}"
except ValueError:
rel_path = proj_path
mpy.log.title(f"Created $w[{project.name}] at $w[{rel_path}]")
def ensure_project(ctx: typer.Context) -> Project:
mpy = ctx.ensure_object(MicroPy)
project = mpy.project
if not project.exists:
mpy.log.error("You are not currently in an active project!")
raise typer.Abort(1)
# todo: fix type issue.
return cast(Project, project)
def install_local_callback(ctx: typer.Context, value: Optional[Path]) -> Optional[Path]:
"""Handle package installation from local path."""
if ctx.resilient_parsing:
return
if value is None:
return value
mpy = ctx.ensure_object(MicroPy)
project = ensure_project(ctx)
pkg_name = next(iter(ctx.args), None)
mpy.log.title("Installing Local Package")
pkg_path = "-e " + str(value)
project.add_package(pkg_path, dev=ctx.params.get("dev", False), name=pkg_name)
raise typer.Exit()
def install_project_callback(ctx: typer.Context, value: Optional[List[str]]) -> Optional[List[str]]:
"""Handle project requirements install."""
if ctx.resilient_parsing:
return
if "path" in ctx.params:
return
if not value:
# only if no packages are provided.
mpy = ctx.ensure_object(MicroPy)
project = ensure_project(ctx)
mpy.log.title("Installing all Requirements")
try:
project.add_from_file(dev=ctx.params.get("dev", False))
except Exception as e:
mpy.log.error("Failed to load requirements!", exception=e)
raise typer.Abort() from e
else:
mpy.log.success("\nRequirements Installed!")
raise typer.Exit()
return value
@app.command(name="install")
def main_install(
ctx: typer.Context,
packages: Optional[List[str]] = typer.Argument(
None, help="Packages to install.", callback=install_project_callback
),
dev: bool = typer.Option(
default=False,
help="Install as development package. This will not generate stubs for the package.",
show_default=True,
),
path: Optional[Path] = typer.Option(
None,
help="Add dependency from local path. Can be a file or directory.",
callback=install_local_callback,
),
):
"""Install Packages as Project Requirements.
\b
Install a project dependency while enabling
intellisense, autocompletion, and linting for it.
\b
$ micropy install picoweb==1.8.2 blynklib
\b
\b
If no packages are passed and a requirements.txt file is found,
then micropy will install all packages listed in it.
\b
If the --dev flag is passed, then the packages are only
added to micropy.json. They are not stubbed.
\b
To add a dependency from a path, use the --path option
and provide a name for your package:
\b
$ micropy install --path ./src/lib/mypackage MyCustomPackage
\b
\b
You can import installed packages just as you would
on your actual device:
\b
_import _
"""
mpy: MicroPy = ctx.ensure_object(MicroPy)
project = ensure_project(ctx)
mpy.log.title("Installing Packages")
for pkg in packages:
try:
project.add_package(pkg, dev=dev)
except exc.RequirementException as e:
pkg_name = str(e.package)
mpy.log.error(f"Failed to install {pkg_name}!" " Is it available on PyPi?", exception=e)
raise typer.Abort() from e
================================================
FILE: micropy/app/stubs.py
================================================
from __future__ import annotations
import sys
import tempfile
from enum import Enum
from pathlib import Path
from typing import List, Optional, Type
import micropy.exceptions as exc
import typer
from micropy.exceptions import PyDeviceError
from micropy.logger import Log
from micropy.main import MicroPy
from micropy.pyd import (
DevicePath,
MessageHandlers,
MetaPyDeviceBackend,
ProgressStreamConsumer,
PyDevice,
)
from micropy.pyd.backend_rshell import RShellPyDeviceBackend
from micropy.pyd.backend_upydevice import UPyDeviceBackend
from micropy.stubs import source as stubs_source
from micropy.utils.stub import prepare_create_stubs
from stubber.codemod import board as stub_board
from stubber.codemod.modify_list import ListChangeSet
stubs_app = typer.Typer(name="stubs", rich_markup_mode="markdown", no_args_is_help=True)
@stubs_app.callback()
def stubs_callback():
"""Manage Micropy Stubs.
\b
Stub files are what enable linting,
Intellisense, Autocompletion, and more.
\b
To achieve the best results, you can install
stubs specific to your device/firmware using:
- *micropy stubs search* `STUB_NAME`
- *micropy stubs add* `STUB_NAME`
For more info, please check micropy stubs add --help
"""
pass
class CreateBackend(str, Enum):
upydevice = ("upydevice", UPyDeviceBackend)
rshell = ("rshell", RShellPyDeviceBackend)
def __new__(cls, value: str, backend: Type[MetaPyDeviceBackend]):
obj = str.__new__(cls, value)
obj._value_ = value
obj.backend = backend
return obj
def create_changeset(
value: Optional[List[str]], *, replace: bool = False
) -> Optional[ListChangeSet]:
if value is None:
return value
return ListChangeSet.from_strings(add=value, replace=replace)
@stubs_app.command(name="create")
def stubs_create(
ctx: typer.Context,
port: str = typer.Argument(..., help="Serial port used to connect to device"),
backend: CreateBackend = typer.Option(CreateBackend.upydevice, help="PyDevice backend to use."),
variant: stub_board.CreateStubsVariant = typer.Option(
stub_board.CreateStubsVariant.BASE,
"-v",
"--variant",
help="Create Stubs variant.",
rich_help_panel="Stubs",
),
module: Optional[List[str]] = typer.Option(
None,
"-m",
"--module",
help="Modules to look for and stub. This flag can be used multiple times.",
rich_help_panel="Stubs",
),
module_defaults: bool = typer.Option(
True, help="Include createstubs.py default modules.", rich_help_panel="Stubs"
),
exclude: Optional[List[str]] = typer.Option(
None,
"-e",
"--exclude",
help="Modules to exclude from stubber. This flag can be used multiple times.",
rich_help_panel="Stubs",
),
exclude_defaults: bool = typer.Option(
True,
help="Include createstubs.py default module excludes. This flag can be used multiple times.",
rich_help_panel="Stubs",
),
compile: bool = typer.Option(
True,
"-c",
"--compile",
help="Cross compile to .mpy via mpy-cross.",
rich_help_panel="Stubs",
),
):
"""Create stubs from micropython-enabled devices.
Utilize Josverl's [micropython-stubber](https://github.com/josverl/micropython-stubber/)
to generate stubs from your own micropython-enabled device.
\n
**Create stubs with defaults**:\n
- `micropy stubs create /dev/ttyUSB0`
\n
**Specify additional modules**:\n
- `micropy stubs create -m custom_module -m other_module /dev/ttyUSB0`\n
- _Only given modules_: `micropy stubs create -m custom_module --no-module-defaults /dev/ttyUSB0`
\n
**Exclude additional modules**:\n
- `micropy stubs create -e custom_module -e other_module /dev/ttyUSB0`\n
- _Only exclude given modules_: `micropy stubs create -e custom_module --no-module-defaults /dev/ttyUSB0`
\n
**Create Stubs Variants**:\n
- **mem**: Optimized for low memory devices._\n
- **db**: Persist stub progress across reboots.\n
- **lvgl**: Additional support for LVGL devices.\n
"""
mp: MicroPy = ctx.ensure_object(MicroPy)
log = mp.log
log.title(f"Connecting to Pyboard @ $[{port}]")
pyb_log = Log.add_logger("Pyboard", "bright_white")
def _get_desc(name: str, cfg: dict):
desc = f"{pyb_log.get_service()} {name}"
return name, cfg | dict(desc=desc)
message_handler = MessageHandlers(
on_message=lambda x: isinstance(x, str) and pyb_log.info(x.strip())
)
try:
pyb = PyDevice(
port,
auto_connect=True,
stream_consumer=ProgressStreamConsumer(on_description=_get_desc),
message_consumer=message_handler,
backend=backend.backend,
)
except (SystemExit, PyDeviceError):
log.error(f"Failed to connect, are you sure $[{port}] is correct?")
return None
log.success("Connected!")
if module or exclude:
log.title("Preparing createstubs for:")
log.info(f"Modules: {', '.join(module or [])}")
log.info(f"Exclude: {', '.join(exclude or [])}")
create_stubs = prepare_create_stubs(
variant=variant,
modules_set=create_changeset(module, replace=not module_defaults),
exclude_set=create_changeset(exclude, replace=not exclude_defaults),
compile=compile,
)
dev_path = DevicePath("createstubs.mpy") if compile else DevicePath("createstubs.py")
log.info("Executing stubber on pyboard...")
try:
pyb.run_script(create_stubs, DevicePath(dev_path))
except Exception as e:
# TODO: Handle more usage cases
log.error(f"Failed to execute script: {str(e)}", exception=e)
raise
log.success("Done!")
log.info("Copying stubs...")
with tempfile.TemporaryDirectory() as tmpdir:
pyb.copy_from(
DevicePath("/stubs"),
tmpdir,
verify_integrity=True,
# exclude due to ps1 var possibly different.
exclude_integrity={"sys.py", "usys.py"},
)
out_dir = Path(tmpdir)
stub_path = next(out_dir.iterdir())
log.info(f"Copied Stubs: $[{stub_path.name}]")
stub_path = mp.stubs.from_stubber(stub_path, out_dir)
stub = mp.stubs.add(str(stub_path))
pyb.remove(dev_path)
pyb.disconnect()
log.success(f"Added {stub.name} to stubs!")
return stub
@stubs_app.command(name="add")
def stubs_add(ctx: typer.Context, stub_name: str, force: bool = False):
"""Add Stubs from package or path.
\b
In general, stub package names follow this schema:
--
\b
For example:
esp32-micropython-1.11.0
\b
You can search premade stub packages using:
micropy stubs search
Checkout the docs on Github for more info.
"""
mpy: MicroPy = ctx.find_object(MicroPy)
proj = mpy.project
mpy.log.title(f"Adding $[{stub_name}] to stubs")
locator = stubs_source.StubSource(
[stubs_source.RepoStubLocator(mpy.repo), stubs_source.StubInfoSpecLocator()]
)
with locator.ready(stub_name) as stub:
stub_name = stub
try:
stub = mpy.stubs.add(stub_name, force=force)
except exc.StubNotFound:
mpy.log.error(f"$[{stub_name}] could not be found!")
sys.exit(1)
except exc.StubError:
mpy.log.error(f"$[{stub_name}] is not a valid stub!")
sys.exit(1)
else:
mpy.log.success(f"{stub.name} added!")
if proj.exists:
mpy.log.title(f"Adding $[{stub.name}] to $[{proj.name}]")
proj.add_stub(stub)
@stubs_app.command(name="search")
def stubs_search(ctx: typer.Context, query: str, show_outdated: bool = False):
"""Search available stubs."""
mpy: MicroPy = ctx.find_object(MicroPy)
installed_stubs = map(str, mpy.stubs._loaded | mpy.stubs._firmware)
results = [
(r, r.name in installed_stubs)
for r in mpy.repo.search(query, include_versions=show_outdated)
]
results = sorted(results, key=lambda pkg: pkg[0].name)
if not any(results):
mpy.log.warn(f"No results found for: $[{query}].")
sys.exit(0)
mpy.log.title(f"Results for $[{query}]:")
max_name = max(len(n[0].repo_name) for n in results)
for pkg, installed in results:
pad = max_name - len(pkg.repo_name) + 2
pad = pad if (pad % 2 == 0) else pad + 1
spacer = "{:>{pad}}".format("::", pad=pad)
repo_logger = Log.add_logger(f"{pkg.repo_name} {spacer}", "bright_white")
name = "{:>{pad}}".format(f"{pkg.name} ($w[{pkg.version}])", pad=pad)
name = f"{name} $B[(Installed)]" if installed else name
repo_logger.info(name)
@stubs_app.command(name="list")
def stubs_list(ctx: typer.Context):
"""List installed stubs."""
mpy: MicroPy = ctx.find_object(MicroPy)
def print_stubs(stub_list):
for firm, stubs in stub_list:
if stubs:
title = str(firm).capitalize()
mpy.log.title(f"$[{title}]:")
for stub in stubs:
mpy.log.info(str(stub))
mpy.log.title("Installed Stubs:")
mpy.log.info(f"Total: {len(mpy.stubs)}")
print_stubs(mpy.stubs.iter_by_firmware())
mpy.verbose = False
proj = mpy.project
if proj.exists:
mpy.log.title(f"Stubs used in {proj.name}:")
mpy.log.info(f"Total: {len(proj.stubs)}")
stubs = mpy.stubs.iter_by_firmware(stubs=proj.stubs)
print_stubs(stubs)
================================================
FILE: micropy/config/__init__.py
================================================
"""Configuration files and interfaces for them."""
from .config import Config
from .config_dict import DictConfigSource
from .config_json import JSONConfigSource
__all__ = ["Config", "JSONConfigSource", "DictConfigSource"]
================================================
FILE: micropy/config/config.py
================================================
from copy import deepcopy
from typing import Any, List, Optional, Sequence, Tuple, Type, Union
import dpath
from boltons import iterutils
from micropy.logger import Log, ServiceLog
from .config_json import JSONConfigSource
from .config_source import ConfigSource
"""Config Interface"""
class Config:
"""Configuration File Interface.
Automatically syncs config in memory
with config saved to disk.
Args:
path (Path): Path to save file at.
source_format (ConfigSource, optional): Configuration File Format.
Defaults to JSONConfigSource.
default (dict, optional): Default configuration.
Defaults to {}.
"""
def __init__(
self,
*args: Any,
source_format: Type[ConfigSource] = JSONConfigSource,
default: Optional[dict] = None,
):
default = default or dict()
self.log: ServiceLog = Log.add_logger(f"{__name__}")
self.format = source_format
self._source: ConfigSource = self.format(*args)
self._config = deepcopy(default)
if self._source.exists:
with self._source as src:
self.log.debug("loaded config values")
dpath.util.merge(self._config, src, flags=dpath.MERGE_REPLACE)
@property
def source(self) -> ConfigSource:
return self._source
@source.setter
def source(self, value: Any) -> ConfigSource:
self._source = self.format(value)
return self._source
@property
def config(self) -> dict:
return self._config
def raw(self) -> dict:
return self._config
def sync(self) -> dict:
"""Sync in-memory config with disk.
Returns:
dict: updated config
"""
with self.source as src:
dpath.util.merge(src, self.config, flags=dpath.MERGE_REPLACE)
return self.config
def parse_key(self, key: str) -> Tuple[Sequence[str], str]:
"""Parses key.
Splits it into a path and 'final key'
object. Each key is separates by a: "/"
Example:
>>> self.parse_key('item/subitem/value')
(('item', 'subitem'), 'value')
Args:
key (str): key in dot notation
Returns:
Tuple[Sequence[str], str]: Parsed key
"""
full_path = tuple(i for i in key.split("/"))
path = full_path[:-1]
p_key = full_path[-1]
return (path, p_key)
def get(self, key: str, default: Any = None) -> Any:
"""Retrieve config value.
Args:
key (str): Key (in dot-notation) of value to return.
default (Any, optional): Default value to return.
Defaults to None.
Returns:
Any: Value at key given
"""
try:
value = dpath.util.get(self.config, key)
except KeyError:
value = default
else:
return value
return value
def set(self, key: str, value: Any) -> Any:
"""Set config value.
Args:
key (str): Key (in dot-notation) to update.
value (Any): Value to set
Returns:
Any: Updated config
"""
dpath.set(self._config, key, value)
self.log.debug(f"set config value [{key}] => {value}")
return self.sync()
def add(self, key: str, value: Any) -> Any:
"""Overwrite or add config value.
Args:
key: Key to set
value: Value to add or update too
Returns:
Updated config
"""
dpath.new(self._config, key, value)
self.log.debug(f"added config value [{key}] -> {value}")
return self.sync()
def pop(self, key: str) -> Any:
"""Delete and return value at key.
Args:
key (str): Key to pop.
Returns:
Any: Popped value.
"""
path, target = self.parse_key(key)
value = self.get(key)
remapped = iterutils.remap(
self._config, lambda p, k, v: False if p == path and k == target else True
)
self._config = remapped
self.log.debug(f"popped config value {value} <- [{key}]")
return self.sync()
def extend(self, key: str, value: List[Any], unique: bool = False) -> dict:
"""Extend a list in config at key path.
Args:
key: Key to path to extend.
value: List of values to extend by.
unique: Only extend values if not already in values.
Returns:
Updated Config
"""
to_update = list(deepcopy(self.get(key, value)))
if unique:
value = [v for v in value if v not in to_update]
dpath.merge(to_update, value, flags=dpath.MERGE_ADDITIVE)
self.set(key, to_update)
return self.sync()
def upsert(self, key: str, value: Union[List[Any], dict]) -> dict:
"""Update or insert values into key list or dict.
Args:
key: Key to value to upsert.
value: Value to upsert by.
Returns:
Updated config.
"""
to_update = deepcopy(self.get(key, value))
dpath.merge(to_update, value, flags=dpath.MERGE_REPLACE)
self.add(key, to_update)
return self.sync()
def search(self, key):
"""Retrieve all values at key (with glob pattern).
Args:
key: Key with pattern to search with.
Returns:
Values matching key and pattern.
"""
return dpath.values(self.config, key)
================================================
FILE: micropy/config/config_dict.py
================================================
from typing import Optional
from .config_source import ConfigSource
class DictConfigSource(ConfigSource):
def __init__(self, config: Optional[dict] = None):
"""Dict Config Source.
Args:
config (dict, optional): Initial Config.
Defaults to {}.
"""
super().__init__(initial_config=config)
@property
def exists(self) -> bool:
return any(self.config.keys())
def process(self) -> dict:
return self.config
def prepare(self):
return super().prepare()
def save(self, content: dict) -> dict:
return content
================================================
FILE: micropy/config/config_json.py
================================================
import json
from pathlib import Path
from boltons.fileutils import AtomicSaver
from .config_source import ConfigSource
class JSONConfigSource(ConfigSource):
"""JSON Config File Source.
Args:
path (Path): Path to save config too.
"""
def __init__(self, path: Path):
super().__init__()
self._file_path: Path = path
@property
def file_path(self) -> Path:
"""Path to config file."""
return self._file_path
@file_path.setter
def file_path(self, value: Path) -> Path:
"""Set config file path.
Args:
value (Path): New path to config file
Returns:
Path: Path to config file
"""
self._file_path = value
return self._file_path
@property
def exists(self) -> bool:
return self.file_path.exists()
def process(self) -> dict:
"""Load config from JSON file.
Returns:
dict: config in file
"""
content = self.file_path.read_text()
if not content:
return {}
config = json.loads(content)
return config
def prepare(self):
if not self.file_path.exists():
self.log.debug(f"creating new config file: {self.file_path}")
self.file_path.parent.mkdir(parents=True, exist_ok=True)
self.file_path.touch()
def save(self, content: dict) -> Path:
"""Save current config.
Args:
content (dict): content to write to file.
Returns:
Path: path to config file.
"""
config = json.dumps(content, indent=4, separators=(",", ": "))
with AtomicSaver((str(self.file_path)), text_mode=True) as file:
file.write(config)
return self.file_path
================================================
FILE: micropy/config/config_source.py
================================================
"""Config Abstract."""
import abc
import contextlib
from typing import Any, Optional
from micropy.logger import Log, ServiceLog
class ConfigSource(contextlib.AbstractContextManager, metaclass=abc.ABCMeta):
"""Abstract Base Class for Config Sources.
Args:
initial_config (dict, optional): Initial config values.
Defaults to {}.
"""
def __init__(self, initial_config: Optional[dict] = None):
self._config: dict = initial_config or dict()
self.log: ServiceLog = Log.add_logger(__name__)
@property
def config(self) -> dict:
"""Current Config Content."""
return self._config
@config.setter
def config(self, value: dict) -> dict:
"""Set current config content.
Args:
value (dict): New value to set
Returns:
dict: Current config
"""
self._config = value
return self._config
@abc.abstractproperty
def exists(self) -> bool:
"""Property to check if source exists."""
@abc.abstractmethod
def save(self, content: Any) -> Any:
"""Method to save config."""
@abc.abstractmethod
def process(self) -> dict:
"""Read and process config file.
Returns:
dict: Config file content
"""
@abc.abstractmethod
def prepare(self) -> Any:
"""Method to prepare on enter."""
def __enter__(self) -> dict:
self.prepare()
self._config = self.process()
return self._config
def __exit__(self, *args):
self.save(self._config)
================================================
FILE: micropy/data/__init__.py
================================================
"""
micropy.data
~~~~~~~~~~~~~~
This module is merely to provide an easy method of locating
data files used by MicropyCli
"""
from pathlib import Path
__all__ = ["ROOT", "SCHEMAS", "REPO_SOURCES", "FILES", "STUB_DIR", "LOG_FILE", "STUBBER"]
# Paths
MOD_PATH = Path(__file__).parent
PATH = MOD_PATH.absolute()
ROOT = MOD_PATH.parent.absolute()
# Stub Schemas
SCHEMAS = PATH / "schemas"
# Default Stub Sources
REPO_SOURCES = PATH / "sources.json"
# Application Data
FILES = Path.home() / ".micropy"
STUB_DIR = FILES / "stubs"
LOG_FILE = FILES / "micropy.log"
# Libraries
LIB = ROOT / "lib"
STUBBER = LIB / "stubber"
================================================
FILE: micropy/data/schemas/firmware.json
================================================
{
"type": "object",
"required": [
"name",
"repo",
"firmware",
"modules",
"devices",
"versions"
],
"properties": {
"scope": {
"type": "string"
},
"name": {
"type": "string"
},
"repo": {
"type": "string"
},
"module_path": {
"type": ["string", "array"]
},
"firmware": {
"type": "string"
},
"excluded_modules": {
"type": "array",
"items": {
"type": "string"
}
},
"modules": {
"type": "array",
"items": {
"type": "string"
}
},
"devices": {
"type": "array",
"items": {
"type": "string"
}
},
"path": {
"type": "string"
},
"versions": {
"type": "array",
"items": {
"type": "object",
"required": ["version", "git_tag", "sha", "latest", "devices"],
"properties": {
"version": {
"type": "string"
},
"git_tag": {
"type": "string"
},
"sha": {
"type": "string"
},
"latest": {
"type": "boolean",
"default": false
},
"devices": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
}
}
}
================================================
FILE: micropy/data/schemas/stubs.json
================================================
{
"type": "object",
"required": ["firmware", "modules"],
"properties": {
"firmware": {
"type": "object",
"required": [
"nodename",
"version",
"sysname"
],
"properties": {
"family": {
"type": "string"
},
"machine": {
"type": "string"
},
"firmware": {
"type": "string"
},
"nodename": {
"type": "string"
},
"version": {
"type": "string"
},
"release": {
"type": "string"
},
"sysname": {
"type": "string"
},
"name": {
"type": "string"
}
}
},
"stubber": {
"$id": "#/properties/stubber",
"type": "object",
"title": "The Stubber Schema",
"required": ["version"],
"properties": {
"version": {
"type": "string"
}
}
},
"modules": {
"$id": "#/properties/modules",
"type": "array",
"title": "The Modules Schema",
"items": {
"type": "object",
"required": ["file", "module"],
"properties": {
"file": {
"type": "string"
},
"module": {
"type": "string"
}
}
}
}
}
}
================================================
FILE: micropy/data/sources.json
================================================
[
{
"display_name": "BradenM/micropy-stubs",
"name": "micropy-stubs",
"source": "https://raw.githubusercontent.com/BradenM/micropy-stubs/master/source.json"
},
{
"display_name": "Josverl/micropython-stubs",
"name": "micropython-stubs",
"source": "https://raw.githubusercontent.com/Josverl/micropython-stubs/main/publish/package_data.jsondb"
}
]
================================================
FILE: micropy/exceptions.py
================================================
"""Micropy Exceptions."""
from __future__ import annotations
class MicropyException(Exception):
"""Generic MicroPy Exception."""
class StubError(MicropyException):
"""Exception for any errors raised by stubs."""
def __init__(self, message=None, stub=None):
super().__init__(message)
self.stub = stub
self.message = message
if message is None:
message = "An error occurred with this stub."
class StubValidationError(StubError):
"""Raised when a stub fails validation."""
def __init__(self, path, errors, *args, **kwargs):
msg = f"Stub at[{str(path)}] encountered" f" the following validation errors: {str(errors)}"
super().__init__(msg, *args, **kwargs)
def __str__(self):
return self.message
class StubNotFound(StubError):
"""Raised when a stub cannot be found."""
def __init__(self, stub_name=None):
stub_name = stub_name or "Unknown"
msg = f"{stub_name} is not available!"
super().__init__(msg)
class RequirementException(MicropyException):
"""A Requirement Exception Occurred."""
def __init__(self, *args, **kwargs):
self.package = kwargs.pop("package", None)
super().__init__(*args, **kwargs)
class RequirementNotFound(RequirementException):
"""A requirement could not be found."""
class PyDeviceError(MicropyException):
"""Generic PyDevice exception."""
def __init__(self, message: str = None):
super().__init__(message)
self.message = message
class PyDeviceConnectionError(PyDeviceError):
_default_message = "Failed to connect to pydevice @ {location}."
def __init__(self, location: str):
super().__init__(self._default_message.format(location=location))
class PyDeviceFileIntegrityError(PyDeviceError):
_default_message = (
"Failed to verify integrity: {device_path} (device={device_sum}, recv={digest})"
)
def __init__(self, device_path: str, device_sum: str, digest: str):
super().__init__(
self._default_message.format(
device_path=device_path, device_sum=device_sum, digest=digest
)
)
================================================
FILE: micropy/logger.py
================================================
"""Logging functionality.
TODO: Split logging from UI, refactor.
"""
import logging
import re
from contextlib import contextmanager
from logging.handlers import RotatingFileHandler
import click
from micropy import data
class Log:
"""Borg for easy access to any Log from anywhere in the package."""
__shared_state = {}
def __init__(self):
self.__dict__ = self.__shared_state
self.parent_logger = ServiceLog()
self.loggers = [self.parent_logger]
@classmethod
def add_logger(cls, service_name, base_color="white", **kwargs):
"""Creates a new child ServiceLog instance."""
_self = cls()
parent = kwargs.pop("parent", _self.parent_logger)
logger = ServiceLog(service_name, base_color, parent=parent, **kwargs)
_self.loggers.append(logger)
return logger
@classmethod
def get_logger(cls, service_name):
"""Retrieves a child logger by service name."""
_self = cls()
logger = next(i for i in _self.loggers if i.service_name == service_name)
return logger
class ServiceLog:
"""Handles logging to stdout and micropy.log.
:param service_name: Active service to display
:type service_name: str
:param base_color: Color of name on output
:type base_color: str
"""
LOG_FILE = data.LOG_FILE
def __init__(self, service_name="MicroPy", base_color="bright_green", **kwargs):
self.parent = kwargs.get("parent", None)
self.LOG_FILE.parent.mkdir(exist_ok=True)
self.base_color = base_color
self.service_name = service_name
self.load_handler()
self.info_color = kwargs.get("info_color", "white")
self.accent_color = kwargs.get("accent_color", "yellow")
self.warn_color = kwargs.get("warn_color", "green")
self.show_title = kwargs.get("show_title", True)
self.stdout = kwargs.get("stdout", True)
@contextmanager
def silent(self):
self.stdout = False
yield self
self.stdout = True
def load_handler(self):
"""Loads Logging Module Formatting."""
self.log = logging.getLogger()
if not self.log.hasHandlers():
self.log.setLevel(logging.DEBUG)
self.log_handler = RotatingFileHandler(
str(self.LOG_FILE),
mode="a",
maxBytes=2 * 1024 * 1024,
backupCount=2,
encoding=None,
delay=0,
)
self.log_handler.setLevel(logging.DEBUG)
self.log.addHandler(self.log_handler)
self.log_handler = self.log.handlers[0]
log_format = "[%(asctime)s] %(levelname)s: " f"{self.service_name.lower()}: " "%(message)s"
self.log_handler.setFormatter(logging.Formatter(log_format, "%Y-%m-%d %H:%M:%S"))
def parse_msg(self, msg, accent_color=None):
"""Parses any color codes accordingly.
:param str msg:
:param str accent_color: (Default value = None)
:return: Parsed Message
:rtype: str
"""
msg_special = re.findall(r"\$(.*?)\[(.*?)\]", msg)
color = accent_color or self.accent_color
special = {"fg": color, "bold": True}
clean = msg
_parts = re.split(r"\$.*?\[(.*?)\]", msg)
parts = [(p, None) for p in _parts]
for w in msg_special:
if w[0] == "w":
special["fg"] = self.warn_color
if w[0] == "B":
special.pop("fg")
sindex = _parts.index(w[1])
parts[sindex] = (w[1], special)
clean = msg.replace(f"${w[0]}[{w[1]}]", w[1])
clean = clean.encode("ascii", "ignore").decode("utf-8").strip()
return (parts, clean)
def get_parents(self, names=None):
"""Retrieve all parents."""
names = names or []
if len(names) == 0:
names = [self.service_name]
if self.parent:
names.insert(0, self.parent.service_name)
names = self.parent.get_parents(names)
return names
def get_service(self, **kwargs):
"""Retrieves formatted service title.
:param **kwargs:
:return: formatted title
:rtype: str
"""
if not self.show_title:
return f"{self.parent.get_service(bold=True)}"
color = kwargs.pop("fg", self.base_color)
title = click.style(f"{self.service_name}", fg=color, **kwargs)
title = f"{title}{click.style(' ', fg=color)}"
if self.parent is not None:
title = f"{self.parent.get_service(bold=True)} {title}"
return title
def iter_formatted(self, message, **kwargs):
"""Iterate formatted message tuple into styled string.
Args:
message (tuple): tuple as (msg, style)
"""
if isinstance(message, str):
message, _ = self.parse_msg(message)
for msg in message:
text, mstyle = msg
mstyle = mstyle or kwargs
yield click.style(text, **mstyle)
def echo(self, msg, **kwargs):
"""Prints msg to stdout.
:param str msg: message to print
:param **kwargs:
"""
title_color = kwargs.pop("title_color", self.base_color)
title_bold = kwargs.pop("title_bold", True)
accent_color = kwargs.pop("accent", self.accent_color)
service_title = self.get_service(fg=title_color, bold=title_bold)
message, clean = self.parse_msg(msg, accent_color)
log_attr = kwargs.pop("log", None)
if log_attr:
self.load_handler()
log_func = getattr(logging, log_attr)
log_func(clean)
if self.stdout:
init_msg, init_style = message[0]
first_part, nl_part, _ = init_msg.partition("\n")
fp_clean = first_part.encode("ascii", "ignore").decode("unicode_escape")
if not fp_clean.strip() and nl_part == "\n":
init_msg = init_msg.replace("\n", "")
message[0] = (init_msg, init_style)
click.secho("")
click.secho(f"{service_title} ", nl=False)
post_nl = kwargs.pop("nl", None)
formatted = list(self.iter_formatted(message, **kwargs))
for msg in formatted:
do_nl = msg == formatted[-1]
click.echo(msg, nl=do_nl)
if post_nl:
click.echo("")
def info(self, msg, **kwargs):
"""Prints message with info formatting.
:param msg:
:param **kwargs:
:return: method to print msg
:rtype: method
"""
return self.echo(msg, log="info", **kwargs)
def title(self, msg, **kwargs):
"""Prints bolded info message.
Args:
msg (str): Message
"""
return self.info(f"\n{msg}", bold=True)
def error(self, msg, exception=None, **kwargs):
"""Prints message with error formatting.
:param msg:
:param **kwargs:
:return: method to print msg
:rtype: method
"""
bold = kwargs.pop("bold", (exception is not None))
self.echo(
msg,
log="error",
title_color="red",
title_bold=True,
fg="red",
accent="red",
bold=bold,
**kwargs,
)
if exception:
return self.exception(exception)
def warn(self, msg, **kwargs):
"""Prints message with warn formatting.
:param msg:
:param **kwargs:
:return: method to print msg
:rtype: method
"""
return self.echo(msg, log="warning", title_color="red", title_bold=True)
def exception(self, error, **kwargs):
"""Prints message with exception formatting.
:param error:
:param **kwargs:
:return: method to print msg
:rtype: method
"""
name = type(error).__name__
msg = f"{name}: {str(error)}"
return self.echo(msg, log="exception", title_color="red", fg="red", accent="red", **kwargs)
def success(self, msg, **kwargs):
"""Prints message with success formatting.
:param msg:
:param **kwargs:
:return: method to print msg
:rtype: method
:return: method to print msg
:rtype: method
"""
message = f"\u2714 {msg}"
return self.echo(message, log="info", fg="green", **kwargs)
def debug(self, msg, **kwargs):
"""Prints message with debug formatting.
:param msg:
:param **kwargs:
:return: method to log msg
:rtype: method
"""
if self.stdout:
with self.silent():
return self.debug(msg, **kwargs)
self.echo(msg, log="debug")
return msg
================================================
FILE: micropy/main.py
================================================
"""Main Module."""
from __future__ import annotations
from pathlib import Path
from typing import List, Optional
import attr
from micropy import data, utils
from micropy.logger import Log
from micropy.project import Project, modules
from micropy.stubs import RepositoryInfo, StubManager, StubRepository
from pydantic import parse_file_as
@attr.define(kw_only=True)
class MicroPyOptions:
root_dir: Path = attr.field(default=data.FILES)
stubs_dir: Path = attr.Factory(lambda self: self.root_dir / "stubs", takes_self=True)
class MicroPy:
"""Handles App State Management."""
RUN_CHECKS = True
repo: StubRepository
config: MicroPyOptions
_stubs: Optional[StubManager] = None
def __init__(self, *, options: Optional[MicroPyOptions] = None):
self.config = options or MicroPyOptions()
self.log = Log.get_logger("MicroPy")
self.verbose = True
self.log.debug("MicroPy Loaded")
repo_list = parse_file_as(List[RepositoryInfo], data.REPO_SOURCES)
self.repo = StubRepository()
for repo_info in repo_list:
self.repo = self.repo.add_repository(repo_info)
if not self.config.stubs_dir.exists():
self.setup()
def setup(self):
"""Creates necessary directories for micropy."""
self.log.debug("Running first time setup...")
self.log.debug(f"Creating .micropy directory @ {self.config.root_dir}")
self.config.stubs_dir.mkdir(parents=True, exist_ok=True)
@property
def stubs(self) -> StubManager:
"""Primary Stub Manager for MicroPy.
Returns:
StubManager: StubManager Instance
"""
if not self._stubs:
self._stubs = StubManager(resource=self.config.stubs_dir, repos=self.repo)
return self._stubs
@utils.lazy_property
def project(self):
"""Current active project if available.
Returns:
Project: Instance of Current Project
"""
proj = self.resolve_project(".", verbose=self.verbose)
return proj
def resolve_project(self, path, verbose=True):
"""Returns project from path if it exists.
Args:
path (str): Path to test
verbose (bool): Log to stdout. Defaults to True.
Returns:
Project if it exists
"""
path = Path(path).absolute()
proj = Project(path)
proj.add(modules.StubsModule, self.stubs)
proj.add(modules.PackagesModule, "requirements.txt")
proj.add(modules.DevPackagesModule, "dev-requirements.txt")
proj.add(modules.TemplatesModule, run_checks=self.RUN_CHECKS)
if proj.exists:
if verbose:
self.log.title("Loading Project")
proj.load()
if verbose:
self.log.success("Ready!")
return proj
return proj
================================================
FILE: micropy/packages/__init__.py
================================================
"""Packages Module.
Allows user to address different dependency types (package, module,
path, pypi, etc.) through a single uniform api.
"""
from pathlib import Path
from typing import Any, Optional, Union
import requirements
from .package import Package
from .source_package import PackageDependencySource, VCSDependencySource
from .source_path import LocalDependencySource
def create_dependency_source(
requirement: str, name: Optional[str] = None, **kwargs: Any
) -> Union[LocalDependencySource, PackageDependencySource, VCSDependencySource]:
"""Factory for creating a dependency source object.
Args:
requirement (str): Package name/path/constraints in string form.
name (str, optional): Override package name.
Defaults to None.
Returns:
Appropriate Dependency Source
"""
req = next(requirements.parse(str(requirement)))
if req.local_file:
path = Path(req.path)
name = name or path.name
pkg = Package(name, req.specs, path=req.path)
source = LocalDependencySource(pkg, path)
return source
pkg = Package(**req.__dict__)
if pkg.vcs is not None or pkg.revision is not None:
return VCSDependencySource(pkg, **kwargs)
return PackageDependencySource(pkg, **kwargs)
__all__ = [
"Package",
"PackageDependencySource",
"LocalDependencySource",
"create_dependency_source",
]
================================================
FILE: micropy/packages/package.py
================================================
from pathlib import Path
from typing import List, Optional, Tuple
import requirements
from packaging.utils import canonicalize_name
class Package:
def __init__(
self,
name: str,
specs: List[Tuple[str, str]],
path: Optional[Path] = None,
uri: Optional[str] = None,
vcs: Optional[str] = None,
revision: Optional[str] = None,
line: Optional[str] = None,
**kwargs,
):
"""Generic Python Dependency.
Args:
name (str): Name of package
specs (List[Tuple[str, str]]): Package constraints.
path: path to package
"""
self._name = name
self._specs = specs
self._path = path
self._uri = uri
self._vcs = vcs
self._revision = revision
self._line = line
self.editable = self._path is not None
@property
def name(self) -> str:
return canonicalize_name(self._name)
@property
def path(self) -> Optional[Path]:
if not self._path:
return None
return Path(self._path)
@property
def full_name(self) -> str:
if self._line and self._vcs:
return self._line
if self._path:
return self.pretty_specs
if not self._specs:
return self.name
return f"{self.name}{self.pretty_specs}"
@property
def uri(self) -> Optional[str]:
if self._vcs and self._vcs in self._uri[:4]:
# handle 'git+https' schemas
return self._uri[4:]
return self._uri
@property
def vcs(self) -> Optional[str]:
return self._vcs
@property
def revision(self) -> Optional[str]:
return self._revision
@property
def line(self) -> Optional[str]:
return self._line
@property
def specs(self) -> List[Tuple[str, str]]:
return self._specs
@property
def pretty_specs(self) -> str:
if self.line and self.vcs:
return self.line
if self._path:
return f"-e {self._path}"
if not self.specs:
return "*"
_specs = ["".join(i for i in s) for s in self.specs]
return "".join(_specs)
@classmethod
def from_text(cls, name: str, specs: str) -> "Package":
"""Create package from text.
Args:
name: name of package
specs: package constraints
Returns:
Package instance
"""
if "http" in specs:
req = next(requirements.parse(specs))
return cls(**req.__dict__)
if "-e" in specs:
req = next(requirements.parse(specs))
return cls(name, req.specs, path=req.path)
req_name = name
if specs != "*":
req_name = f"{name}{specs}"
req = next(requirements.parse(req_name))
return cls(req.name, req.specs)
def __str__(self) -> str:
return self.full_name
================================================
FILE: micropy/packages/source.py
================================================
from contextlib import AbstractContextManager, ExitStack, contextmanager
from pathlib import Path
from typing import List, Optional, Tuple
from boltons import fileutils
from micropy import utils
from micropy.logger import Log, ServiceLog
from .package import Package
class DependencySource(AbstractContextManager):
"""Base class for managing dependency sources.
Args:
package (Package): package the source points too.
"""
_ignore_stubs: List[str] = ["setup.py", "__version__", "test_"]
def __init__(self, package: Package):
self.is_local = False
self._package = package
self.log: ServiceLog = Log.add_logger(repr(self))
@property
def package(self) -> Package:
return self._package
@contextmanager
def handle_cleanup(self):
with ExitStack() as stack:
stack.push(self)
yield
# no errors, continue on
stack.pop_all()
def get_root(self, path: Path) -> Optional[Path]:
"""Determines package root if it has one.
Args:
path (Path): Path to check
Returns:
bool: True if is package
"""
init = next(path.rglob("__init__.py"), None)
if init:
return init.parent
return None
def generate_stubs(self, path: Path) -> List[Tuple[Path, Path]]:
"""Generate Stub Files from a package.
Args:
path (Path): Path to package.
Returns:
List[Tuple[Path, Path]]: List of tuples containing
a path to the original file and stub, respectively.
"""
py_files = fileutils.iter_find_files(str(path), patterns="*.py", ignored=self._ignore_stubs)
stubs = [utils.generate_stub(f) for f in py_files]
return stubs
def __enter__(self):
"""Method to prepare source."""
def __exit__(self, *args):
return super().__exit__(*args)
def __repr__(self):
return f"<{self.__class__.__name__} {self.package}>"
================================================
FILE: micropy/packages/source_package.py
================================================
import shutil
from pathlib import Path
from tempfile import mkdtemp
from typing import Any, Callable, List, Optional, Tuple, Union
from git import Repo
from micropy import utils
from micropy.exceptions import RequirementNotFound
from .package import Package
from .source import DependencySource
class PackageDependencySource(DependencySource):
"""Dependency Source for pypi packages.
Args:
package (Package): Package source points too.
format_desc: Callback to format progress bar description.
Defaults to None.
"""
repo: str = "https://pypi.org/pypi/{name}/json"
def __init__(self, package: Package, format_desc: Optional[Callable[..., Any]] = None):
super().__init__(package)
try:
utils.ensure_valid_url(self.repo_url)
except Exception as e:
raise RequirementNotFound(
f"{self.repo_url} is not a valid url!", package=self.package
) from e
else:
self._meta: dict = utils.get_package_meta(str(self.package), self.repo_url)
self.format_desc = format_desc or (lambda n: n)
@property
def repo_url(self) -> str:
_url = self.repo.format(name=self.package.name)
return _url
@property
def source_url(self) -> str:
return self._meta.get("url", None)
@property
def file_name(self) -> str:
return utils.get_url_filename(self.source_url)
def fetch(self) -> bytes:
"""Fetch package contents into memory.
Returns:
bytes: Package archive contents.
"""
self.log.debug(f"fetching package: {self.file_name}")
desc = self.format_desc(self.file_name)
content = utils.stream_download(self.source_url, desc=desc)
return content
def __enter__(self) -> Union[Path, List[Tuple[Path, Path]]]:
"""Prepare Pypi package for installation.
Extracts the package into a temporary directory then
generates stubs for type hinting.
This helps with intellisense.
If the dependency is a module, a list
of tuples with the file and stub path, respectively,
will be returned. Otherwise, the path to the package
root will be returned.
Returns:
Root package path or list of files.
"""
self.tmp_path = Path(mkdtemp())
with self.handle_cleanup():
path = utils.extract_tarbytes(self.fetch(), self.tmp_path)
stubs = self.generate_stubs(path)
pkg_root = self.get_root(path)
return pkg_root or stubs
def __exit__(self, *args):
shutil.rmtree(self.tmp_path, ignore_errors=True)
return super().__exit__(*args)
class VCSDependencySource(DependencySource):
"""Dependency Source for vcs packages."""
def __init__(self, package: Package, format_desc: Optional[Callable[..., Any]] = None):
super().__init__(package)
self.log.debug(
f"VCS package!, {self.package.revision}@{self.package.vcs}@{self.package.full_name}"
)
self._repo: Optional[Repo] = None
try:
utils.ensure_valid_url(self.repo_url)
except Exception as e:
raise RequirementNotFound(
f"{self.repo_url} is not a valid VCS url!", package=self.package
) from e
else:
self.format_desc = format_desc or (lambda n: n)
@property
def repo_url(self) -> str:
return self.package.uri
@property
def source_url(self) -> str:
return self.package.uri
@property
def file_name(self) -> str:
return self.package.name
def fetch(self, dest_path: Path) -> Path:
"""Clones VCS repository to a given directory.
Args:
dest_path: Path to clone directory too.
Returns:
Path to clone repository.
"""
self.log.debug(f"fetching vcs package: ${self.file_name} @ ${self.repo_url}")
self.format_desc(self.file_name)
self._repo = Repo.clone_from(self.repo_url, str(dest_path))
return dest_path
def __enter__(self) -> Union[Path, List[Tuple[Path, Path]]]:
"""Prepare VCS repository for installation.
See PackageDependencySource.__enter__
Returns:
Root package path or list of files.
"""
self.tmp_path = Path(mkdtemp())
with self.handle_cleanup():
path = self.fetch(self.tmp_path)
stubs = self.generate_stubs(path)
pkg_root = self.get_root(path)
return pkg_root or stubs
def __exit__(self, *args):
shutil.rmtree(self.tmp_path, ignore_errors=True)
return super().__exit__(*args)
================================================
FILE: micropy/packages/source_path.py
================================================
from pathlib import Path
from typing import List, Optional, Tuple, Union
from .package import Package
from .source import DependencySource
class LocalDependencySource(DependencySource):
"""Dependency Source that is available locally.
Args:
package (Package): Package source points too.
path (Path): Path to package.
"""
def __init__(self, package: Package, path: Path):
super().__init__(package)
self._path = path
self.is_local = True
@property
def path(self) -> Path:
return self._path
def __enter__(self) -> Union[Path, List[Tuple[Path, Optional[Path]]]]:
"""Determines appropriate path.
Returns:
Path to package root or list of files.
"""
return self.path
================================================
FILE: micropy/project/__init__.py
================================================
"""Module for generating/managing projects."""
from . import modules
from .project import Project
__all__ = ["Project", "modules"]
================================================
FILE: micropy/project/checks.py
================================================
"""Various requirement checks for templates."""
import subprocess as subproc
from functools import partial as _p
from micropy.logger import Log
from packaging import version
VSCODE_MS_PY_MINVER = "2019.9.34474"
log = Log.get_logger("MicroPy")
def iter_vscode_ext(name=None):
"""Iterates over installed VSCode Extensions.
Args:
name (str, optional): Name of Extension to Yield
"""
_cmd = "code --list-extensions --show-versions"
proc = subproc.run(_cmd, capture_output=True, shell=True)
results = [e.strip() for e in proc.stdout.splitlines()]
for ext in results:
ename, vers = ext.split("@")
if not name:
yield (ename, vers)
if name and ename == name:
yield (ename, vers)
def vscode_ext_min_version(ext, min_version=VSCODE_MS_PY_MINVER, info=None):
"""Check if installed VScode Extension meets requirements.
Args:
ext (str): Name of Extension to Test
min_version (str, optional): Minimum version.
Defaults to VSCODE_MS_PY_MINVER.
info (str, optional): Additional information to output.
Defaults to None.
Returns:
bool: True if requirement is satisfied, False otherwise.
"""
try:
name, vers = next(iter_vscode_ext(name=ext), (ext, "0.0.0"))
except Exception as e:
log.debug(f"vscode check failed to run: {e}")
log.debug("skipping...")
return True
else:
cur_vers = version.parse(vers)
min_vers = version.parse(min_version)
if cur_vers >= min_vers:
return True
log.error(f"\nVSCode Extension {ext} failed to satisfy requirements!", bold=True)
log.error(f"$[Min Required Version]: {min_vers}")
log.error(f"$[Current Version:] {cur_vers}")
if info:
log.warn(info)
return False
TEMPLATE_CHECKS = {
"ms-python": _p(
vscode_ext_min_version,
"ms-python.python",
info=("VSCode Integration will fail! " "See $[BradenM/micropy-cli#50] for details.\n"),
),
}
================================================
FILE: micropy/project/modules/__init__.py
================================================
"""Project Modules."""
from .modules import HookProxy, ProjectModule
from .packages import DevPackagesModule, PackagesModule
from .stubs import StubsModule
from .templates import TemplatesModule
__all__ = [
"TemplatesModule",
"PackagesModule",
"StubsModule",
"ProjectModule",
"DevPackagesModule",
"HookProxy",
]
================================================
FILE: micropy/project/modules/modules.py
================================================
from __future__ import annotations
import abc
import inspect
from copy import deepcopy
from functools import wraps
from typing import Any, Callable, List, Optional, Tuple, Type, TypeVar, Union
from micropy import utils
from micropy.config import Config
from micropy.logger import Log, ServiceLog
"""Project Packages Module Abstract Implementation"""
T = TypeVar("T")
ProxyItem = List[Tuple[T, str]]
class ProjectModule(metaclass=abc.ABCMeta):
"""Abstract Base Class for Project Modules."""
PRIORITY: int = 0
_hooks: List[HookProxy] = []
def __init__(self, parent: Optional[ProjectModule] = None, log: Optional[ServiceLog] = None):
self._parent = parent
self.log = log
@property
def parent(self):
"""Component Parent."""
return self._parent
@parent.setter
def parent(self, parent: Type[ProjectModule]) -> Type[ProjectModule]:
"""Sets component parent.
Args:
parent (Any): Parent to set
"""
self._parent = parent
return self.parent
@abc.abstractproperty
def config(self) -> Union[dict, Config]:
"""Config values specific to component."""
@abc.abstractmethod
def load(self):
"""Method to load component."""
@abc.abstractmethod
def create(self, *args: Any, **kwargs: Any) -> Any:
"""Method to create component."""
@abc.abstractmethod
def update(self):
"""Method to update component."""
# FIXME: B027
def add(self, component: Type[ProjectModule], *args: Any, **kwargs: Any) -> Any: # noqa: B027
"""Adds component.
Args:
component (Any): Component to add.
"""
# FIXME: B027
def remove(self, component: Type[ProjectModule]) -> Any: # noqa: B027
"""Removes component.
Args:
component (Any): Component to remove.
"""
@classmethod
def hook(cls, *args: Any, **kwargs: Any) -> Callable[..., Any]:
"""Decorator for creating a Project Hook.
Allows decorated method to be called from parent
container.
Returns:
Callable: Decorated function.
"""
def _hook(func: T) -> Callable[..., Any]:
name = kwargs.get("name", func.__name__)
hook = next((i for i in cls._hooks if i._name == name), None)
if not hook:
hook = HookProxy(name)
ProjectModule._hooks.append(hook)
hook.add_method(func, **kwargs)
@wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> T:
return func(*args, **kwargs)
return wrapper
return _hook
def resolve_hook(self, name: str) -> Union[Optional[HookProxy], T]:
"""Resolves appropriate hook for attribute name.
Args:
name (str): Attribute name to resolve hook for.
Returns:
Optional[HookProxy]: Callable Proxy for ProjectHook.
NoneType: Name could not be resolved.
"""
_hook = None
for hook in self._hooks:
if hook._name == name:
_hook = hook
_hook.add_instance(self)
if _hook.is_descriptor():
return _hook.get()
return _hook
class HookProxy:
"""Proxy for Project Hooks.
Allows multiple project hooks with the same name by
creating individual hooks for any defined permutations
of kwargs.
This is accomplished by creating a unique name for each
permutation proxying the original attribute name to the
appropriate method determined from the provided kwargs.
Args:
name (str): Name of Proxy
"""
def __init__(self, name: str):
self.methods: List[ProxyItem[Callable[..., Any]]] = []
self.instances: List[Type[ProjectModule]] = []
self._name: str = name
self.log: ServiceLog = Log.add_logger(str(self))
def __call__(self, *args, **kwargs):
proxy_kwargs = deepcopy(kwargs)
proxy = self.resolve_proxy(**proxy_kwargs)
if proxy:
return getattr(*proxy)(*args, **kwargs)
def __str__(self):
name = f"HookProxy({self._name})"
return name
def __repr__(self):
name = f"HookProxy(name={self._name}, methods=[{self.methods}])"
return name
def resolve_proxy(self, **kwargs: Any) -> (Type[ProjectModule], str):
"""Resolves appropriate instance and method to proxy to.
If additional kwargs are provided and a proxy is not found,
the function will continue to remove one kwarg and
recurse into itself until either a match is found or it runs
out of kwargs.
Returns:
Instance and method name if resolved, otherwise None.
"""
proxy_kwargs = deepcopy(kwargs)
for method, name in self.methods:
_name = self.get_name(method, proxy_kwargs)
if name == _name:
instance = self._get_instance(method)
self.log.debug(f"{self._name} proxied to [{_name}@{instance}]")
return (instance, method.__name__)
if proxy_kwargs:
self.log.debug(
f"could not resolve proxy: {self._name}[{proxy_kwargs}], broadening search..."
)
proxy_kwargs.popitem()
return self.resolve_proxy(**proxy_kwargs)
return None
def _get_instance(self, attr: Callable[..., Any]) -> Optional[Type[ProjectModule]]:
"""Retrieves instance from attribute.
Args:
attr (Callable): Attribute to use.
Returns:
Instance the attribute belongs to.
"""
_class = utils.get_class_that_defined_method(attr)
if _class:
instance = next((i for i in self.instances if isinstance(i, _class)), None)
return instance
def is_descriptor(self) -> bool:
"""Determine if initial method provided is a descriptor."""
method = self.methods[0][0]
instance = self._get_instance(method)
if instance:
attr = inspect.getattr_static(instance, self._name)
return inspect.isdatadescriptor(attr)
return False
def get(self) -> T:
"""Get initial method descriptor value."""
instance = self._get_instance(self.methods[0][0])
self.log.debug(f"{self._name} proxied to [property@{instance}]")
return getattr(instance, self._name)
def add_method(self, func: Callable[..., Any], **kwargs: Any) -> ProxyItem[Callable[..., Any]]:
"""Adds method to Proxy.
Any kwargs provided will be used to generate the unique
hook name.
Args:
func (Callable): Method to add
Example:
>>> def test_func(arg1, kwarg1=False):
pass
>>> self.add_method(test_func, {'kwarg1': False})
(test_func, '_hook__test_func__kwarg1_False')
Returns:
Tuple[Callable, str]: Tuple containing method and unique hook name.
"""
name = self.get_name(func, kwargs)
hook = (func, name)
self.methods.append(hook)
self.log.debug(f"Method added to proxy: {hook}")
return hook # type: ignore
def add_instance(self, inst: Any) -> Any:
"""Add instance to Proxy.
Args:
inst (Any): Instance to add.
"""
return self.instances.append(inst)
def get_name(self, func: Callable[..., Any], params: Optional[dict] = None) -> str:
"""Generates name from method and provided kwargs.
Args:
func (Callable): Method to generate name for.
params (Dict[Any, Any], optional): Any kwargs to update the defaults with.
Defaults to None. If none, uses default kwargs.
Returns:
str: Generated name
"""
params = params or {}
sig = inspect.signature(func)
_default = {
p.name: p.default
for p in sig.parameters.values()
if p.kind == p.POSITIONAL_OR_KEYWORD and p.default is not p.empty
}
params = {**_default, **params}
name = f"_hook__{self._name}__{'__'.join(f'{k}_{v}' for k, v in params.items())}"
return name
================================================
FILE: micropy/project/modules/packages.py
================================================
"""Project Packages Module."""
import shutil
from pathlib import Path
from typing import Any, Optional, Union
from boltons import fileutils
from micropy import utils
from micropy.config import Config
from micropy.packages import (
LocalDependencySource,
Package,
PackageDependencySource,
create_dependency_source,
)
from micropy.project.modules import ProjectModule
class PackagesModule(ProjectModule):
"""Project Module for handling requirements.
Args:
path (str): Path to create requirements file at.
packages (dict, optional): Initial packages to use.
Defaults to None.
"""
name: str = "packages"
PRIORITY: int = 7
def __init__(self, path, **kwargs):
super().__init__(**kwargs)
self._path = Path(path)
@property
def packages(self):
_packages = self.config.get(self.name, {})
return _packages
@property
def path(self):
"""Path to requirements file.
Returns:
Path: Path to file
"""
path = self.parent.path / self._path
return path
@property
def pkg_path(self):
"""Path to package data folder.
Returns:
Path: Path to folder.
"""
return self.parent.data_path / self.parent.name
@property
def config(self) -> Config:
"""Config values specific to component.
Returns:
Component config.
"""
return self.parent.config
@property
def context(self) -> Config:
"""Context values specific to component.
Returns:
Context values.
"""
return self.parent.context
@property
def cache(self) -> Config:
"""Project Cache.
Returns:
Project wide cache
"""
return self.parent.cache
def install_package(self, source: Union[LocalDependencySource, PackageDependencySource]) -> Any:
with source as files:
if source.is_local:
self.log.debug(f"installing {source} as local")
return
if isinstance(files, list):
self.log.debug(f"installing {source} as module(s)")
# Iterates over flattened list of stubs tuple
file_paths = [(f, (self.pkg_path / f.name)) for f in list(sum(files, ()))]
for paths in file_paths:
shutil.move(*paths) # overwrites if existing
return file_paths
self.log.debug(f"installing {source} as package")
pkg_path = self.pkg_path / source.package.name
return fileutils.copytree(files, pkg_path)
@ProjectModule.hook(dev=False)
def add_from_file(self, path: Optional[Path] = None, dev: bool = False, **kwargs: Any) -> dict:
"""Loads all requirements from file.
Args:
path: Path to file. Defaults to self.path.
dev: If dev requirements should be loaded.
Defaults to False.
"""
path = path or self.path
reqs = utils.iter_requirements(path)
self.log.debug(f"loading requirements from: {path}")
for r in reqs:
pkg = create_dependency_source(r.line).package
if not self.packages.get(pkg.name):
self.config.add(self.name + "/" + pkg.name, pkg.pretty_specs)
if pkg.editable:
self.context.extend("local_paths", [pkg.path], unique=True)
return self.packages
@ProjectModule.hook()
def add_package(self, package, dev=False, **kwargs):
"""Add requirement to project.
Args:
package (str): package name/spec
dev (bool, optional): If dev requirements should be loaded.
Defaults to False.
Returns:
dict: Dictionary of packages
"""
self.log.debug(f"adding new dependency: {package}")
source = create_dependency_source(package, **kwargs)
pkg = source.package
self.log.info(f"Adding $[{pkg.name}] to requirements...")
if self.packages.get(pkg.name, None):
self.log.error(f"$[{pkg}] is already installed!")
self.update()
return None
self.config.add(self.name + "/" + pkg.name, pkg.pretty_specs)
try:
self.load()
except Exception as e:
self.log.error(f"Failed to install: {pkg.name}", exception=e)
self.config.pop(self.name + "/" + pkg.name)
raise
else:
if pkg.editable:
self.context.extend("local_paths", [pkg.path], unique=True)
self.log.success("Package installed!")
finally:
self.parent.update()
# TODO: B012
return self.packages # noqa
def load(self, fetch=True, **kwargs):
"""Retrieves and stubs project requirements."""
self.pkg_path.mkdir(parents=True, exist_ok=True)
if self.path.exists():
self.add_from_file(self.path)
pkg_keys = set(self.packages.keys())
pkg_cache = self.cache.get(self.name)
new_pkgs = pkg_keys.copy()
if pkg_cache:
new_pkgs = new_pkgs - set(pkg_cache)
new_packages = [
Package.from_text(name, spec)
for name, spec in self.packages.items()
if name in new_pkgs
]
if fetch:
if new_packages:
self.log.title("Fetching Requirements")
for req in new_packages:
def format_desc(p):
return "".join(self.log.iter_formatted(f"$B[{p}]"))
source = create_dependency_source(
str(req),
name=req.name,
format_desc=lambda p: f"{self.log.get_service()} {format_desc(p)}",
)
self.install_package(source)
self.update()
self.cache.upsert(self.name, list(pkg_keys))
def create(self):
"""Create project files."""
self.pkg_path.mkdir(parents=True, exist_ok=True)
if not self.config.get(self.name):
self.config.add(self.name, {})
return self.update()
def update(self):
"""Dumps packages to file at path."""
if not self.path.exists():
self.path.touch()
pkgs = [Package.from_text(name, spec) for name, spec in self.config.get(self.name).items()]
self.log.debug(f"dumping to {self.path.name}")
with self.path.open("r+") as f:
content = [c.strip() for c in f.readlines() if c.strip() != ""]
_lines = sorted({str(p) for p in pkgs} | set(content))
lines = [line + "\n" for line in _lines]
self.log.debug(f"dumping: {lines}")
f.seek(0)
f.writelines(lines)
local_paths = [p.path for p in pkgs if p.editable]
if local_paths:
self.context.add("local_paths", local_paths)
self.context.extend("paths", [self.pkg_path], unique=True)
class DevPackagesModule(PackagesModule):
"""Project Module for Dev Packages."""
PRIORITY: int = 8
def __init__(self, path, **kwargs):
super().__init__(path, **kwargs)
self.name = "dev-packages"
def create(self):
"""Creates component."""
self.config.add(f"{self.name}/micropy-cli", "*")
super().create()
def load(self, *args, **kwargs):
"""Load component."""
super().load(*args, **kwargs, fetch=False)
@ProjectModule.hook(dev=True)
def add_package(self, package, **kwargs):
"""Adds package."""
return super().add_package(package, **kwargs)
@ProjectModule.hook(dev=True)
def add_from_file(self, path=None, **kwargs):
"""Adds packages from file."""
return super().add_from_file(path=path, **kwargs)
================================================
FILE: micropy/project/modules/stubs.py
================================================
"""Project Stubs Module."""
import sys
from pathlib import Path
from typing import Any, List, Sequence, Union
from boltons import setutils
from micropy.project.modules import ProjectModule
from micropy.stubs import StubManager
from micropy.stubs.stubs import DeviceStub
class StubsModule(ProjectModule):
"""Project module for handling Stubs.
Args:
stub_manager (StubManager): StubManager instance.
stubs (List[Type[Stub]], optional): Initial Stubs to use.
"""
PRIORITY: int = 9
def __init__(
self, stub_manager: StubManager, stubs: Sequence[DeviceStub] = None, **kwargs: Any
):
super().__init__(**kwargs)
self.stub_manager: StubManager = stub_manager
self._stubs: Sequence[DeviceStub] = stubs or []
@property
def context(self):
"""Component stub context."""
return self.parent.context
@property
def config(self) -> dict:
"""Component specific config values.
Returns:
dict: Current config.
"""
return self.parent.config
@property
@ProjectModule.hook()
def stubs(self) -> Union[StubManager, Sequence[DeviceStub]]:
"""Component stubs.
Returns:
List[micropy.stubs.Stub]: List of stubs used in project.
"""
_stubs = self.context.get("stubs", [])
return self._resolve_subresource(_stubs)
def get_stub_tree(self, stubs) -> Sequence[Path]:
"""Retrieve and order paths to base stubs and any stubs they depend on.
Args:
stubs: List of Stub Items
Returns:
Paths to all stubs project depends on.
"""
stub_tree = setutils.IndexedSet()
base_stubs = setutils.IndexedSet([s.stubs for s in stubs])
frozen = [s.frozen for s in stubs]
fware_mods = [s.firmware.frozen for s in stubs if s.firmware is not None]
stub_tree.update(*frozen, *fware_mods, *base_stubs)
return list(stub_tree)
def _resolve_subresource(
self, stubs: List[DeviceStub]
) -> Union[StubManager, Sequence[DeviceStub]]:
"""Resolves stub resource.
Args:
stubs (stubs): Stubs Passed to Manager
"""
if not hasattr(self, "_parent"):
return self._stubs
if not self.parent.exists:
return self._stubs
try:
resource = set(self.stub_manager.resolve_subresource(stubs, self.parent.data_path))
except OSError as e:
self.log.error("Failed to Create Stub Links!", exception=e)
sys.exit(1)
else:
self.config.upsert("stubs", {s.name: s.stub_version for s in stubs})
return resource
def _load_stub_data(self, stub_data=None, **kwargs):
"""Loads Serialized Stub Data.
Args:
stub_data (dict): Dict of Stubs
"""
for name, location in stub_data.items():
_path = Path(location).absolute()
if Path(_path).exists():
yield self.stub_manager.add(_path)
yield self.stub_manager.add(name)
def load(self, **kwargs):
"""Loads stubs from info file.
Args:
stub_list (dict): Dict of Stubs
"""
self.config.upsert("stubs", {s.name: s.stub_version for s in self._stubs})
stubs = list(self._load_stub_data(stub_data=self.config.get("stubs")))
stubs.extend(self.stubs)
stubs = self._resolve_subresource(stubs)
self.context.upsert("stubs", stubs)
self.context.upsert("paths", self.get_stub_tree(self.stubs))
return self.stubs
def create(self):
"""Create stub project files."""
self.log.info(f"Stubs: $[{' '.join(str(s) for s in self.stubs)}]")
return self.load()
def update(self):
"""Update current project stubs."""
self.load()
return self.stubs
@ProjectModule.hook()
def add_stub(self, stub, **kwargs):
"""Add stub to project.
Args:
stub (Stub): Stub object to add
Returns:
[Stubs]: Project Stubs
"""
self.context.extend("stubs", [stub])
self.log.info("Loading project...")
self._resolve_subresource(self.stubs)
self.log.info("Updating Project Info...")
self.parent.update()
self.log.info(f"Project Stubs: $[{' '.join(str(s) for s in self.stubs)}]")
self.log.success("\nProject Updated!")
return self.stubs
================================================
FILE: micropy/project/modules/templates.py
================================================
"""Project Templates Module."""
from micropy.project.modules import ProjectModule
from micropy.project.template import TemplateProvider
class TemplatesModule(ProjectModule):
"""Project Templates Module.
Generates and manages project files using the Projects
context.
Args:
templates (List[str]): List of templates to use.
run_checks (bool, optional): Whether to execute checks or not.
Defaults to True.
"""
PRIORITY: int = 1
TEMPLATES = TemplateProvider.TEMPLATES
_dynamic = ["vscode", "pylint"]
def __init__(self, templates=None, run_checks=True, **kwargs):
self._templates = templates or []
super().__init__(**kwargs)
self.run_checks = run_checks
@property
def config(self):
"""Template config.
Returns:
dict: Current configuration
"""
return self.parent.config
def get_provider(self, templates):
return TemplateProvider(templates, run_checks=self.run_checks, log=self.log)
def load(self, **kwargs):
"""Loads project templates."""
self.provider = self.get_provider(self.config.get("config"))
templates = [k for k, v in self.config.get("config").items() if v]
self.log.debug(f"Loading Templates: {templates}")
self.provider = TemplateProvider(templates, **kwargs)
self.update()
def create(self):
"""Generates project files.
Returns:
dict: Project context
"""
self.log.title("Rendering Templates")
self.log.info("Populating Stub Info...")
for key in self._templates:
if key in self._dynamic:
self.config.add("config" + "/" + key, True)
self.provider = self.get_provider(self._templates)
for t in self.provider.templates:
self.provider.render_to(t, self.parent.path, **self.parent.context.raw())
self.log.success("Stubs Injected!")
return self._templates
def update(self):
"""Updates project files.
Returns:
dict: Project context
"""
self.provider = self.get_provider(self.config.get("config"))
self.log.debug(f"updating templates with context: {self.parent.context.raw()}")
for tmp in self.provider.templates:
self.provider.update(tmp, self.parent.path, **self.parent.context.raw())
return self.parent.context
================================================
FILE: micropy/project/project.py
================================================
"""Hosts functionality relating to generation of user projects."""
from pathlib import Path
from typing import Any, Iterator, List, Optional, Type
from boltons.queueutils import PriorityQueue
from micropy.config import Config, DictConfigSource
from micropy.logger import Log, ServiceLog
from micropy.project.modules import ProjectModule
class Project(ProjectModule):
"""Micropy Project.
Args:
path (str): Path to project root.
name (str, optional): Name of Project.
Defaults to None. If none, uses name of current directory.
"""
def __init__(self, path: str, name: Optional[str] = None, **kwargs: Any):
self._children: List[Type[ProjectModule]] = []
self.path: Path = Path(path).absolute()
self.data_path: Path = self.path / ".micropy"
self.info_path: Path = self.path / "micropy.json"
self.cache_path: Path = self.data_path / ".cache"
self._cache = Config(self.cache_path)
self._context = Config(source_format=DictConfigSource, default={"datadir": self.data_path})
self.name: str = name or self.path.name
default_config = {
"name": self.name,
}
self._config: Config = Config(self.info_path, default=default_config)
self.log: ServiceLog = Log.add_logger(self.name, show_title=False)
def __getattr__(self, name: str) -> Any:
results = iter([c.resolve_hook(name) for c in self._children])
for res in results:
if res is not None:
self.log.debug(f"Hook Resolved: {name} -> {res}")
return res
return self.__getattribute__(name)
@property
def exists(self) -> bool:
"""Whether this project exists.
Returns:
bool: True if it exists
"""
return self.info_path.exists()
@property
def config(self) -> Config:
"""Project Configuration.
Returns:
Config: Project Config Instance
"""
return self._config
@property
def context(self) -> Config:
"""Project context used in templates.
Returns:
Config: Current context
"""
return self._context
@property
def cache(self) -> Config:
"""Project wide cache.
Returns:
Cache instance
"""
return self._cache
def iter_children_by_priority(self) -> Iterator[Type[ProjectModule]]:
"""Iterate project modules by priority.
Yields:
the next child item
"""
pq = PriorityQueue()
for i in self._children:
pq.add(i, i.PRIORITY)
more = pq.peek(default=False)
while more:
yield pq.pop()
more = pq.peek(default=False)
def add(self, component, *args, **kwargs):
"""Adds project component.
Args:
component (Any): Component to add.
"""
child = component(*args, **kwargs, log=self.log, parent=self)
self._children.append(child)
self.log.debug(f"adding module: {type(child).__name__}")
def remove(self, component):
"""Removes project component.
Args:
component (Any): Component to remove.
"""
child = next(i for i in self._children if isinstance(i, component))
self._children.remove(child)
def load(self, **kwargs: Any) -> "Project":
"""Loads all components in Project.
Returns:
Current Project Instance
"""
self.name = self._config.get("name")
self.data_path.mkdir(exist_ok=True)
for child in self.iter_children_by_priority():
child.load(**kwargs)
return self
def create(self):
"""Creates new Project.
Returns:
Path: Path relative to current active directory.
"""
self.log.title(f"Initiating $[{self.name}]")
self.data_path.mkdir(exist_ok=True, parents=True)
ignore_data = self.data_path / ".gitignore"
ignore_data.write_text("*")
self.log.debug(f"Generated Project Context: {self.context}")
for child in self.iter_children_by_priority():
child.create()
self.info_path.touch()
self.config.sync()
self.log.success("Project Created!")
return self.path
def update(self):
"""Updates all project components.
Returns:
Current active project.
"""
self.log.debug("Updating all project modules...")
for child in self.iter_children_by_priority():
child.update()
return self
================================================
FILE: micropy/project/template/.gitignore
================================================
# Created by https://www.gitignore.io/api/python,visualstudiocode
# Edit at https://www.gitignore.io/?templates=python,visualstudiocode
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
# End of https://www.gitignore.io/api/python,visualstudiocode
### Micropy Cli ###
.micropy/
!micropy.json
!src/lib
================================================
FILE: micropy/project/template/.pylintrc
================================================
[MAIN]
# Loaded Stubs: {% for stub in stubs %} {{ stub }} {% endfor %}
init-hook='import sys;sys.path[1:1]=["src/lib",{% for path in paths %}"{{ path|replace("\\", "/") }}", {% endfor %} ]'
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=INFERENCE
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable = missing-docstring, line-too-long, trailing-newlines, broad-except, logging-format-interpolation, invalid-name, empty-docstring,
no-method-argument, assignment-from-no-return, too-many-function-args, unexpected-keyword-arg
# the 2nd line deals with the limited information in the generated stubs.
================================================
FILE: micropy/project/template/.vscode/extensions.json
================================================
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"ms-python.python", // micropy-cli: required for vscode micropython integrations
"VisualStudioExptTeam.vscodeintellicode" // micropy-cli: optional for advanced intellisense
]
}
================================================
FILE: micropy/project/template/.vscode/settings.json
================================================
{
// Loaded Stubs: {% for stub in stubs %} {{ stub }} {% endfor %}
{% if language_server == 'pylance' %}
"python.languageServer": "Pylance",
"python.analysis.autoSearchPath": true,
"python.autoComplete.extraPaths": {{ paths }},
"python.analysis.diagnosticSeverityOverrides": { "reportMissingModuleSource": "none" },
"python.analysis.typeCheckingMode": "basic",
"python.autoComplete.typeshedPaths": {{ typeshed_paths }},
"python.analysis.typeshedPaths": {{ typeshed_paths }},
{% endif %}
{% if language_server == 'mpls' %}
"python.jediEnabled": false,
"python.autoComplete.typeshedPaths": {{ paths }},
"python.analysis.typeshedPaths": {{ paths }},
{% endif %}
"python.linting.enabled": true,
"python.linting.pylintEnabled": true
}
================================================
FILE: micropy/project/template/pymakr.conf
================================================
{
"address": "192.168.4.1",
"username": "micro",
"password": "python",
"sync_folder": "src",
"open_on_start": true,
"safe_boot_on_upload": false,
"py_ignore": [
"pymakr.conf",
".vscode",
".gitignore",
".git",
"project.pymakr",
"env",
"venv",
".python-version",
".micropy/",
"micropy.json"
],
"fast_upload": false
}
================================================
FILE: micropy/project/template/src/boot.py
================================================
# boot.py - - runs on boot-up
================================================
FILE: micropy/project/template/src/main.py
================================================
# main.py
================================================
FILE: micropy/project/template.py
================================================
"""Module for handling jinja2 and MicroPy Templates."""
import json
import os
from itertools import chain
from pathlib import Path
from typing import Iterator, List, Literal, Union
from jinja2 import Environment, FileSystemLoader
from micropy.logger import Log
class Template:
"""Base Template Builder Class.
Args:
template (jinja2.Template): Jinja2 Template Instance
Raises:
NotImplementedError: Method must be overridden by subclass
"""
FILENAME = None
CHECKS = []
def __init__(self, template, **kwargs):
self.template = template
self.stubs = kwargs.get("stubs", [])
self.paths = kwargs.get("paths", [])
self.datadir = kwargs.get("datadir", None)
self.local_paths = kwargs.get("local_paths", [])
@property
def context(self):
"""Context for template."""
raise NotImplementedError
def iter_clean(self, data=None):
"""Yields cleaned data.
Args:
data (str, optional): Alternative data to clean.
Defaults to None. If none, uses template render.
"""
render = data or self.template.render(self.context)
for line in render.splitlines(True):
_line = line.strip()
if not _line.startswith("//"):
yield line
def run_checks(self):
"""Runs all template checks.
Returns:
bool: True if all checks passed
"""
if not self.CHECKS:
return True
results = [not ck() for ck in self.CHECKS]
return any(results)
def update(self, root):
"""Update Template File.
Args:
root (str): Path to project root
Raises:
NotImplementedError: Raised if Subclass has not Implemented Update
Returns:
func: Template Update Func
"""
update_func = getattr(self, "update_method", None)
update_kwargs = getattr(self, "update_kwargs", {})
if not update_func:
return None
path = root / self.FILENAME
return update_func(path, **update_kwargs)
def update_as_json(self, path):
"""Update template file as JSON.
Args:
path (str): File path to update
"""
render = json.loads("".join(self.iter_clean()))
data = json.loads("".join(self.iter_clean(path.read_text())))
data.update(render)
with path.open("w+") as f:
json.dump(data, f, indent=4)
def update_as_text(self, path, by_contains=None):
"""Update template file as text.
Args:
path (str): file path to update.
by_contains ([str], optional): Update lines that contain a string.
Defaults to None.
"""
r_lines = list(self.iter_clean())
upd_lines = []
if by_contains:
upd_lines = [
r_lines.index(line) for line in r_lines if any(i in line for i in by_contains)
]
with path.open("r+") as f:
c_lines = self.iter_clean(f.read())
f.seek(0)
for it, line in enumerate(c_lines):
_line = line
if it in upd_lines:
_line = r_lines[it]
f.write(_line)
def render_stream(self):
"""Returns template stream from context."""
stream = self.template.stream(self.context)
return stream
def iter_relative_paths(self, paths: List[Path], strict: bool = False) -> Iterator[Path]:
"""Iterate over a list of paths relative to project root.
Args:
paths: List of paths to make relative.
strict: Raises ValueError if True and path cannot be made relative.
Defaults to False.
Raises:
ValueError: Path could not be made relative and `strict` is True.
Yields:
Path relative to project root.
"""
for p in paths:
path = p
if not p.is_absolute():
path = Path(os.path.relpath(path)).resolve()
try:
yield path.relative_to(self.datadir.parent)
except ValueError:
yield path
def __str__(self):
cls_name = self.__class__.__name__
return f"{cls_name}[{self.template.name}]::[{self.context}]"
class GenericTemplate(Template):
"""Generic Template for files without context."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.FILENAME = self.template.name
@property
def context(self):
"""Empty Context."""
return {}
class CodeTemplate(Template):
"""Template for VSCode settings."""
FILENAME = ".vscode/settings.json"
# TODO: rewrite this module and have proper DI
language_server: Union[Literal["mpls"], Literal["pylance"]]
def __init__(self, *args, **kwargs):
self.update_method = self.update_as_json
# TODO: but for now, assume pylance.
self.language_server = kwargs.get("language_server", "pylance")
super().__init__(*args, **kwargs)
@property
def context(self):
"""VScode Config Context."""
paths = self.paths
if self.datadir:
paths = list(self.iter_relative_paths(self.paths, strict=True))
if self.local_paths:
paths.extend(self.iter_relative_paths(self.local_paths))
stub_paths = json.dumps([str(s) for s in paths])
ctx = {
"stubs": self.stubs or [],
"paths": stub_paths,
"typeshed_paths": json.dumps([str(s) for s in [*paths, "typings"]]),
"language_server": self.language_server,
}
return ctx
class PylintTemplate(Template):
"""Template for Pylint settings."""
FILENAME = ".pylintrc"
def __init__(self, *args, **kwargs):
self.update_method = self.update_as_text
self.update_kwargs = {"by_contains": ["sys.path[1:1]"]}
super().__init__(*args, **kwargs)
@property
def context(self):
"""Pylint Config Context."""
paths = self.paths
if self.datadir:
paths = list(self.iter_relative_paths(self.paths, strict=True))
if self.local_paths:
paths.extend(self.iter_relative_paths(self.local_paths))
ctx = {"stubs": self.stubs or [], "paths": paths or []}
return ctx
class TemplateProvider:
"""Template Provider."""
_template_files = {
"vscode": CodeTemplate,
"pylint": PylintTemplate,
"vsextensions": ".vscode/extensions.json",
"pymakr": "pymakr.conf",
"main": "src/main.py",
"boot": "src/boot.py",
"gitignore": ".gitignore",
}
ENVIRONMENT = None
TEMPLATE_DIR = Path(__file__).parent / "template"
TEMPLATES = {
"vscode": (
["vscode", "vsextensions"],
"VSCode Settings for Autocompletion/Intellisense",
),
"pymakr": (["pymakr"], "Pymakr Configuration"),
"pylint": (["pylint"], "Pylint MicroPython Settings"),
"gitignore": (["gitignore"], "Git Ignore Template"),
"bootstrap": (["main", "boot"], "main.py & boot.py files"),
}
def __init__(self, templates, log=None, **kwargs):
"""Template Factory.
Args:
templates ([str]): List of Templates to use
log (callable, optional): Log instance to use.
Defaults to None. If none, creates a new one.
run_checks (bool, optional): Whether to run template checks.
Defaults to True.
"""
self.run_checks = kwargs.get("run_checks", True)
self.template_names = set(
chain.from_iterable([self.TEMPLATES.get(t)[0] for t in templates])
)
self.files = {k: v for k, v in self._template_files.items() if k in self.template_names}
self.log = log or Log.add_logger("Templater")
if self.__class__.ENVIRONMENT is None:
loader = FileSystemLoader(str(self.TEMPLATE_DIR))
self.__class__.ENVIRONMENT = Environment(loader=loader)
self.log.debug("Created Jinja2 Environment")
self.log.debug(f"Detected Templates: {self.ENVIRONMENT.list_templates()}")
def get(self, name, *args, **kwargs):
"""Retrieve appropriate Template instance by name.
Args:
name (str): Name of template
Returns:
Template: Template instance
"""
temp_def = self.files.get(name)
file_attr = getattr(temp_def, "FILENAME", None)
filename = temp_def if file_attr is None else file_attr
temp_cls = GenericTemplate if file_attr is None else temp_def
file_temp = self.ENVIRONMENT.get_template(filename)
self.log.debug(f"Retrieving {name} as {temp_cls} from {file_temp.name}")
template = temp_cls(file_temp, *args, **kwargs)
return template
def render_to(self, name, parent_dir, *args, **kwargs):
"""Renders Template to a file under parent directory.
Args:
name (str): Name of template
parent_dir (str): Path to root dir
"""
template = self.get(name, **kwargs)
self.log.debug(f"Loaded: {str(template)}")
if self.run_checks:
self.log.debug(f"Verifying {template} requirements...")
template.run_checks()
out_dir = parent_dir / template.FILENAME
if not os.path.isfile(out_dir):
self.log.debug(f"Create: {out_dir}")
parent_dir.mkdir(exist_ok=True)
out_dir.parent.mkdir(exist_ok=True, parents=True)
self.log.debug(f"Rendered: {name} to {str(out_dir)}")
self.log.info(f"$[{name.capitalize()}] File Generated!")
stream = template.render_stream()
return stream.dump(str(out_dir))
else:
self.log.debug(f"Update: {out_dir}")
template.update(parent_dir)
self.log.info(f"$[{name.capitalize()}] File Updated!")
def update(self, name, root_dir, **kwargs):
"""Update existing Template.
Args:
name (str): Template name
root_dir (str): Path to project root
Returns:
Template: Updated Template Instance
"""
template = self.get(name, **kwargs)
self.log.debug(f"Loaded: {str(template)}")
try:
template.update(root_dir)
except FileNotFoundError:
self.log.debug("Template does not exist!")
return self.render_to(name, root_dir, **kwargs)
self.log.debug(f"Updated: {str(template)}")
return template
@property
def templates(self):
"""returns all template names."""
return self.files.keys()
================================================
FILE: micropy/py.typed
================================================
================================================
FILE: micropy/pyd/__init__.py
================================================
"""Module for interfacing with py-devices."""
from .abc import (
DevicePath,
HostPath,
MessageConsumer,
MetaPyDevice,
MetaPyDeviceBackend,
PyDeviceConsumer,
StreamConsumer,
)
from .consumers import ConsumerDelegate, MessageHandlers, ProgressStreamConsumer, StreamHandlers
from .pydevice import PyDevice
__all__ = [
"PyDevice",
"ConsumerDelegate",
"ProgressStreamConsumer",
"StreamHandlers",
"MessageHandlers",
"PyDeviceConsumer",
"MessageConsumer",
"StreamConsumer",
"MetaPyDevice",
"MetaPyDeviceBackend",
"DevicePath",
"HostPath",
]
================================================
FILE: micropy/pyd/abc.py
================================================
from __future__ import annotations
import abc
from io import BytesIO, StringIO
from pathlib import Path
from typing import Any, AnyStr, Generic, NewType, Protocol, TypeVar
HostPath = NewType("HostPath", str)
DevicePath = NewType("DevicePath", str)
class StartHandler(Protocol):
def __call__(self, *, name: str = None, size: int | None = None) -> Any:
...
class UpdateHandler(Protocol):
def __call__(self, *, size: int | None = None) -> Any:
...
class EndHandler(Protocol):
def __call__(self) -> Any:
...
class MessageHandler(Protocol):
def __call__(self, data: AnyStr) -> Any:
...
class StreamConsumer(Protocol):
@property
@abc.abstractmethod
def on_start(self) -> StartHandler:
...
@property
@abc.abstractmethod
def on_update(self) -> UpdateHandler:
...
@property
@abc.abstractmethod
def on_end(self) -> EndHandler:
...
class MessageConsumer(Protocol):
@property
@abc.abstractmethod
def on_message(self) -> MessageHandler:
...
class PyDeviceConsumer(MessageConsumer, StreamConsumer, Protocol):
...
class MetaPyDeviceBackend(abc.ABC):
location: str
@abc.abstractmethod
def establish(self, target: str) -> MetaPyDeviceBackend:
...
@abc.abstractmethod
def connect(self) -> None:
...
@abc.abstractmethod
def disconnect(self) -> None:
...
@abc.abstractmethod
def reset(self) -> None:
...
@abc.abstractmethod
def resolve_path(self, target_path: DevicePath | str | Path) -> DevicePath:
...
@property
@abc.abstractmethod
def connected(self) -> bool:
...
@abc.abstractmethod
def push_file(
self,
source_path: HostPath,
target_path: DevicePath,
*,
consumer: PyDeviceConsumer | None,
**kwargs,
) -> None:
...
@abc.abstractmethod
def pull_file(
self,
source_path: DevicePath,
target_path: HostPath,
*,
consumer: PyDeviceConsumer | None,
**kwargs,
) -> None:
...
@abc.abstractmethod
def list_dir(self, path: DevicePath) -> list[DevicePath]:
...
@abc.abstractmethod
def remove(self, path: DevicePath) -> None:
...
@abc.abstractmethod
def copy_dir(
self,
source_path: DevicePath,
target_path: HostPath,
*,
consumer: PyDeviceConsumer | None,
**kwargs,
):
...
@abc.abstractmethod
def eval(self, command: str, *, consumer: MessageConsumer | None = None):
...
@abc.abstractmethod
def eval_script(
self,
contents: AnyStr,
target_path: DevicePath | None = None,
*,
consumer: PyDeviceConsumer | None = None,
):
...
AnyBackend = TypeVar("AnyBackend", bound=MetaPyDeviceBackend)
class MetaPyDevice(abc.ABC, Generic[AnyBackend]):
pydevice: AnyBackend
stream_consumer: StreamConsumer | None
message_consumer: MessageConsumer | None
@abc.abstractmethod
def connect(self) -> None:
...
@abc.abstractmethod
def disconnect(self) -> None:
...
@abc.abstractmethod
def copy_to(self, source_path: HostPath, target_path: DevicePath) -> None:
...
@abc.abstractmethod
def copy_from(
self, source_path: DevicePath, target_path: HostPath, *, verify_integrity: bool = True
) -> None:
...
@abc.abstractmethod
def remove(self, target_path: DevicePath) -> None:
...
@abc.abstractmethod
def run_script(
self, content: AnyStr | StringIO | BytesIO, target_path: DevicePath | None = None
):
...
================================================
FILE: micropy/pyd/backend_rshell.py
================================================
from __future__ import annotations
from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING, AnyStr, cast
from micropy.exceptions import PyDeviceConnectionError
from micropy.pyd.abc import (
DevicePath,
HostPath,
MessageConsumer,
MessageHandler,
MetaPyDeviceBackend,
PyDeviceConsumer,
)
if TYPE_CHECKING:
from typing import type_check_only # pragma: no cover
@type_check_only # pragma: no cover
class RShell:
ASCII_XFER: bool
QUIET: bool
def connect(self, port: str):
...
try:
import rshell.main as rsh # type: ignore
from rshell.pyboard import Pyboard, PyboardError # type: ignore
except (
ImportError,
ModuleNotFoundError,
): # pragma: no cover
rsh = object() # type: ignore
if TYPE_CHECKING:
rsh: RShell = cast(RShell, object()) # type: ignore
PyboardError = RuntimeError
Pyboard = object()
class RShellConsumer:
consumer: MessageHandler
def __init__(self, child_consumer: MessageHandler):
self._outline: list[str] = []
self.consumer = child_consumer
def _output(self, data: str):
"""Yields everything up to a newline.
Args:
data (str): Anything to yield before newline
"""
if data == "\n":
line = "".join(self._outline)
self._outline = []
yield line
self._outline.append(data)
def on_message(self, char: bytes):
"""Pyboard data consumer.
When a full line of output is detected,
it is formatted then logged to stdout
and log file.
Args:
char (byte): Byte from PyBoard
Returns:
str: Converted char
"""
_char = char.decode("utf-8")
line = next(self._output(_char), None)
if line:
self.consumer(line)
return char
class RShellPyDeviceBackend(MetaPyDeviceBackend):
_connected: bool = False
_verbose: bool = False
_rsh: rsh
_pydevice: Pyboard
_dev_port: str
_repl_active: bool = False
@property
def _pyb_root(self) -> str:
"""pyboard root dirname."""
if self.connected:
dev = rsh.find_serial_device_by_port(self.location)
return getattr(dev, "name_path", "/pyboard/")
return ""
@property
def connected(self) -> bool:
return self._connected
def resolve_path(self, path: str | DevicePath | Path) -> DevicePath:
_path = path
if str(path)[0] == "/":
_path = str(path)[1:]
pyb_path = f"{self._pyb_root}{_path}"
return DevicePath(pyb_path)
def establish(self, target: str) -> RShellPyDeviceBackend:
self._rsh = rsh
self._rsh.ASCII_XFER = False
self._rsh.QUIET = not self._verbose
self.location = target
return self
def connect(self) -> None:
try:
self._rsh.connect(self.location)
except (SystemExit, Exception) as e:
raise PyDeviceConnectionError(self.location) from e
self._connected = True
dev = self._rsh.find_serial_device_by_port(self.location)
if dev is None:
raise PyDeviceConnectionError(self.location)
self._pydevice = dev.pyb
def disconnect(self) -> None:
return
def reset(self) -> None:
return
def push_file(self, source_path: HostPath, target_path: DevicePath = None, **_) -> None:
"""Copies file to pyboard.
Args:
source_path (str): path to file
target_path (str, optional): dest on pyboard. Defaults to None.
If None, file is copied to pyboard root.
Returns:
str: path to dest on pyboard
"""
src_path = Path(source_path).resolve()
_dest = target_path or src_path.name
dest = self.resolve_path(_dest)
self._rsh.cp(str(src_path), str(dest))
def pull_file(self, source_path: DevicePath, target_path: HostPath, **kwargs) -> None:
host_dest = Path(target_path).resolve()
device_src = self.resolve_path(source_path)
self._rsh.cp(str(device_src), str(host_dest))
def list_dir(self, path: DevicePath) -> list[DevicePath]:
"""List directory on pyboard.
Args:
path: path to directory
"""
dir_path = self.resolve_path(path)
tree = self._rsh.auto(rsh.listdir, str(dir_path))
return tree
def copy_dir(
self, source_path: HostPath | DevicePath, target_path: HostPath | DevicePath, **rsync
):
"""Copy directory from pyboard to machine.
Args:
source_path: path to directory
target_path: destination to copy to
rsync (dict, optional): additional args to pass to rsync call.
Defaults to {}
"""
dir_path = self.resolve_path(source_path)
dest_path = Path(str(target_path))
rsync_args = {
"recursed": True,
"mirror": False,
"dry_run": False,
"print_func": lambda *args: None,
"sync_hidden": False,
}
rsync.pop("consumer", None)
self._rsh.rsync(dir_path, str(dest_path), **{**rsync_args, **rsync})
return dest_path
@contextmanager
def repl(self):
"""Pyboard raw repl context manager."""
if self._repl_active:
yield self._pydevice
else:
self._pydevice.enter_raw_repl()
self._repl_active = True
try:
yield self._pydevice
finally:
self._pydevice.exit_raw_repl()
self._repl_active = False
def eval(self, command: str, *, consumer: MessageConsumer = None):
"""Execute bytes on pyboard."""
_handler = None if consumer is None else RShellConsumer(consumer.on_message).on_message
ret, ret_err = self._pydevice.exec_raw(command, data_consumer=_handler)
if ret_err:
raise PyboardError("exception", ret, ret_err)
return ret
def eval_script(
self,
contents: AnyStr,
target_path: DevicePath | None = None,
*,
consumer: PyDeviceConsumer = None,
):
_contents: str | bytes = contents
if isinstance(_contents, bytes):
_contents = _contents.decode()
with self.repl():
try:
out_bytes = self.eval(_contents, consumer=consumer)
except PyboardError as e:
raise Exception(str(e)) from e
out = out_bytes.decode("utf-8")
return out
def remove(self, path: DevicePath) -> None:
self._rsh.rm(str(path))
================================================
FILE: micropy/pyd/backend_upydevice.py
================================================
from __future__ import annotations
import binascii
import hashlib
import io
import random
import stat
import string
import time
from functools import wraps
from pathlib import Path, PurePosixPath
from typing import AnyStr, Callable, Generator, Optional, TypeVar, Union
import upydevice
from boltons import iterutils
from micropy.exceptions import PyDeviceConnectionError, PyDeviceError, PyDeviceFileIntegrityError
from rich import print
from typing_extensions import ParamSpec, TypeAlias
from upydevice.phantom import UOS as UPY_UOS
from .abc import DevicePath, HostPath, MessageConsumer, MetaPyDeviceBackend, PyDeviceConsumer
from .consumers import NoOpConsumer
AnyUPyDevice: TypeAlias = Union[upydevice.SerialDevice, upydevice.WebSocketDevice]
BUFFER_SIZE = 512
T = TypeVar("T")
P = ParamSpec("P")
class UOS(UPY_UOS):
@upydevice.upy_cmd_c_r()
def stat(self, path):
return self.dev_dict
def retry(fn: Callable[P, T]) -> Callable[P, T | None]:
@wraps(fn)
def _wrapper(self_: UPyDeviceBackend, *args: P.args, **kwargs: P.kwargs) -> T | None:
_result: T | None = None
retry_count = 0
while retry_count < 4:
try:
if (integrity := kwargs.pop("verify_integrity", None)) is not None:
# skip integrity check on last retry as last ditch.
kwargs["verify_integrity"] = integrity and retry_count < 3
if integrity and not kwargs["verify_integrity"]:
print("Attempting again without file integrity check...")
_result = fn(self_, *args, **kwargs) # type: ignore
except PyDeviceFileIntegrityError as e:
retry_count += 1
print(e)
self_.reset()
except Exception as e:
retry_count += 1
self_.BUFFER_SIZE = BUFFER_SIZE // pow(2, retry_count + 1)
print("reducing buffer size to:", self_.BUFFER_SIZE)
print(e)
self_.reset()
else:
break
return _result
return _wrapper # type: ignore
class UPyDeviceBackend(MetaPyDeviceBackend):
BUFFER_SIZE: int = BUFFER_SIZE
_pydevice: AnyUPyDevice
_uos: UOS | None = None
def _ensure_connected(self):
if not self.connected:
raise PyDeviceError("No currently connected device found!")
def _rand_device_path(self) -> DevicePath:
name = "".join(random.sample(string.ascii_lowercase, 6))
return self.resolve_path(DevicePath(name))
@property
def uos(self) -> UOS:
self._ensure_connected()
if not self._uos:
self._uos = UOS(self._pydevice)
return self._uos
def _pyb_root(self) -> DevicePath:
results = self.uos.stat("/flash")
if "Traceback" or "ENOENT" in results:
return DevicePath("/")
return DevicePath("/flash")
def resolve_path(self, path: DevicePath | str | Path) -> DevicePath:
_root = PurePosixPath(self._pyb_root())
_path = PurePosixPath(path)
if _path.is_absolute():
if _root == _path or _root in list(_path.parents):
return DevicePath(str(_path))
_path = _path.relative_to(list(_path.parents)[-1])
return DevicePath(str(_root / _path))
def establish(self, target: str) -> UPyDeviceBackend:
self.location = target
self._pydevice = upydevice.Device(target, init=True, autodetect=True)
return self
def connect(self):
try:
self._pydevice.connect()
except (SystemExit, Exception) as e:
raise PyDeviceConnectionError(self.location) from e
def disconnect(self):
if self.connected:
self._pydevice.disconnect()
def reset(self):
self._pydevice.reset()
time.sleep(2)
self._pydevice.connect()
time.sleep(4)
@property
def connected(self) -> bool:
return False if getattr(self, "_pydevice", None) is None else self._pydevice.connected
def list_dir(self, path: DevicePath) -> list[DevicePath]:
return [DevicePath(p) for p in self.uos.listdir(self.resolve_path(path))]
def iter_files(self, path: DevicePath) -> Generator[DevicePath, None, None]:
path = self.resolve_path(path)
self._pydevice.cmd("import uos", silent=True)
results = self._pydevice.cmd(f"list(uos.ilistdir('{path}'))", silent=True, rtn_resp=True)
if not results:
return
for file_result in results:
name, type_, _, _ = file_result
abs_path = PurePosixPath(path) / name
if type_ == stat.S_IFDIR:
yield from self.iter_files(abs_path)
else:
yield abs_path
def copy_dir(
self,
source_path: DevicePath,
target_path: HostPath,
exclude_integrity: Optional[set[str]] = None,
**kwargs,
):
target_path = Path(str(target_path)) # type: ignore
source_path = self.resolve_path(source_path)
exclude_integrity = exclude_integrity or set()
for file_path in self.iter_files(source_path):
rel_path = PurePosixPath(file_path).relative_to(
list(PurePosixPath(file_path).parents)[-1]
)
# handles os-path conversion
file_dest = Path(target_path / rel_path)
file_dest.parent.mkdir(parents=True, exist_ok=True)
integ_exclude = (
file_path in exclude_integrity or Path(file_path).name in exclude_integrity
)
integrity = kwargs.pop("verify_integrity", True) and not integ_exclude
self.pull_file(
file_path, HostPath(str(file_dest)), verify_integrity=integrity, **kwargs
)
def push_file(
self, source_path: HostPath, target_path: DevicePath, binary: bool = False, **kwargs
) -> None:
src_path = Path(str(source_path))
src_contents = src_path.read_bytes() if binary else src_path.read_text()
self.write_file(src_contents, target_path, **kwargs)
def pull_file(self, source_path: DevicePath, target_path: HostPath, **kwargs) -> None:
src_path = self.resolve_path(source_path)
targ_path = Path(str(target_path))
source_contents = self.read_file(src_path, **kwargs)
if source_contents is None:
# TODO: properly report failure to read/copy file.
return None
targ_path.write_text(source_contents)
def _iter_hex_chunks(self, content: str):
chunked_content = iterutils.chunked_iter(content, self.BUFFER_SIZE)
for chunk in chunked_content:
hex_chunk = binascii.hexlify(chunk.encode())
yield hex_chunk
@retry
def write_file(
self,
contents: str | bytes,
target_path: DevicePath,
*,
consumer: PyDeviceConsumer = NoOpConsumer,
) -> None:
is_bytes = isinstance(contents, bytes)
target_path = self.resolve_path(target_path)
self._pydevice.cmd("import gc")
self._pydevice.cmd("import ubinascii")
self._pydevice.cmd(f"f = open('{str(target_path)}', 'wb')")
content_iter = (
iterutils.chunked_iter(contents, self.BUFFER_SIZE)
if is_bytes
else self._iter_hex_chunks(contents)
)
content_size = len(contents)
consumer.on_start(name=f"Writing {str(target_path)}", size=content_size)
for chunk in content_iter:
cmd = (
f"contents = {chunk}; f.write(contents)"
if is_bytes
else f"contents = ubinascii.unhexlify('{chunk.decode()}'); f.write(contents)"
)
self._pydevice.cmd(cmd, silent=True)
consumer.on_update(size=len(chunk))
consumer.on_end()
self._pydevice.cmd("f.close()")
self._pydevice.cmd("import gc; gc.collect()")
def _compute_chunk_size(self) -> int:
mem_free = int(
self._pydevice.cmd("import gc;_=gc.collect();gc.mem_free()", rtn_resp=True, silent=True)
)
return min(mem_free // 4, 4096)
def _compute_device_file_digest(
self,
device_path: DevicePath,
*,
chunk_size: int = 256,
content_size: Optional[int] = None,
pos: int = 0,
) -> str:
checksum_cmd = ";".join(
[
"import ubinascii, uhashlib, gc",
"f=open('{path}', 'rb')",
"sha = uhashlib.sha256()",
"__=[sha.update(f.read({chunk_size})) and gc.collect() for _ in range({pos}, {file_size}, {chunk_size})]",
"ubinascii.hexlify(sha.digest()).decode()",
]
)
if content_size is None:
content_size = self.uos.stat(str(device_path))[6]
sum_cmd = checksum_cmd.format(
path=str(device_path), chunk_size=chunk_size, file_size=content_size, pos=pos
)
return self._pydevice.cmd(sum_cmd, silent=True, rtn_resp=True)
@retry
def read_file(
self,
target_path: DevicePath,
*,
consumer: PyDeviceConsumer = NoOpConsumer,
verify_integrity: bool = True,
) -> str:
target_path = self.resolve_path(target_path)
read_chunk_cmd = (
"f=open('{path}', 'rb');_=f.seek({pos});ch=f.read({chunk_size});f.close();ch"
)
content_size = self.uos.stat(str(target_path))[6]
buffer = io.BytesIO()
pos = 0
chunk_size = self._compute_chunk_size()
consumer.on_start(
name=f"Reading {Path(target_path).name} (xsize: {chunk_size})", size=int(content_size)
)
hasher = hashlib.sha256()
while pos < content_size:
try:
cmd = read_chunk_cmd.format(path=str(target_path), pos=pos, chunk_size=chunk_size)
next_chunk = self._pydevice.cmd(cmd, rtn_resp=True, silent=True)
except Exception as e:
consumer.on_message(f"Failed to read chunk; retrying ({e})")
self.reset()
chunk_size = self._compute_chunk_size()
continue
if len(next_chunk) == 0:
consumer.on_message("Failed to read chunk (no data); retrying.")
self.reset()
continue
hasher.update(next_chunk)
buffer.write(next_chunk)
pos += chunk_size
consumer.on_update(size=len(next_chunk))
consumer.on_end()
if verify_integrity:
device_sum = self._compute_device_file_digest(
target_path, chunk_size=chunk_size, content_size=content_size, pos=0
)
digest = hasher.hexdigest()
if device_sum != digest:
raise PyDeviceFileIntegrityError(
device_path=Path(target_path).name, device_sum=device_sum, digest=digest
)
consumer.on_message(f"Verified integrity: {Path(target_path).name}")
value = buffer.getvalue().decode()
return value
def eval(self, command: str, *, consumer: MessageConsumer = NoOpConsumer) -> str | None:
return self._pydevice.cmd(
command, follow=True, pipe=lambda m, *args, **kws: consumer.on_message(m)
)
def eval_script(
self,
contents: AnyStr,
target_path: DevicePath | None = None,
*,
consumer: PyDeviceConsumer = NoOpConsumer,
):
_target_path = (
self.resolve_path(target_path) if target_path else f"{self._rand_device_path()}.py"
)
self.write_file(contents, DevicePath(_target_path), consumer=consumer)
self.eval(f"import {Path(_target_path).stem}", consumer=consumer)
self.uos.remove(str(_target_path))
def remove(self, path: DevicePath) -> None:
self.uos.remove(str(path))
================================================
FILE: micropy/pyd/consumers.py
================================================
from __future__ import annotations
from functools import partialmethod
from typing import Any, Callable, NamedTuple, cast
from micropy.pyd.abc import (
EndHandler,
MessageConsumer,
MessageHandler,
StartHandler,
StreamConsumer,
UpdateHandler,
)
from tqdm import tqdm
class ProgressStreamConsumer:
bar: tqdm
def __init__(
self,
on_description: Callable[
[str, dict[str, Any] | None], tuple[str, dict[str, Any] | None]
] = None,
**kwargs,
):
self._on_description = on_description or (lambda s, cfg: (s, cfg))
def on_start(self, *, name: str = None, size: int | None = None):
bar_format = "{l_bar}{bar}| [{n_fmt}/{total_fmt} @ {rate_fmt}]"
tqdm_kwargs = {
"unit_scale": True,
"unit_divisor": 1024,
"bar_format": bar_format,
}
_name, _tqdm_kws = self._on_description(name or "", tqdm_kwargs)
# todo: use union operator when min py version is 3.9.
tqdm_kwargs.update(_tqdm_kws or dict())
self.bar = tqdm(total=size, unit="B", **tqdm_kwargs)
def on_update(self, *, size: int | None = None):
self.bar.update(size)
def on_end(self):
self.bar.close()
class ConsumerDelegate:
consumers: list[StreamConsumer | MessageConsumer]
def __init__(self, *consumers: StreamConsumer | MessageConsumer | None):
self.consumers = [i for i in consumers if i]
def consumer_for(self, action: str, *args, **kwargs):
_consumer = next((i for i in self.consumers if hasattr(i, action)), None)
if _consumer is None:
# default noop
return
return getattr(_consumer, action)(*args, **kwargs)
on_message = cast(MessageHandler, partialmethod(consumer_for, "on_message"))
on_start = cast(StartHandler, partialmethod(consumer_for, "on_start"))
on_update = cast(UpdateHandler, partialmethod(consumer_for, "on_update"))
on_end = cast(EndHandler, partialmethod(consumer_for, "on_end"))
class StreamHandlers(NamedTuple):
on_start: StartHandler
on_update: UpdateHandler
on_end: EndHandler
class MessageHandlers(NamedTuple):
on_message: MessageHandler
def _no_op(*args, **kwargs):
return None
NoOpStreamConsumer = StreamHandlers(on_start=_no_op, on_update=_no_op, on_end=_no_op)
NoOpMessageConsumer = MessageHandlers(on_message=_no_op)
NoOpConsumer = ConsumerDelegate(NoOpMessageConsumer, NoOpMessageConsumer)
================================================
FILE: micropy/pyd/pydevice.py
================================================
from __future__ import annotations
from io import BytesIO, StringIO
from pathlib import Path
from typing import AnyStr, Generic, Optional, Type
from .abc import (
AnyBackend,
DevicePath,
HostPath,
MessageConsumer,
MetaPyDevice,
MetaPyDeviceBackend,
StreamConsumer,
)
from .backend_upydevice import UPyDeviceBackend
from .consumers import ConsumerDelegate
class PyDevice(MetaPyDevice[AnyBackend], Generic[AnyBackend]):
pydevice: AnyBackend
consumer: ConsumerDelegate
def __init__(
self,
location: str,
*,
backend: Type[MetaPyDeviceBackend] = UPyDeviceBackend,
auto_connect: bool = True,
stream_consumer: StreamConsumer = None,
message_consumer: MessageConsumer = None,
delegate_cls: Type[ConsumerDelegate] = ConsumerDelegate,
):
self.pydevice = backend().establish(location)
self.consumer = delegate_cls(stream_consumer, message_consumer)
if auto_connect and self.pydevice:
self.pydevice.connect()
def copy_from(
self,
source_path: DevicePath,
target_path: HostPath,
*,
verify_integrity: bool = True,
exclude_integrity: Optional[set[str]] = None,
) -> None:
src_path = Path(str(source_path))
# 'is_dir/file' only works on existing paths.
if not src_path.suffix:
return self.pydevice.copy_dir(
DevicePath(source_path),
target_path,
consumer=self.consumer,
verify_integrity=verify_integrity,
exclude_integrity=exclude_integrity,
)
return self.pydevice.pull_file(
DevicePath(source_path),
target_path,
consumer=self.consumer,
verify_integrity=verify_integrity,
)
def copy_to(self, source_path: HostPath, target_path: DevicePath, **kwargs) -> None:
src_path = Path(str(source_path))
host_exists = src_path.exists()
if (host_exists and src_path.is_dir()) or (not host_exists and not src_path.suffix):
raise RuntimeError("Copying dirs to device is not yet supported!")
return self.pydevice.push_file(source_path, target_path, consumer=self.consumer, **kwargs)
def remove(self, target_path: DevicePath) -> None:
return self.pydevice.remove(target_path)
def connect(self):
return self.pydevice.connect()
def disconnect(self):
return self.pydevice.disconnect()
def run_script(
self, content: AnyStr | StringIO | BytesIO, target_path: DevicePath | None = None
):
_content = (
content
if isinstance(
content,
(
str,
bytes,
),
)
else content.read()
)
return self.pydevice.eval_script(_content, target_path, consumer=self.consumer)
def run(self, content: str) -> str | None:
return self.pydevice.eval(content, consumer=self.consumer)
================================================
FILE: micropy/stubs/__init__.py
================================================
"""
micropy.stubs
~~~~~~~~~~~~~~
This module contains all functionality relating
to stub files/frozen modules and their usage in MicropyCli
"""
from . import source
from .manifest import StubsManifest
from .package import AnyStubPackage, StubPackage
from .repo import StubRepository
from .repo_package import StubRepositoryPackage
from .repositories import MicropyStubPackage, MicropythonStubsManifest, MicropythonStubsPackage
from .repository_info import RepositoryInfo
from .stubs import StubManager
__all__ = [
"StubManager",
"source",
"StubsManifest",
"StubPackage",
"AnyStubPackage",
"StubRepository",
"MicropyStubPackage",
"MicropythonStubsPackage",
"MicropythonStubsManifest",
"RepositoryInfo",
"StubRepositoryPackage",
]
================================================
FILE: micropy/stubs/manifest.py
================================================
import abc
from typing import FrozenSet, Generic
from micropy.stubs.package import AnyStubPackage, StubPackage
from micropy.stubs.repository_info import RepositoryInfo
from pydantic import Field
from pydantic.generics import GenericModel
from typing_extensions import Annotated
class StubsManifest(GenericModel, Generic[AnyStubPackage], abc.ABC):
class Config:
frozen = True
repository: RepositoryInfo
packages: Annotated[FrozenSet[AnyStubPackage], Field(repr=False)]
@abc.abstractmethod
def resolve_package_url(self, package: StubPackage) -> str:
"""Resolve package to a stub source."""
def resolve_package_absolute_name(self, package: StubPackage) -> str:
"""Resolve package absolute name."""
return "/".join([self.repository.name, package.name])
def resolve_package_versioned_name(self, package: StubPackage) -> str:
"""Resolve package versioned absolute name."""
return "-".join([package.name, package.version])
def resolve_package_absolute_versioned_name(self, package: StubPackage) -> str:
"""Resolve package versioned absolute name."""
return "-".join([self.resolve_package_absolute_name(package), package.version])
================================================
FILE: micropy/stubs/package.py
================================================
from __future__ import annotations
from typing import TypeVar
from pydantic import BaseModel
class StubPackage(BaseModel):
class Config:
frozen = True
allow_population_by_field_name = True
name: str
version: str
@property
def package_name(self) -> str:
return self.name
AnyStubPackage = TypeVar("AnyStubPackage", bound=StubPackage)
================================================
FILE: micropy/stubs/repo.py
================================================
from __future__ import annotations
import collections
import inspect
from typing import TYPE_CHECKING, ClassVar, Generator, Iterator, Optional, Type
import attrs
import micropy.exceptions as exc
from boltons.typeutils import get_all_subclasses
from .manifest import StubsManifest
from .repo_package import StubRepositoryPackage
if TYPE_CHECKING:
from .repository_info import RepositoryInfo
@attrs.define
class StubRepository:
manifests: list[StubsManifest] = attrs.field(factory=list)
packages_index: collections.ChainMap[str, StubRepositoryPackage] = attrs.field(
factory=collections.ChainMap
)
versions_index: collections.ChainMap[str, list[StubRepositoryPackage]] = attrs.field(
factory=collections.ChainMap
)
manifest_types: ClassVar[list[Type[StubsManifest]]] = []
def __attrs_post_init__(self) -> None:
if not any(StubRepository.manifest_types):
StubRepository.manifest_types = [
klass
for klass in get_all_subclasses(StubsManifest)
if not inspect.isabstract(klass)
]
self.build_indexes()
@property
def packages(self) -> Iterator[StubRepositoryPackage]:
"""Iterate packages in repository."""
yield from self.packages_index.values()
def build_indexes(self) -> None:
"""Progressively builds indexes."""
for manifest in self.manifests:
pkg = next(iter(manifest.packages), None)
if pkg and manifest.resolve_package_absolute_versioned_name(pkg) in self.packages_index:
continue
packages_index = dict()
versions_index = collections.defaultdict(list)
for package in manifest.packages:
repo_package = StubRepositoryPackage(manifest=manifest, package=package)
packages_index[repo_package.absolute_versioned_name] = repo_package
versions_index[repo_package.name].append(repo_package)
self.packages_index = self.packages_index.new_child(packages_index)
self.versions_index = self.versions_index.new_child(dict(versions_index))
def add_repository(self, info: RepositoryInfo) -> StubRepository:
"""Creates a new `StubRepository` instance with a `StubManifest` derived from `info`.
Args:
info: `RepositoryInfo` instance.
Returns:
`StubRepository` instance.
"""
contents = info.fetch_source()
data = dict(repository=info, packages=contents)
for manifest_type in StubRepository.manifest_types:
try:
manifest = manifest_type.parse_obj(data)
except (
ValueError,
KeyError,
):
continue
else:
return attrs.evolve(
self,
manifests=[*self.manifests, manifest],
packages_index=self.packages_index,
versions_index=self.versions_index,
)
raise ValueError(f"Failed to determine manifest format for repo: {info}")
def search(
self, query: str, include_versions: bool = True
) -> Generator[StubRepositoryPackage, None, None]:
"""Search packages for `query`.
Args:
query: Search constraint.
include_versions: Whether to include versions in search results.
Returns:
A generator of `StubRepositoryPackage` objects.
"""
query = query.strip().lower()
for package_name in self.versions_index.keys():
if query in package_name.lower() or package_name.lower() in query:
if include_versions:
yield from self.versions_index[package_name]
continue
yield self.latest_for_package(self.versions_index[package_name][0])
def latest_for_package(
self, repo_package: StubRepositoryPackage
) -> Optional[StubRepositoryPackage]:
versions = self.versions_index[repo_package.name]
if len(versions) == 1:
return versions[0]
return max(versions, key=lambda x: x.package.version)
def resolve_package(self, name: str) -> StubRepositoryPackage:
"""Resolve a package name to a package path.
Args:
name: Package name.
Returns:
Package location.
Throws:
StubNotFound: When package cannot be resolved.
"""
for package in self.search(str(name)):
if package.match_exact(name) or package.match_exact(
"/".join([package.repo_name, name])
):
return package
latest = self.latest_for_package(package)
if latest and latest.name == name:
return latest
raise exc.StubNotFound(name)
================================================
FILE: micropy/stubs/repo_package.py
================================================
from __future__ import annotations
from typing import Iterator
import attrs
from micropy.stubs import StubPackage, StubsManifest
@attrs.frozen
class StubRepositoryPackage:
manifest: StubsManifest[StubPackage]
package: StubPackage
@property
def url(self) -> str:
return self.manifest.resolve_package_url(self.package)
@property
def repo_name(self) -> str:
return self.manifest.repository.name
@property
def name(self) -> str:
return self.package.name
@property
def version(self) -> str:
return self.package.version
@property
def absolute_name(self) -> str:
return self.manifest.resolve_package_absolute_name(self.package)
@property
def versioned_name(self) -> str:
return self.manifest.resolve_package_versioned_name(self.package)
@property
def absolute_versioned_name(self) -> str:
return self.manifest.resolve_package_absolute_versioned_name(self.package)
@property
def exact_matchers(self) -> Iterator[str]:
yield self.absolute_versioned_name
yield self.versioned_name
yield self.absolute_name
@property
def partial_matchers(self) -> Iterator[str]:
yield from self.exact_matchers
yield self.name
yield self.version
def match_exact(self, in_name: str) -> bool:
return in_name in self.exact_matchers
================================================
FILE: micropy/stubs/repositories/__init__.py
================================================
from .micropy import MicropyStubPackage, MicropyStubsManifest
from .micropython import MicropythonStubsManifest, MicropythonStubsPackage
__all__ = [
"MicropyStubsManifest",
"MicropyStubPackage",
"MicropythonStubsPackage",
"MicropythonStubsManifest",
]
================================================
FILE: micropy/stubs/repositories/micropy.py
================================================
from __future__ import annotations
from pathlib import PurePosixPath
from urllib import parse
from pydantic import Field, root_validator
from typing_extensions import Annotated
from ..manifest import StubsManifest
from ..package import StubPackage
class MicropyStubPackage(StubPackage):
name: str
version: Annotated[str, Field(alias="sha256sum")]
class MicropyStubsManifest(StubsManifest[MicropyStubPackage]):
location: str
path: str
@root_validator(pre=True)
def check(cls, values: dict):
pkgs = values["packages"]
if "packages" in pkgs:
values["location"] = pkgs["location"]
values["path"] = pkgs["path"]
values["packages"] = pkgs["packages"]
return values
def resolve_package_url(self, package: StubPackage) -> str:
base_path = PurePosixPath(parse.urlparse(self.location).path)
pkg_path = base_path / PurePosixPath(self.path) / PurePosixPath(package.name)
url = parse.urljoin(self.location, str(pkg_path))
return url
================================================
FILE: micropy/stubs/repositories/micropython.py
================================================
from __future__ import annotations
import functools
from typing import TYPE_CHECKING
from distlib.locators import locate
from distlib.version import NormalizedVersion
from pydantic import Field, validator
from typing_extensions import Annotated
from ..manifest import StubsManifest
from ..package import StubPackage
if TYPE_CHECKING:
from distlib.database import Distribution
@functools.total_ordering
class MicropythonStubsPackage(StubPackage):
name: str
version: Annotated[str, Field(alias="pkg_version")]
@property
def package_version(self) -> NormalizedVersion:
return NormalizedVersion(self.version)
def __lt__(self, other: MicropythonStubsPackage) -> bool:
return self.package_version < other.package_version
def __eq__(self, other: MicropythonStubsPackage) -> bool:
return self.name == other.name and self.version == other.version
class MicropythonStubsManifest(StubsManifest[MicropythonStubsPackage]):
@validator("packages", pre=True)
def _get_packages(cls, v: dict[str, dict]):
data = v["data"].values()
return list(data)
def resolve_package_url(self, package: StubPackage) -> str:
dist: Distribution = locate(f"{package.name} ({package.version})")
dist_url = next(i for i in dist.download_urls if "tar.gz" in i)
return dist_url
================================================
FILE: micropy/stubs/repository_info.py
================================================
from __future__ import annotations
from datetime import timedelta
from typing import Any
import requests
from cachier import cachier
from pydantic import BaseModel, HttpUrl
class RepositoryInfo(BaseModel):
name: str
display_name: str
source: HttpUrl
class Config:
frozen = True
@cachier(stale_after=timedelta(days=1), next_time=True)
def fetch_source(self) -> dict[str, Any]:
return requests.get(self.source).json()
================================================
FILE: micropy/stubs/source.py
================================================
"""
micropy.stubs.source
~~~~~~~~~~~~~~
This module contains abstractions for handling stub sources
and their location.
"""
from __future__ import annotations
import abc
import shutil
import tempfile
from contextlib import ExitStack, contextmanager
from functools import partial, reduce
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, ContextManager, Optional, Protocol, Union, cast
import attrs
import micropy.exceptions as exc
from micropy import utils
from micropy.logger import Log
from micropy.utils.types import PathStr
if TYPE_CHECKING:
from micropy.stubs.repo import StubRepository
class LocateStrategy(Protocol):
@abc.abstractmethod
def prepare(self, location: PathStr) -> Union[PathStr, tuple[PathStr, Callable[..., Any]]]:
...
logger = Log.add_logger(__name__, show_title=False)
@attrs.define
class StubSource:
"""Handles sourcing stubs."""
locators: list[LocateStrategy] = attrs.field()
location: Optional[PathStr] = attrs.field(default=None)
@locators.default
def _default_locators(self: StubSource) -> list[LocateStrategy]:
return [RemoteStubLocator(), StubInfoSpecLocator()]
def _do_locate(self, stack: ExitStack, path: PathStr, locator: LocateStrategy) -> PathStr:
logger.debug(f"running (strategy:{locator}) @ (location:{path})")
response = locator.prepare(path)
parts = iter(response if isinstance(response, tuple) else (response,))
path = next(parts, path)
teardown = next(parts, None)
if teardown:
logger.debug(f"adding teardown callback for: {locator}")
stack.callback(teardown)
logger.debug(f"results of (strategy:{locator}) -> (location:{path})")
return path
@contextmanager
def ready(self, location: Optional[PathStr] = None) -> ContextManager[PathStr]:
"""Yields prepared Stub Source.
Allows StubSource subclasses to have a preparation
method before providing a local path to itself.
Yields:
Resolved PathLike object to stub source
"""
with ExitStack() as stack:
reducer = cast(
Callable[[PathStr, LocateStrategy], PathStr], partial(self._do_locate, stack)
)
path = reduce(reducer, self.locators, location or self.location)
yield path
@attrs.define
class StubInfoSpecLocator(LocateStrategy):
def prepare(self, location: PathStr) -> PathStr:
info_path = next(Path(location).rglob("info.json"), None)
return location if info_path is None else info_path.parent
@attrs.define
class RemoteStubLocator(LocateStrategy):
"""Stub Source for remote locations."""
def _unpack_archive(self, file_bytes: bytes, path: PathStr) -> PathStr:
"""Unpack archive from bytes buffer.
Args:
file_bytes (bytes): Byte array to extract from
Must be from tarfile with gzip compression
path (str): path to extract file to
Returns:
path: path extracted to
"""
path = Path(utils.extract_tarbytes(file_bytes, path))
output = next(path.iterdir())
return output
def prepare(self, location: PathStr) -> tuple[PathStr, Optional[Callable[..., Any]]] | PathStr:
"""Retrieves and unpacks source.
Prepares remote stub resource by downloading and
unpacking it into a temporary directory.
This directory is removed via the returned teardown.
"""
if not utils.is_url(location):
logger.debug(f"{self}: {location} not viable, skipping...")
return location
tmp_dir = tempfile.mkdtemp()
tmp_path = Path(tmp_dir)
filename = utils.get_url_filename(location).split(".tar.gz")[0]
_file_name = "".join(logger.iter_formatted(f"$B[{filename}]"))
content = utils.stream_download(location, desc=f"{logger.get_service()} {_file_name}")
source_path = self._unpack_archive(content, tmp_path)
teardown = partial(shutil.rmtree, tmp_path)
return source_path, teardown
@attrs.define
class RepoStubLocator(LocateStrategy):
repo: StubRepository = attrs.field(repr=False)
def prepare(self, location: PathStr) -> Union[PathStr, tuple[PathStr, Callable[..., Any]]]:
if not self.repo:
return location
try:
source = self.repo.resolve_package(location)
except exc.StubNotFound as e:
logger.debug(f"{self}: {location} not found in repo, skipping... (exc: {e})")
return location
else:
return source.url
def get_source(location, **kwargs):
"""Factory for StubSource Instance.
Deprecated. Todo: Remove.
Args:
location (str): PathLike object or valid URL
Returns:
obj: Either Local or Remote StubSource Instance
"""
return StubSource(**kwargs, location=location)
================================================
FILE: micropy/stubs/stubs.py
================================================
from __future__ import annotations
import json
import shutil
from pathlib import Path
from typing import TYPE_CHECKING
from distlib import locators, metadata
from micropy import data, utils
from micropy.exceptions import StubError, StubValidationError
from micropy.logger import Log
from micropy.stubs import source
from packaging.utils import parse_sdist_filename
if TYPE_CHECKING:
from micropy.stubs.repo import StubRepository
class StubManager:
"""Manages a collection of Stubs.
Kwargs:
resource (str): Default resource path
repos ([StubRepo]): Repos for Remote Stubs
Raises:
StubError: a stub is missing a def file
StubValidationError: a stubs def file is not valid
Returns:
object: Instance of StubManager
"""
repo: StubRepository
_schema = data.SCHEMAS / "stubs.json"
_firm_schema = data.SCHEMAS / "firmware.json"
def __init__(self, resource=None, repos=None):
self._loaded = set()
self._firmware = set()
self.resource = resource
self.repo = repos
self.log = Log.add_logger("Stubs", stdout=False, show_title=False)
if self.resource:
self.load_from(resource, strict=False)
def __iter__(self):
return iter(self._loaded)
def __len__(self):
return len(self._loaded)
def iter_by_firmware(self, stubs=None):
"""Iterate stubs sorted by firmware.
Args:
stubs ([Stub], optional): Sublist of Stubs to iterate over.
Defaults to None. If none, uses all installed stubs.
"""
loaded = stubs or self._loaded
for firm in self._firmware:
stubs = [s for s in loaded if s.firmware == firm]
yield (firm, stubs)
other = [s for s in loaded if s.firmware is None]
yield ("Unknown", other)
def verbose_log(self, state):
"""Enable Stub logging to stdout.
Args:
state (bool): State to set
Returns:
bool: state
"""
self.log.stdout = state
return state
def _load(self, stub_source, strict=True, **kwargs):
"""Loads a stub into StubManager.
Args:
stub_source (StubSource): Stub Source Instance
strict (bool, optional): Raise Exception if stub fails to resolve.
Defaults to True.
Raises:
e: Exception raised by resolving failure
Returns:
Stub: Instance of Stub
"""
with stub_source.ready() as src_path:
if not self.is_valid(src_path):
self.log.debug("attempting to load stub from metadata.")
try:
infos = self.from_metadata(
parse_sdist_filename(stub_source.location.split("/")[-1])[0], src_path
)
kwargs["name"] = infos["name"]
except Exception as e:
self.log.debug(f"failed to load from metadata: {e}")
try:
stub_type = self._get_stubtype(src_path)
except Exception as e:
self.log.debug(f"{Path(src_path).name} failed to validate: {e}")
if strict:
raise e
else:
if stub_type is FirmwareStub:
fware = stub_type(src_path, **kwargs)
self._firmware.add(fware)
self.log.debug(f"Firmware Loaded: {fware}")
return fware
stub = stub_type(src_path, **kwargs)
fware = self.resolve_firmware(stub)
stub.firmware = fware
self._loaded.add(stub)
self.log.debug(f"Loaded: {stub}")
return stub
def resolve_firmware(self, stub):
"""Resolves FirmwareStub for DeviceStub instance.
Args:
stub (DeviceStub): Stub to resolve
Returns:
FirmwareStub: Instance of FirmwareStub
NoneType: None if an appropriate
FirmwareStub cannot be found
"""
fware_name = stub.firmware_name
self.log.info(f"Detected Firmware: $[{fware_name}]")
results = (f for f in self._firmware if f.firmware == fware_name)
fware = next(results, None)
if not fware:
try:
self.log.info("Firmware not found locally, attempting to install it...")
fware = self.add(fware_name)
except Exception:
self.log.error("Failed to resolve firmware!")
return None
else:
self.log.success(f"{fware_name} firmware added!")
return fware
return fware
def validate(self, path, schema=None):
"""Validates given stub path against its schema.
Args:
path (str): path to validate
schema (str, optional): Path to schema. Defaults to None.
If None, the DeviceStub schema is used.
Raises:
StubError: Raised if no info file can be found
StubValidationError: Raised if the info file fails validation
"""
self.log.debug(f"Validating: {path}")
schema = schema or self._schema
path = Path(path).resolve()
stub_info = path / "info.json"
val = utils.Validator(schema)
try:
val.validate(stub_info)
except FileNotFoundError as e:
self.log.error(f"missing info spec @ {path}", exception=e)
raise StubError(f"{path.name} contains no info file!") from e
except Exception as e:
self.log.error(f"validation error at {path}", exception=e)
raise StubValidationError(path, str(e)) from e
def _get_stubtype(self, path):
"""Resolves appropriate stub type.
Args:
path (str): path to stub
Returns:
cls: Appropriate class for stub
"""
try:
self.validate(path)
except StubValidationError:
try:
self.validate(path, schema=self._firm_schema)
except Exception as e:
raise e
else:
return FirmwareStub
except Exception as e:
raise e
else:
return DeviceStub
def is_valid(self, path):
"""Check if stub is valid without raising an exception.
Args:
path (str): path to stub
Returns:
bool: True if stub is valid
"""
try:
self._get_stubtype(path)
except Exception:
return False
else:
return True
def _check_existing(self, location):
"""check if location is or contains an existing stub.
Args:
location (str): name or path of Stub
Returns:
generator of existing stubs
"""
try:
do_recurse = self._should_recurse(location)
except StubError:
yield
else:
if do_recurse:
for s in (self._check_existing(p) for p in location.iterdir()):
yield next(s, None)
path_name = Path(location).name
stub = next(
(
s
for s in self._loaded
if any(t in (s.name, s.path.name) for t in (path_name, location))
),
None,
)
if stub:
yield stub
def load_from(self, directory, *args, **kwargs):
"""Recursively loads stubs from a directory.
Args:
directory (str): Path to load from
Returns:
[DeviceStub]: List of loaded Stubs
"""
dir_path = Path(str(directory)).resolve()
dirs = dir_path.iterdir()
sources = [source.StubSource([source.StubInfoSpecLocator()], location=d) for d in dirs]
stubs = []
for stub in sources.copy():
if self.is_valid(stub.location):
stub_type = self._get_stubtype(stub.location)
if stub_type is FirmwareStub:
sources.remove(stub)
self._load(stub, *args, **kwargs)
stubs.extend([self._load(s, *args, **kwargs) for s in sources])
return stubs
def _should_recurse(self, location):
"""Checks for multiple stubs in a location.
Args:
location (str): location of potential stub
Raises:
StubError: No info files could be found
Returns:
bool: True if multiple stubs are found
"""
if not Path(location).exists():
return False
path = Path(location).resolve()
info_glob = list(path.rglob("info.json"))
if len(info_glob) == 0:
raise StubError(f"{path.name} contains no info file!")
if len(info_glob) > 1:
return True
return False
def add(self, location, dest=None, force=False):
"""Add stub(s) from source.
Args:
source (str): path to stub(s)
dest (str, optional): path to copy stubs to.
Defaults to self.resource
force (bool, optional): overwrite existing stubs.
Defaults to False.
Raises:
TypeError: No resource or destination provided
"""
_dest = dest or self.resource
if not _dest:
raise TypeError("No Stub Destination Provided!")
dest = Path(str(_dest)).resolve()
stubs = [s for s in self._check_existing(location) if s is not None]
if any(stubs):
for stub in stubs:
if not force:
self.log.info(f"$[{stub}] is already installed!")
return stub
self.log.info(f"Uninstalling $[{stub.name}]...")
shutil.rmtree(stub.path)
if self._should_recurse(location):
return self.load_from(location, strict=False, copy_to=dest)
self.log.info("\nResolving stub...")
stub_source = source.StubSource(
[
source.RepoStubLocator(self.repo),
source.RemoteStubLocator(),
source.StubInfoSpecLocator(),
],
location,
)
return self._load(stub_source, copy_to=dest)
def from_stubber(self, path, dest):
"""Formats stubs generated by createstubs.py.
Creates a stub package from the stubs generated by
createstubs.py. Also attempts to auto-resolve the stubs
firmware name.
Args:
path (str): path to generated stubs
dest (str): path to output
Returns:
str: formatted stubs
"""
_path = Path(path).resolve()
dest = Path(dest).resolve()
mod_file = next(_path.rglob("modules.json"))
path = mod_file.parent
mod_data = json.load(mod_file.open())
dev_fware = mod_data["firmware"]
fname = dev_fware.get("name", None)
out_name = f"{dev_fware['sysname']}"
# TODO: Attempt to Autoresolve Firmware name and add it to info.json
if fname:
out_name = f"{out_name}-{fname}"
out_name = f"{out_name}-{dev_fware['version']}"
out_stub = dest / out_name
info_file = out_stub / "info.json"
stub_path = out_stub / "stubs"
out_stub.mkdir(exist_ok=True, parents=True)
json.dump(mod_data, info_file.open("w+"))
shutil.copytree(path, stub_path)
return out_stub
def from_metadata(self, package_name: str, path: Path) -> dict[str, str]:
"""Creates stub info.json meta from dist metadata.
Notes:
This method is a (very) dirty adapter (just like above...)
until this module can be rewritten from scratch.
"""
metadatas = (metadata.Metadata(path=p) for p in path.rglob("PKG-INFO"))
meta = next(m for m in metadatas if m.todict()["name"] == package_name)
info_path = path / "info.json"
name_parts = set(meta.todict()["name"].split("-"))
name_parts.remove("stubs")
# oh lawd, look away!!
dev_name = min(name_parts, key=lambda s: len(s))
name_parts.remove(dev_name)
firm_name = name_parts.pop()
firm = {
"ver": meta.version or "",
"port": dev_name,
"arch": "",
"sysname": dev_name,
"name": firm_name,
"mpy": 0,
"version": meta.version or "",
"machine": "",
"build": "",
"nodename": dev_name,
"platform": dev_name,
"family": "",
}
info_json = {
"firmware": firm,
"stubber": {"version": meta.version or ""},
"modules": [],
"name": package_name,
}
info_path.write_text(json.dumps(info_json))
# TODO: properly resolve requirements prior/external to this.
requires = (r for r in meta.run_requires if "stub" in r)
for req in requires:
dist = locators.locate(req)
if not dist:
continue
dist_url = next((i for i in dist.download_urls if ".tar.gz" in i), None)
if dist_url:
self.add(dist_url)
return info_json
def resolve_subresource(self, stubs, subresource):
"""Resolve or Create StubManager from list of stubs.
Args:
stubs ([Stub]): List of stubs to use in subresource
subresource (str): path to subresource
Returns:
StubManager: StubManager with subresource stubs
"""
for stub in stubs:
fware = stub.firmware
if fware:
link = subresource / fware.path.name
fware = FirmwareStub.resolve_link(fware, link)
link = subresource / stub.path.name
stub = DeviceStub.resolve_link(stub, link)
stub.firmware = fware
yield stub
class Stub:
"""Abstract Parent for Stub Related Classes.
Not Meant to be instantiated directly. Contains common logic
between different types of Stubs (ex. Firmware vs Device)
Raises:
NotImplementedError: name property is not implemented
Returns:
Instance of Stub
"""
def __init__(self, path, copy_to=None, **kwargs):
self.path = Path(path)
ref = self.path / "info.json"
self.info = json.load(ref.open())
if copy_to is not None:
self.copy_to(copy_to)
def find_root(self, path: Path) -> Path:
"""Attempt to find appropriate stub root."""
pyi_files = path.rglob("*.pyi")
if pyi_path := next(pyi_files, None):
return pyi_path.parent
return path
def copy_to(self, dest, name=None):
"""Copy stub to a directory."""
if not name:
dest = Path(dest) / self.path.name
shutil.copytree(self.path, dest)
self.path = dest.resolve()
return self
@classmethod
def resolve_link(cls, stub, link_path):
"""Resolve or Create Stub Symlink.
Args:
stub (Stub): stub to resolve
link_path (str): path to link
Returns:
Stub: Stub from symlink
"""
fware = stub.firmware
if utils.is_dir_link(link_path):
return cls(link_path, firmware=fware)
utils.create_dir_link(link_path, stub.path)
return cls(link_path, firmware=fware)
@property
def name(self):
"""Human friendly stub name."""
raise NotImplementedError
def __eq__(self, other):
return self.name == getattr(other, "name", None)
def __hash__(self):
return hash(self.name)
def __str__(self):
return self.name
class DeviceStub(Stub):
"""Handles Device Specific Stubs.
Args:
path (str): path to stub
copy_to (str, optional): Path to copy Stub to. Defaults to None.
Returns:
Device Stub Instance
"""
def __init__(self, path, copy_to=None, **kwargs):
super().__init__(path, copy_to, **kwargs)
stubs_path = self.path / "stubs"
self.stubs = self.find_root(stubs_path if stubs_path.exists() else self.path)
frozen_path = self.path / "frozen"
self.frozen = self.find_root(frozen_path if frozen_path.exists() else self.path)
stubber = self.info.get("stubber")
self.stub_version = stubber.get("version")
self.firm_info = self.info.get("firmware")
self.firmware = kwargs.get("firmware", None)
self.sysname = self.firm_info.get("sysname")
self.version = self.firm_info.get("version")
# TODO: make this module not garbage.
self._name = kwargs.get("name", self.info.get("name", None))
@property
def firmware_name(self):
"""Return an appropriate firmware name.
Returns:
str: Name of Firmware
"""
if isinstance(self.firmware, FirmwareStub):
return self.firmware.firmware
fware = self.firm_info.get("name", None)
if not fware:
fware = self.firm_info.get("firmware", "").strip()
fware.replace(" ", "-")
return fware
@property
def name(self):
if self._name:
return self._name
if not isinstance(self.firmware, FirmwareStub):
return f"{self.sysname}-{self.version}"
return f"{self.sysname}-{self.firmware_name}-{self.version}"
def __repr__(self):
return (
f"DeviceStub(sysname={self.sysname}, firmware="
f"{self.firmware_name}, version={self.version}, "
f"path={self.path})"
)
class FirmwareStub(Stub):
"""Handles Firmware Specific Modules.
Args:
path (str): path to stub
copy_to (str, optional): Path to copy Stub to. Defaults to None.
Returns:
FirmwareStub Instance
"""
def __init__(self, path, copy_to=None, **kwargs):
super().__init__(path, copy_to=copy_to, **kwargs)
self.frozen = self.path / "frozen"
self.repo = self.info.get("repo")
firmware = self.info.get("firmware").strip()
self.firmware = firmware.replace(" ", "-")
@property
def name(self):
return self.firmware
def __repr__(self):
return f"FirmwareStub(firmware={self.firmware}, repo={self.repo})"
================================================
FILE: micropy/utils/__init__.py
================================================
"""
micropy.utils
~~~~~~~~~~~~~~
This module provides utility functions that are used within
MicropyCli.
"""
from .decorators import lazy_property
from .helpers import (
create_dir_link,
ensure_existing_dir,
ensure_valid_url,
extract_tarbytes,
get_cached_data,
get_class_that_defined_method,
get_package_meta,
get_url_filename,
is_dir_link,
is_downloadable,
is_existing_dir,
is_update_available,
is_url,
iter_requirements,
search_xml,
stream_download,
)
from .stub import generate_stub
from .validate import Validator
================================================
FILE: micropy/utils/_compat.py
================================================
from __future__ import annotations
try:
from importlib import metadata as _metadata
except ImportError:
# compat for py <3.10
import importlib_metadata as metadata
else:
# workaround for
# https://github.com/python/mypy/issues/1393
metadata = _metadata
__all__ = ["metadata"]
================================================
FILE: micropy/utils/decorators.py
================================================
"""
micropy.utils.decorators
~~~~~~~~~~~~~~
This module contains generic decorators
used by MicropyCli
"""
__all__ = ["lazy_property"]
def lazy_property(fn):
attr = "_lazy__" + fn.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr):
setattr(self, attr, fn(self))
return getattr(self, attr)
return _lazy_property
================================================
FILE: micropy/utils/helpers.py
================================================
"""
micropy.utils.helpers
~~~~~~~~~~~~~~
This module contains generic utility helpers
used by MicropyCli
"""
from __future__ import annotations
import inspect
import io
import os
import shutil
import subprocess as subproc
import sys
import tarfile
import xml.etree.ElementTree as ET
from datetime import timedelta
from pathlib import Path
from typing import Iterable, Optional, Union
import requests
import requirements
from cachier import cachier
from packaging import version
from requests import exceptions as reqexc
from requests import utils as requtil
from tqdm import tqdm
from ._compat import metadata
from .types import PathStr
__all__ = [
"is_url",
"get_url_filename",
"ensure_existing_dir",
"ensure_valid_url",
"is_downloadable",
"is_existing_dir",
"stream_download",
"search_xml",
"get_package_meta",
"extract_tarbytes",
"iter_requirements",
"create_dir_link",
"is_dir_link",
"is_update_available",
"get_cached_data",
"get_class_that_defined_method",
]
def is_url(url):
"""Check if provided string is a url.
Args:
url (str): url to check
Returns:
bool: True if arg url is a valid url
"""
scheme = requtil.urlparse(str(url)).scheme
return scheme in (
"http",
"https",
)
@cachier(stale_after=timedelta(days=1))
def ensure_valid_url(url):
"""Ensure a url is valid.
Args:
url (str): URL to validate
Raises:
InvalidURL: URL is not a valid url
ConnectionError: Failed to connect to url
HTTPError: Response was not 200
Returns:
str: valid url
"""
if not is_url(url):
raise reqexc.InvalidURL(f"{url} is not a valid url!")
resp = requests.head(url, allow_redirects=True)
resp.raise_for_status()
return url
def ensure_existing_dir(path: PathStr) -> Path:
"""Ensure path exists and is a directory.
If path does exist, it will be returned as
a pathlib.PurePath object
Args:
path (str): path to validate and return
Raises:
NotADirectoryError: path does not exist
NotADirectoryError: path is not a directory
Returns:
object: pathlib.PurePath object
"""
_path = Path(path)
path = _path.absolute()
try:
if not path.exists():
raise NotADirectoryError(f"{_path} does not exist!")
if not path.is_dir():
raise NotADirectoryError(f"{_path} is not a directory!")
except OSError as e:
raise NotADirectoryError(f"{_path} is not a valid path!") from e
return _path
def is_existing_dir(path):
"""Check if path is an existing directory.
Args:
path (str): path to check
Returns:
bool: True if path exists and is a directory
"""
try:
ensure_existing_dir(path)
except NotADirectoryError:
return False
else:
return True
def is_downloadable(url):
"""Checks if the url can be downloaded from.
Args:
url (str): url to check
Returns:
bool: True if contains a downloadable resource
"""
try:
ensure_valid_url(url)
except Exception:
return False
headers = requests.head(url).headers
content_type = headers.get("content-type").lower()
ctype = content_type.split("/")
if any(
t
in (
"text",
"html",
)
for t in ctype
):
return False
return True
def get_url_filename(url):
"""Parse filename from url.
Args:
url (str): url to parse
Returns:
str: filename of url
"""
path = requtil.urlparse(url).path
file_name = Path(path).name
return file_name
def stream_download(url, **kwargs):
"""Stream download with tqdm progress bar.
Args:
url (str): url to file
Returns:
bytearray: bytearray of content
"""
stream = requests.get(url, stream=True)
content = bytearray()
total_size = int(stream.headers.get("content-length", len(stream.content)))
block_size = 32 * 1024
bar_format = "{l_bar}{bar}| [{n_fmt}/{total_fmt} @ {rate_fmt}]"
tqdm_kwargs = {
"unit_scale": True,
"unit_divisor": 1024,
"smoothing": 0.1,
"bar_format": bar_format,
}
tqdm_kwargs.update(kwargs)
with tqdm(total=total_size, unit="B", **tqdm_kwargs) as pbar:
for block in stream.iter_content(block_size):
pbar.update(len(block))
content.extend(block)
return content
@cachier(stale_after=timedelta(days=3))
def search_xml(url, node):
"""Search xml from url by node.
Args:
url (str): url to xml
node (str): node to search for
Returns:
[str]: matching nodes
"""
resp = requests.get(url)
xml = resp.content.decode("UTF-8")
root = ET.fromstring(xml)
root_ns = root.tag[1 : root.tag.find("}")]
namespace = {"ns": root_ns}
_results = root.findall(f"./*/ns:{node}", namespace)
results = [k.text for k in _results]
return results
def iter_requirements(path):
"""Iterate requirements from a requirements.txt file.
Args:
path (str): path to file
"""
req_path = Path(path).absolute()
with req_path.open("r") as rfile:
yield from requirements.parse(rfile)
def get_package_meta(name, url):
"""Retrieve package metadata from PyPi.
Args:
name (str): Name of package with specs.
url (str): Url to package.
Returns:
dict: Dictionary of Metadata
"""
def _iter_compare(in_val, comp_to, operator):
for t in comp_to:
state = eval(f"in_val {operator} t")
if state:
yield t
resp = requests.get(url)
data = resp.json()
pkg = next(requirements.parse(name))
releases = data["releases"]
# Latest version
spec_data = list(releases.items())[-1][1]
if pkg.specs:
spec_comp, spec_v = pkg.specs[0]
spec_v = version.parse(spec_v)
rel_versions = [version.parse(k) for k in releases.keys()]
spec_key = str(next(_iter_compare(spec_v, rel_versions, spec_comp)))
spec_data = releases[spec_key]
# Find .tar.gz meta
tar_meta = next(i for i in spec_data if ".tar.gz" in Path(i["url"]).name)
return tar_meta
def is_within_directory(
directory: Union[os.PathLike, str], target: Union[str, os.PathLike]
) -> bool:
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory
def safe_extract(
tar: tarfile.TarFile,
path: Union[os.PathLike, str] = ".",
members: Optional[Iterable[tarfile.TarInfo]] = None,
*,
numeric_owner: bool = False,
) -> None:
for member in tar.getmembers():
member_path = os.path.join(path, member.name)
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
tar.extractall(path, members, numeric_owner=numeric_owner)
def extract_tarbytes(file_bytes: bytes, path: str) -> str:
"""Extract tarfile as bytes.
Args:
file_bytes (bytearray): Bytes of file to extract
path (str): Path to extract it to
Returns:
path: destination path
"""
tar_bytes_obj = io.BytesIO(file_bytes)
with tarfile.open(fileobj=tar_bytes_obj, mode="r:gz") as tar:
safe_extract(tar, path)
return path
def create_dir_link(source, target):
"""Creates a platform appropriate directory link.
On POSIX systems it will create a symlink.
On Windows it will fallback on a directory junction if needed
Args:
source (os.Pathlike): Path to create link at.
target (os.Pathlike): Path to link to.
Raises:
OSError: Symlink Creation Failed
OSError: Symlink and Directory Junction Fallback Failed
"""
platform = sys.platform
source = Path(source)
target = Path(target)
try:
source.symlink_to(target, target_is_directory=True)
except OSError as e:
# Handle non-admin/non-dev windows links
if not platform == "win32":
# handles exFAT disk format (links not working)
if e.errno == 38:
shutil.copytree(
str(target.absolute()), str(source.absolute()), symlinks=False, ignore=None
)
return
elif e.errno == 17: # folder exists
return
else:
raise e
# Fall back to directory junction
cmd = ["MKLINK", "/J", str(source.absolute()), str(target.absolute())]
exit_code = subproc.call(cmd, shell=True, stdout=subproc.PIPE, stderr=subproc.PIPE)
if exit_code:
raise e
def is_dir_link(path):
"""Test if path is either a symlink or directory junction.
Args:
path (os.Pathlike): Path to test.
Returns:
bool: True if path is a type of link.
"""
platform = sys.platform
path = Path(path)
if path.is_symlink():
return True
if platform == "win32":
# Test for Directory Junction
resolved = str(path.resolve())
if not str(path.absolute()) == resolved:
return True
return False
def is_update_available():
"""Check if micropy-cli update is available.
Returns:
bool: True if update available, else False.
"""
url = "https://pypi.org/pypi/micropy-cli/json"
data = get_cached_data(url)
versions = [k for k in data["releases"].keys() if "rc" not in k]
if versions:
latest = version.parse(versions[-1])
cur_version = version.parse(metadata.version("micropy-cli"))
if cur_version < latest:
return str(latest)
return False
@cachier(stale_after=timedelta(days=3), next_time=True)
def get_cached_data(url):
"""Wrap requests with a short cache."""
source_data = requests.get(url).json()
return source_data
def get_class_that_defined_method(meth):
"""Determines Class that defined a given method.
See - https://stackoverflow.com/a/25959545
Args:
meth (Callable): Method to determine class from
Returns:
Callable: Class that defined method
"""
if inspect.ismethod(meth):
for cls in inspect.getmro(meth.__self__.__class__):
if cls.__dict__.get(meth.__name__) is meth:
return cls
meth = meth.__func__ # fallback to __qualname__ parsing
if inspect.isfunction(meth):
cls = getattr(
inspect.getmodule(meth), meth.__qualname__.split(".", 1)[0].rsplit(".", 1)[0]
)
if isinstance(cls, type):
return cls
return getattr(meth, "__objclass__", None) # handle special descriptor objects
================================================
FILE: micropy/utils/stub.py
================================================
"""Micropy stub utils."""
from __future__ import annotations
import importlib.util
import io
import sys
from pathlib import Path
from types import ModuleType
from typing import Optional
import libcst as cst
import libcst.codemod as codemod
import micropy.data
from stubber import minify
from stubber.codemod import board as stub_board
from stubber.utils import stubmaker
def locate_create_stubs() -> Path:
"""Locate createstubs.py"""
return Path(importlib.util.find_spec("stubber.board.createstubs").origin)
def import_source_code(module_name: str, path: Path) -> ModuleType:
"""Dynamically create and load module from python source code.
Args:
module_name: name of new module to create.
path: path to source code.
Returns:
Dynamically created module.
"""
spec = importlib.util.spec_from_file_location(module_name, path)
module = importlib.util.module_from_spec(spec)
sys.modules[module_name] = module
spec.loader.exec_module(module)
return module
def import_stubber() -> ModuleType:
"""Dynamically import stubber.
We do this because `micropython-stubs` is not a python package, so
we can't import from it as you would normally.
"""
vers_path = micropy.data.STUBBER / "src" / "version.py"
src_path = micropy.data.STUBBER / "src" / "utils.py"
# stubber utils expects an ambiguous 'version' import
import_source_code("version", vers_path)
mod = import_source_code("stubber.utils", src_path)
return mod
def generate_stub(path, log_func=None):
"""Create Stub from local .py file.
Args:
path (str): Path to file
log_func (func, optional): Callback function for logging.
Defaults to None.
Returns:
tuple: Tuple of file path and generated stub path.
"""
if stubmaker is None:
raise ImportError("micropython-stubber requires a python version of >= 3.8")
stubmaker.STUBGEN_OPT.quiet = True
file_path = Path(path).absolute()
stubbed_path = file_path.with_suffix(".pyi")
stubmaker.generate_pyi_from_file(file_path)
# ensure stubs reside next to their source.
result = next((file_path.parent.rglob(f"**/{stubbed_path.name}")), stubbed_path)
if result.exists():
result.replace(stubbed_path)
if not any(result.parent.iterdir()):
result.parent.rmdir()
files = (file_path, stubbed_path)
return files
def prepare_create_stubs(
*,
variant: Optional[stub_board.CreateStubsVariant] = None,
modules_set: Optional[stub_board.ListChangeSet] = None,
problem_set: Optional[stub_board.ListChangeSet] = None,
exclude_set: Optional[stub_board.ListChangeSet] = None,
compile: bool = False,
) -> io.StringIO | io.BytesIO:
if stub_board is None:
raise ImportError("micropython-stubber requires a python version of >= 3.8")
variant = variant or stub_board.CreateStubsVariant.BASE
ctx = codemod.CodemodContext()
code_mod = stub_board.CreateStubsCodemod(
ctx, variant=variant, modules=modules_set, problematic=problem_set, excluded=exclude_set
)
create_stubs = cst.parse_module(locate_create_stubs().read_text())
result = code_mod.transform_module_impl(create_stubs).code
result_io = io.StringIO(result)
minified_io = io.StringIO()
minify.minify(result_io, minified_io, keep_report=True, diff=False)
minified_io.seek(0)
# TODO: compile w/ mpy-cross
if compile:
compiled_io = io.BytesIO()
minify.cross_compile(minified_io, compiled_io)
compiled_io.seek(0)
return compiled_io
return minified_io
================================================
FILE: micropy/utils/types.py
================================================
"""Type utilities and variables."""
from __future__ import annotations
from os import PathLike
from typing import Any, Protocol, Union, runtime_checkable
from typing_extensions import TypeAlias
# PathLike string or string type alias.
PathStr: TypeAlias = Union[str, PathLike]
@runtime_checkable
class SupportsLessThan(Protocol):
def __lt__(self, other: Any) -> bool:
...
================================================
FILE: micropy/utils/validate.py
================================================
"""
micropy.utils.validate
~~~~~~~~~~~~~~
This module contains utility functions for MicropyCli
that center on data validation
"""
import json
from pathlib import Path
from jsonschema import Draft7Validator
class Validator:
""" "jsonschema wrapper for file validation.
Returns:
object: Validator Instance
"""
def __init__(self, schema_path):
schema = self._load_json(schema_path)
self.schema = Draft7Validator(schema)
def _load_json(self, path):
"""Loads json data from file.
Args:
path (str): path to file
Returns:
Loaded JSON data as an array or dict
"""
file = Path(path).resolve()
data = json.load(file.open())
return data
def validate(self, path):
"""Validates json file against a schema.
Args:
path (str): path to json file to validate
Returns:
jsonschema.validate
"""
data = self._load_json(path)
return self.schema.validate(data)
================================================
FILE: pyproject.toml
================================================
[tool.poetry]
name = "micropy-cli"
version = "4.2.2"
description = "Micropython Project Management Tool with VSCode support, Linting, Intellisense, Dependency Management, and more!"
authors = ["Braden Mars "]
license = "MIT"
packages = [{ include = "micropy" }]
include = [
{ path = "tests", format = "sdist" },
"README.md",
"CHANGELOG.md",
"LICENSE",
"micropy/lib/stubber/src/*",
"micropy/lib/stubber/board/*",
"micropy/lib/stubber/process.py",
"micropy/lib/stubber/minified/*"
]
exclude = ["micropy/lib/stubber/*"]
readme = "README.md"
homepage = "https://github.com/BradenM/micropy-cli"
repository = "https://github.com/BradenM/micropy-cli"
documentation = "https://micropy-cli.readthedocs.io"
keywords = ["micropython", "stubs", "linting", "vscode", "intellisense"]
classifiers = [
"Operating System :: OS Independent",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Embedded Systems",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python :: Implementation :: MicroPython",
"Programming Language :: Python :: Implementation :: CPython",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
]
[tool.poetry.scripts]
micropy = 'micropy.app:app'
[tool.poetry.dependencies]
python = ">=3.9,<3.12"
boltons = "==23.1.1"
cachier = "2.3.0"
Jinja2 = "3.1.4"
questionary = "1.10.0"
requests = "2.32.2"
requirements-parser = "0.9.0"
tqdm = "4.66.4"
click = "8.1.7"
colorama = { version = "==0.4.6", platform = "win32" }
jsonschema = "=3.2.0"
dpath = "==1.5.0"
GitPython = "==3.1.43"
packaging = "==21.3"
python-minifier = "==2.9.0"
mypy = "^1"
## rshell is broken on windows (with python >=3.10) due to pyreadline dependency.
rshell = [
{ version = "^0.0.31", platform = "win32", python = "<3.10" },
{ version = "^0.0.31", platform = "!=win32" },
]
MarkupSafe = "==2.1.5"
upydevice = "0.3.8"
attrs = "==23.2.0"
typing-extensions = "4.12.2"
pydantic = "1.10.16"
distlib = "0.3.8"
importlib-metadata = { version = "==5.2.0", python = '<3.10' }
micropython-stubber = "==1.23.2"
libcst = ">=1.1.0,<2.0.0"
typer = {version = "0.12.3", extras = ["all"]}
[tool.poetry.group.dev.dependencies]
bump2version = "^0.5.11"
pre-commit = "^3.0.2"
mypy = { version = "^1.0", extras = ["dmypy"] }
types-requests = "^2.27.14"
pylint = "^2.7"
better-exceptions = "^0.3.3"
pdbpp = {version = "^0.10.3", platform = "!=win32" }
[tool.poetry.group.lint]
optional = true
[tool.poetry.group.lint.dependencies]
pyupgrade = "^3.3.1"
isort = "^5.12.0"
black = ">=22.10,<23.0"
ruff = "^0.5.0"
[tool.poetry.group.test]
optional = true
[tool.poetry.group.test.dependencies]
pytest = "^7.1.1"
pytest-cov = "^4.0"
pytest-datadir = "^1.3"
pytest-mock = "^3.5"
pytest-runner = "^6.0"
pytest-testmon = "^1.3"
pytest-watch = "^4.2"
pytest-xdist = "^3.0"
pytest-forked = "^1.3"
pytest-randomly = "^3.7.0"
pytest-sugar = "^0.9.4"
requests-mock = "^1.9"
flaky = "^3.7.0"
coveralls = "^3.0"
codacy-coverage = "^1.3"
mock = "^5.0.1"
[tool.poetry.group.docs]
optional = true
[tool.poetry.group.docs.dependencies]
sphinx-click = "^4.3.0"
sphinx-autodoc-typehints = "^1.19.2"
recommonmark = "^0.7.0"
Sphinx = "^5.2.1"
sphinx-rtd-theme = "^1.0.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.black]
line-length = 100
skip-string-normalization = false
target-version = ['py38']
include = '\.pyi?$'
exclude = '''
(
/(
\.eggs # exclude a few common directories in the
| \.git # root of the project
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| build
| dist
| micropy/lib
)/
| foo.py # also separately exclude a file named foo.py in
# the root of the project
)
'''
[tool.isort]
profile = "black"
src_paths = ["src"]
line_length = 100
honor_noqa = true
[tool.ruff]
select = [
"E", # pycodestyle errors
"W", # pycodestyle warnings
"F", # pyflakes
# "I", # isort
# "D", # pydocstyle
"UP", # pyupgrade
"C", # flake8-comprehensions
"B", # flake8-bugbear
# "PT", # flake8-pytest-style
"RUF" # ruff specific rules
]
ignore = [
"E501", # line too long - let black handle.
"C901", # too complex
"C408", # rewrite as literal; makes comprehensions harder sometimes imo
"B008", # no function call as arg default; typer uses this pattern.
]
line-length = 100
target-version = 'py38'
src = ['micropy']
extend-exclude = ['micropy/lib', 'tests/test_stubs', 'tests/data', 'micropy/utils/__init__.py', 'micropy/__init__.py']
[tool.ruff.pyupgrade]
keep-runtime-typing = true
[tool.pytest.ini_options]
collect_ignore = ['setup.py', 'micropy/lib/*', 'micropy/lib/**/*']
testpaths = "tests"
addopts = "--maxprocesses=4 -ra -q"
norecursedirs = "micropy/lib"
mock_use_standalone_module = true
[tool.coverage.run]
source = ["micropy"]
omit = ["micropy/project/template/*", "micropy/lib/*", "micropy/lib/**/*"]
[tool.coverage.report]
exclude_lines = [
"if __name__ == .__main__.:",
'class .*\bProtocol\):',
'raise NotImplementedError',
'raise AssertionError',
'def __repr__',
'noqa',
'@(abc\.)?abstractmethod',
'pragma: no cover'
]
[tool.mypy]
plugins = [
"pydantic.mypy"
]
follow_imports = "silent"
warn_redundant_casts = true
warn_unused_ignores = true
disallow_any_generics = true
check_untyped_defs = true
no_implicit_reexport = true
disallow_untyped_defs = true
[[tool.mypy.overrides]]
module = ["boltons", "upydevice", "upydevice.*", "rshell", "tqdm"]
ignore_missing_imports = true
[tool.pydantic-mypy]
init_forbid_extra = true
init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true
================================================
FILE: release-please-config.json
================================================
{
"$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
"release-type": "python",
"prerelease": true,
"include-v-in-tag": true,
"bootstrap-sha": "fbdf74f0cb3a4621fa7373495c9b2e7153009818",
"changelog-sections": [
{
"type": "feat",
"section": "Features"
},
{
"type": "feature",
"section": "Features"
},
{
"type": "fix",
"section": "Bug Fixes"
},
{
"type": "perf",
"section": "Performance Improvements"
},
{
"type": "revert",
"section": "Reverts"
},
{
"type": "docs",
"section": "Documentation",
"hidden": false
},
{
"type": "style",
"section": "Styles",
"hidden": true
},
{
"type": "chore",
"section": "Miscellaneous Chores",
"hidden": true
},
{
"type": "refactor",
"section": "Code Refactoring",
"hidden": false
},
{
"type": "test",
"section": "Tests",
"hidden": true
},
{
"type": "build",
"section": "Build System",
"hidden": true
},
{
"type": "ci",
"section": "Continuous Integration",
"hidden": true
}
],
"plugins": [
{
"type": "sentence-case"
}
],
"packages": {
".": {}
}
}
================================================
FILE: scripts/export-docs-reqs.sh
================================================
#!/usr/bin/env bash
ROOT="$(git rev-parse --show-toplevel)"
poetry plugin add poetry-export-plugin>/dev/null
poetry export --with docs --with dev --without-hashes -f requirements.txt -o "${ROOT}/docs/requirements.txt"
================================================
FILE: tests/__init__.py
================================================
================================================
FILE: tests/app/conftest.py
================================================
from typing import NamedTuple
from unittest.mock import MagicMock
import pytest
from micropy import MicroPy, logger
from micropy.project import Project
from micropy.stubs import StubManager, StubRepository
from pytest_mock import MockFixture
from typer.testing import CliRunner
@pytest.fixture()
def runner():
runner = CliRunner()
with runner.isolated_filesystem():
yield runner
class MicroPyScenario(NamedTuple):
project_exists: bool = True
has_stubs: bool = True
impl_add: bool = True
@pytest.fixture()
def mock_repo(mocker: MockFixture) -> StubRepository:
mock = mocker.MagicMock(StubRepository, autospec=True)
mocker.patch("micropy.stubs.StubRepository", return_value=mock)
return mock
@pytest.fixture()
def micropy_obj(
request: pytest.FixtureRequest, tmp_path, mocker: MockFixture, mock_repo
) -> MicroPy:
mpy = mocker.MagicMock(MicroPy, autospec=True)
mpy.log = logger.Log.add_logger("MicroPy")
mpy.project = mocker.MagicMock(Project, autospec=True).return_value
mpy.stubs = mocker.MagicMock(StubManager, autospec=True).return_value
mpy.repo = mock_repo
if param := getattr(request, "param", MicroPyScenario()):
if param.impl_add:
mpy.stubs.add = lambda i: i
mpy.project.exists = param.project_exists
stubs = ["a-stub"] if param.has_stubs else []
mpy.stubs.__iter__.return_value = iter(stubs)
mocker.patch("micropy.main.MicroPy", return_value=mpy)
return mpy
@pytest.fixture(params=[True, False])
def context_mock(request: pytest.FixtureRequest, mocker: MockFixture, micropy_obj) -> MagicMock:
ctx = mocker.MagicMock()
ctx.ensure_object.return_value = micropy_obj
ctx.find_object.return_value = micropy_obj
ctx.resilient_parsing = request.param
ctx.obj = micropy_obj
return ctx
================================================
FILE: tests/app/test_main.py
================================================
from pathlib import Path
from typing import List, NamedTuple, Optional
import pytest
import typer
from micropy import utils
from micropy.app import main as main_app
from micropy.app.main import TemplateEnum, app
from micropy.project import Project
from pytest_mock import MockFixture
from tests.app.conftest import MicroPyScenario, context_mock
# current working directory symbol
CWD = object()
@pytest.fixture
def project_path(tmp_path):
return tmp_path / "test_project"
class TemplateParamCase(NamedTuple):
value: Optional[List[TemplateEnum]]
expected: List[TemplateEnum]
prompt: Optional[List[TemplateEnum]]
confirm: Optional[bool] = None
@pytest.fixture(
params=[
TemplateParamCase(value=None, expected=[], prompt=[], confirm=True),
TemplateParamCase(value=None, expected=[TemplateEnum.vscode], prompt=[TemplateEnum.vscode]),
TemplateParamCase(value=[TemplateEnum.vscode], expected=[TemplateEnum.vscode], prompt=None),
]
)
def template_param(request: pytest.FixtureRequest, mocker: MockFixture):
param: TemplateParamCase = request.param
mock_prompt = mocker.MagicMock()
mock_confirm = mocker.MagicMock(return_value=param.prompt)
mock_prompt.ask.return_value = param.prompt
mock_confirm.ask.return_value = param.confirm
mocker.patch("questionary.checkbox", return_value=mock_prompt)
mocker.patch("questionary.confirm", return_value=mock_confirm)
ctx = request.getfixturevalue("context_mock")
yield param
if ctx.resilient_parsing:
return
if param.prompt is None:
mock_prompt.ask.assert_not_called()
else:
mock_prompt.ask.assert_called_once()
if param.confirm is None:
mock_confirm.ask.assert_not_called()
else:
mock_confirm.ask.assert_called_once()
def test_template_callback(mocker: MockFixture, micropy_obj, context_mock, template_param):
if context_mock.resilient_parsing:
assert main_app.template_callback(context_mock, template_param.value) is None
return
assert main_app.template_callback(context_mock, template_param.value) == template_param.expected
@pytest.mark.parametrize(
"input,expected",
[
(None, CWD),
("NewProject", "NewProject"),
("/tmp/NewProject", "/tmp/NewProject"),
],
)
def test_path_callback(mocker: MockFixture, micropy_obj, context_mock, input, expected):
expected = Path.cwd() if expected == CWD else expected
if context_mock.resilient_parsing:
assert main_app.path_callback(context_mock, input) is None
return
assert main_app.path_callback(context_mock, input) == expected
@pytest.mark.parametrize(
"input,expected",
[
(None, CWD),
("NewProject", "NewProject"),
("/tmp/NewProject", "/tmp/NewProject"),
],
)
def test_name_callback(mocker: MockFixture, micropy_obj, context_mock, input, expected):
expected = (Path.cwd()).name if expected == CWD else expected
mock_ask = mocker.MagicMock()
mock_ask.ask.return_value.strip.return_value = expected
mocker.patch("questionary.text", return_value=mock_ask)
if context_mock.resilient_parsing:
assert main_app.name_callback(context_mock, input) is None
return
assert main_app.name_callback(context_mock, input) == expected
@pytest.mark.parametrize(
"micropy_obj,expect",
[
(
MicroPyScenario(),
True,
),
(MicroPyScenario(project_exists=False), None),
],
indirect=["micropy_obj"],
)
def test_main_callback(mocker: MockFixture, context_mock, micropy_obj, expect):
if context_mock.resilient_parsing:
assert main_app.main_callback(context_mock) is None
return
util_mock = mocker.patch.object(utils, "is_update_available", return_value=False)
main_app.main_callback(context_mock)
if expect:
util_mock.assert_called_once()
else:
util_mock.assert_not_called()
@pytest.mark.parametrize(
"input,prompt,micropy_obj,expected",
[
(None, [], MicroPyScenario(has_stubs=False), typer.Abort),
(None, [], MicroPyScenario(), typer.BadParameter),
(["a-stub"], None, MicroPyScenario(), ["a-stub"]),
(["a-stub"], None, MicroPyScenario(has_stubs=False), ["a-stub"]),
(None, ["some-stub"], MicroPyScenario(), ["some-stub"]),
],
indirect=["micropy_obj"],
)
def test_stubs_callback(mocker: MockFixture, context_mock, input, prompt, micropy_obj, expected):
mock_ask = mocker.MagicMock()
mock_ask.ask.return_value = prompt
mocker.patch("questionary.checkbox", return_value=mock_ask)
if context_mock.resilient_parsing:
assert main_app.stubs_callback(context_mock, input) is None
return
if not isinstance(expected, list):
with pytest.raises(expected):
main_app.stubs_callback(context_mock, input)
else:
assert main_app.stubs_callback(context_mock, input) == expected
if prompt is None:
mock_ask.ask.assert_not_called()
else:
mock_ask.ask.assert_called_once()
# Test main_init function
def test_main_init(mocker, micropy_obj, project_path, runner):
ask_mock = mocker.MagicMock()
ask_mock.ask.return_value = "test_project"
mocker.patch("questionary.text", return_value=ask_mock)
mocker.patch("questionary.checkbox")
result = runner.invoke(app, ["init", str(project_path)], obj=micropy_obj)
print(result.stdout)
assert result.exit_code == 0
assert f"Initiating {project_path.name}" in result.stdout
assert "Project Created" in result.stdout
ctx = context_mock
@pytest.mark.parametrize(
"micropy_obj,expect",
[
(MicroPyScenario(), Project),
(MicroPyScenario(project_exists=False), typer.Abort),
],
indirect=["micropy_obj"],
)
def test_ensure_project(mocker: MockFixture, micropy_obj, expect, tmp_path):
ctx = mocker.MagicMock(typer.Context, autospec=True).return_value
ctx.ensure_object.return_value = micropy_obj
if expect == typer.Abort:
with pytest.raises(typer.Abort):
main_app.ensure_project(ctx)
else:
project = main_app.ensure_project(ctx)
assert project == micropy_obj.project
@pytest.mark.parametrize(
"input,expected",
[(None, None), (Path("somepath"), typer.Exit)],
)
def test_install_local_callback(micropy_obj, context_mock, input, expected):
if context_mock.resilient_parsing:
assert main_app.install_local_callback(context_mock, input) is None
return
if expected == typer.Exit:
with pytest.raises(typer.Exit):
main_app.install_local_callback(context_mock, input)
micropy_obj.project.add_package.assert_called_once()
return
assert main_app.install_local_callback(context_mock, input) == expected
micropy_obj.project.add_package.assert_not_called()
@pytest.mark.parametrize(
"packages, path_in_params",
[(None, True), (None, False), (["pkg1", "pkg2"], False), (["pkg1", "pkg2"], True)],
)
def test_install_project_callback(mocker, context_mock, micropy_obj, packages, path_in_params):
if context_mock.resilient_parsing:
assert main_app.install_local_callback(context_mock, packages) is None
return
context_mock.params = dict()
if path_in_params:
context_mock.params["path"] = "some_path"
context_mock.params["dev"] = False
if path_in_params:
result = main_app.install_project_callback(context_mock, packages)
assert result is None
return
if packages is None and not path_in_params:
with pytest.raises(typer.Exit):
main_app.install_project_callback(context_mock, packages)
micropy_obj.project.add_from_file.assert_called_once()
return
result = main_app.install_project_callback(context_mock, packages)
assert result == packages
@pytest.mark.parametrize("dev_flag", [True, False])
def test_main_install(mocker, micropy_obj, runner, dev_flag):
packages = ["package1", "package2"]
result = runner.invoke(
app,
["install", *packages, "--dev"] if dev_flag else ["install", *packages],
obj=micropy_obj,
)
print(result.stdout)
assert result.exit_code == 0
assert "Installing Packages" in result.stdout
for pkg in packages:
micropy_obj.project.add_package.assert_any_call(pkg, dev=dev_flag)
def test_main_version(runner):
result = runner.invoke(app, ["version"])
assert result.exit_code == 0
assert "Micropy Version:" in result.stdout
================================================
FILE: tests/app/test_stubs.py
================================================
from pathlib import Path
import pytest
from micropy.app import stubs as stubs_app
from micropy.app.stubs import stubs_app as app
from micropy.exceptions import StubError, StubNotFound
from micropy.pyd import PyDevice
from micropy.stubs import StubRepositoryPackage
from micropy.stubs.source import StubSource
from pytest_mock import MockerFixture
from stubber.codemod.modify_list import ListChangeSet
from tests.app.conftest import MicroPyScenario
@pytest.mark.parametrize(
"input,expected",
[(None, None), (["mod-1", "mod-2"], ListChangeSet.from_strings(add=["mod-1", "mod-2"]))],
)
def test_create_changeset(input, expected):
result = stubs_app.create_changeset(input)
if expected is None:
assert result is None
else:
assert result.add[0].children == expected.add[0].children
assert result.add[1].children == expected.add[1].children
@pytest.fixture()
def pydevice_mock(mocker: MockerFixture):
def mock_copy_from(dev_path, tmp_dir, **kwargs):
stub_dir = Path(tmp_dir) / "stubs"
stub_dir.mkdir()
pyb_mock = mocker.MagicMock(PyDevice, autospec=True)
pyb_mock.return_value.copy_from = mock_copy_from
return pyb_mock
@pytest.fixture()
def pyb_mock(request: pytest.FixtureRequest, mocker: MockerFixture):
device_mock = request.getfixturevalue("pydevice_mock")
pyb = device_mock.return_value
mocker.patch("micropy.app.stubs.PyDevice", return_value=pyb)
return pyb
@pytest.fixture
def stubs_locator_mock(mocker: MockerFixture):
stubs_locator = mocker.MagicMock(StubSource, autospec=True)
mocker.patch("micropy.app.stubs.stubs_source.StubSource", return_value=stubs_locator)
return stubs_locator
@pytest.fixture()
def stub_search_data(mocker: MockerFixture):
stub1 = mocker.MagicMock(StubRepositoryPackage, autospec=True)
stub2 = mocker.MagicMock(StubRepositoryPackage, autospec=True)
stub3 = mocker.MagicMock(StubRepositoryPackage, autospec=True)
stub1.name = "test1"
stub1.version = "1.0.0"
stub2.name = "test2"
stub2.version = "1.1.0"
stub3.name = "test3"
stub3.version = "0.9.0"
return [
stub1,
stub2,
stub3,
]
def test_stubs_create(mocker: MockerFixture, pyb_mock, micropy_obj, runner):
result = runner.invoke(app, ["create", "/dev/port"], obj=micropy_obj)
print(result.stdout)
pyb_mock.run_script.assert_called_once()
pyb_mock.disconnect.assert_called_once()
def test_stubs_create__connect_error(pydevice_mock, micropy_obj, runner):
pydevice_mock.side_effect = SystemExit()
result = runner.invoke(app, ["create", "/dev/port"], obj=micropy_obj)
assert result.return_value is None
def test_stubs_create__script_error(pyb_mock, micropy_obj, runner):
pyb_mock.run_script.side_effect = Exception("Script error")
with pytest.raises(Exception, match="Script error"):
result = runner.invoke(
app, ["create", "/dev/port"], obj=micropy_obj, catch_exceptions=False
)
assert result.return_value is None
@pytest.mark.parametrize("force", [True, False])
@pytest.mark.parametrize("micropy_obj", [MicroPyScenario(impl_add=False)], indirect=True)
def test_stubs_add_success(micropy_obj, runner, stubs_locator_mock, mock_repo, force):
stubs_locator_mock.ready.return_value.__enter__.return_value = "test-stub"
args = ["add", "test-stub"]
if force:
args.append("--force")
result = runner.invoke(app, args, obj=micropy_obj, catch_exceptions=False)
print(result.stdout)
assert result.exit_code == 0
assert "added!" in result.stdout
stubs_locator_mock.ready.assert_called_once_with("test-stub")
micropy_obj.stubs.add.assert_called_once_with("test-stub", force=force)
@pytest.mark.parametrize("micropy_obj", [MicroPyScenario(impl_add=False)], indirect=True)
def test_stubs_add__not_found(micropy_obj, runner, stubs_locator_mock, mock_repo):
micropy_obj.stubs.add.side_effect = StubNotFound()
result = runner.invoke(app, ["add", "nonexistent-stub"], obj=micropy_obj)
assert result.exit_code == 1
assert "could not be found" in result.stdout
stubs_locator_mock.ready.assert_called_once_with("nonexistent-stub")
@pytest.mark.parametrize("micropy_obj", [MicroPyScenario(impl_add=False)], indirect=True)
def test_stubs_add__invalid(micropy_obj, runner, stubs_locator_mock, mock_repo):
micropy_obj.stubs.add.side_effect = StubError()
result = runner.invoke(app, ["add", "invalid-stub"], obj=micropy_obj)
assert result.exit_code == 1
assert "is not a valid stub!" in result.stdout
stubs_locator_mock.ready.assert_called_once_with("invalid-stub")
@pytest.mark.parametrize(
"micropy_obj", [MicroPyScenario(), MicroPyScenario(project_exists=False)], indirect=True
)
def test_stubs_list(micropy_obj, runner):
result = runner.invoke(app, ["list"], obj=micropy_obj)
assert result.exit_code == 0
assert "Installed Stubs" in result.stdout
print(result.stdout)
if not micropy_obj.project.exists:
micropy_obj.stubs.iter_by_firmware.assert_called_once()
else:
assert micropy_obj.stubs.iter_by_firmware.call_count == 2
@pytest.mark.parametrize("outdated", [True, False])
def test_stubs_search(stub_search_data, micropy_obj, runner, outdated):
micropy_obj.stubs._loaded = {"test1", "test3"}
micropy_obj.stubs._firmware = {"test1", "test3"}
micropy_obj.repo.search.return_value = stub_search_data
args = ["search", "test"]
if outdated:
args.append("--show-outdated")
result = runner.invoke(app, args, obj=micropy_obj, catch_exceptions=False)
assert result.exit_code == 0
assert "Results for test" in result.stdout
assert "test1" in result.stdout
assert "test2" in result.stdout
assert "test3" in result.stdout
assert "Installed" in result.stdout
def test_stubs_search_no_results(mocker: MockerFixture, micropy_obj, runner):
micropy_obj.repo.search.return_value = []
result = runner.invoke(app, ["search", "nonexistent"], obj=micropy_obj)
assert result.exit_code == 0
assert "No results found for: nonexistent" in result.stdout
================================================
FILE: tests/conftest.py
================================================
""" Common Pytest Fixtures"""
import importlib
import shutil
from pathlib import Path
from pprint import PrettyPrinter
import micropy
import pytest
import questionary
from boltons import iterutils
# Mock values for Template VSCode ext checks
mock_vscode_exts = [
"mock.ext@0.0.0",
# meets req
"ms-python.python@2019.9.34474",
]
def pytest_collection_modifyitems(items):
items.reverse()
@pytest.fixture(autouse=True)
def cleanup_data(mocker):
mocker.resetall()
try:
micropy.utils.ensure_valid_url.clear_cache()
except Exception:
importlib.reload(micropy)
@pytest.fixture
def mock_prompt(monkeypatch):
def mock_prompt(*args, **kwargs):
class prompt_mock:
def __init__(self, *args, **kwargs):
return None
def ask(self):
return ["stub"]
return prompt_mock(*args, **kwargs)
monkeypatch.setattr(questionary, "checkbox", mock_prompt)
@pytest.fixture
def mock_micropy_path(mocker, tmp_path):
path = tmp_path / ".micropy"
stub_path = path / "stubs"
log_path = path / "micropy.log"
mocker.patch("micropy.data.FILES", path)
mocker.patch("micropy.data.STUB_DIR", stub_path)
mocker.patch("micropy.data.LOG_FILE", log_path)
return path
@pytest.fixture
def mock_micropy(mock_micropy_path):
config = micropy.main.MicroPyOptions(root_dir=mock_micropy_path)
mp = micropy.main.MicroPy(options=config)
return mp
@pytest.fixture
def mock_cwd(request, tmp_path, mocker):
print(request)
import pathlib
mocker.patch("pathlib.Path.cwd")
pathlib.Path.cwd.return_value = tmp_path
yield (tmp_path)
@pytest.fixture(scope="session")
def test_urls():
def test_headers(type):
return {"content-type": type}
return {
"valid": "http://www.google.com",
"valid_https": "https://www.google.com",
"invalid": "/foobar/bar/foo",
"invalid_file": "file:///foobar/bar/foo",
"bad_resp": "http://www.google.com/XYZ/ABC/BADRESP",
"download": "https://www.somewebsite.com/archive_test_stub.tar.gz",
"headers": {
"can_download": test_headers("application/gzip"),
"not_download": test_headers("text/plain"),
},
}
@pytest.fixture
def get_stub_paths(shared_datadir, tmp_path):
def _get_stub_paths(count=1, valid=True, firm=False, dest=tmp_path):
_stubs = ["fware"] if firm else ["esp8266", "esp32"]
stubs = iter(_stubs)
_count = 0
while _count < count:
s = next(stubs)
path = (
(shared_datadir / f"{s}_test_stub")
if valid
else (shared_datadir / f"{s}_invalid_stub")
)
if path.exists():
dest = dest / path.name
if not dest.exists():
shutil.copytree(path, (dest / path.name))
yield dest
_count += 1
return _get_stub_paths
@pytest.fixture
def mock_mp_stubs(mock_micropy, mocker, shared_datadir):
mock_micropy.stubs.add(shared_datadir / "fware_test_stub")
mock_micropy.stubs.add(shared_datadir / "esp8266_test_stub")
mock_micropy.stubs.add(shared_datadir / "esp32_test_stub")
return mock_micropy
@pytest.fixture
def get_stubs(get_stub_paths, mocker, tmp_path):
def _get_stubs(path=tmp_path, **kwargs):
def stubbify(m, path, firm=None):
m.path = path
m.frozen = path / "frozen"
m.stubs = path / "stubs"
m.name = m.path.name
m.stub_version = "0.0.0"
m.firmware = firm
return m
paths = get_stub_paths(dest=path, **kwargs)
firm = next(get_stub_paths(firm=True, dest=path))
firm_mock = stubbify(mocker.MagicMock(), firm)
for p in paths:
yield stubbify(mocker.MagicMock(), p, firm=firm_mock)
return _get_stubs
@pytest.fixture
def micropy_stubs(mocker, get_stubs):
def _micropy_stubs(count=3):
def _mock_resolve_subresource(stubs, data_path):
return get_stubs(path=data_path)
mock_mp = mocker.patch.object(micropy, "MicroPy").return_value
stubs = list(get_stubs())
mock_mp.stubs.__iter__.return_value = stubs
mock_mp.stubs.resolve_subresource = _mock_resolve_subresource
mock_mp.stubs.add.return_value = stubs[0]
return mock_mp
return _micropy_stubs
@pytest.yield_fixture
def test_archive(shared_datadir):
archive = shared_datadir / "archive_test_stub.tar.gz"
file_obj = archive.open("rb")
file_bytes = file_obj.read()
yield file_bytes
file_obj.close()
micropy_source = micropy.stubs.repository_info.RepositoryInfo(
name="BradenM/micropy-stubs",
display_name="micropy-stubs",
source="https://my-mocked-source.com/bradenm",
)
micropython_source = micropy.stubs.repository_info.RepositoryInfo(
name="Josverl/micropython-stubs",
display_name="micropython-stubs",
# source="https://raw.githubusercontent.com/Josverl/micropython-stubs/main/publish/package_data.jsondb",
source="https://my-mocked-source.com/josverl",
)
@pytest.fixture
def mock_manifests(mocker, requests_mock):
micropy_manifest = {
"name": "Micropy Stubs",
"location": "https://codeload.github.com/BradenM/micropy-stubs",
"source": "https://raw.githubusercontent.com/bradenm/micropy-stubs/source.json",
"path": "legacy.tar.gz/pkg/",
"packages": [
{
"name": "micropython",
"type": "firmware",
"sha256sum": "7ff2cce0237268cd52164b77b6c2df6be6249a67ee285edc122960af869b8ed2",
},
{"name": "esp8266-micropython-1.15.0", "type": "device", "sha256sum": "abc123"},
],
}
micropython_manifest = {
"version": 2,
"keys": [
"description",
"hash",
"mpy_version",
"name",
"path",
"pkg_version",
"publish",
"stub_hash",
"stub_sources",
],
"data": {
"160521968180811532": {
"name": "micropython-esp32-stubs",
"mpy_version": "1.18",
"publish": True,
"pkg_version": "1.18.post1",
"path": "publish/micropython-v1_18-esp32-stubs",
"stub_sources": [
["Firmware stubs", "stubs/micropython-v1_18-esp32"],
["Frozen stubs", "stubs/micropython-v1_18-frozen/esp32/GENERIC"],
["Core Stubs", "stubs/cpython_core-pycopy"],
],
"description": "MicroPython stubs",
"hash": "712ebd85140b078ce6d9d3cbb9d7ffc18cf10aef",
"stub_hash": "",
}
},
}
requests_mock.get(
micropy_source.source,
json=micropy_manifest,
)
requests_mock.get(
micropython_source.source,
json=micropython_manifest,
)
@pytest.fixture
def mock_checks(mocker):
"""Mock VSCode Template Checks"""
m_run = mocker.patch.object(micropy.project.checks.subproc, "run").return_value
type(m_run).stdout = mocker.PropertyMock(return_value="\n".join(mock_vscode_exts))
return m_run
@pytest.fixture
def mock_pkg(mocker, tmp_path):
"""return mock package"""
from micropy import packages
tmp_pkg = tmp_path / "tmp_pkg"
tmp_pkg.mkdir()
(tmp_pkg / "module.py").touch()
(tmp_pkg / "file.py").touch()
mocker.patch.object(packages.source_package.utils, "ensure_valid_url")
mock_tarbytes = mocker.patch.object(packages.source_package.utils, "extract_tarbytes")
mock_meta = mocker.patch.object(packages.source_package.utils, "get_package_meta")
mocker.patch.object(packages.source_package.utils, "get_url_filename")
mocker.patch.object(packages.source_package.utils, "stream_download")
mock_tarbytes.return_value = tmp_pkg
mock_meta.return_value = {"url": "http://realurl.com"}
return tmp_pkg
# Pytest Incremental Marker
def pytest_runtest_makereport(item, call):
if "incremental" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
def pytest_runtest_setup(item):
if "incremental" in item.keywords:
previousfailed = getattr(item.parent, "_previousfailed", None)
if previousfailed is not None:
pytest.xfail(f"previous test failed ({previousfailed.name})")
class AssertUtils:
pp = PrettyPrinter(indent=4, width=20).pprint
def dict_match_mocks(self, d1):
from unittest.mock import Mock
# Mocks
remapped = iterutils.remap(
d1, lambda p, k, v: (k, "MOCKED_VALUE") if isinstance(v, Mock) else True
)
return remapped
def dict_equal(self, d1, d2):
match_d1 = sorted(self.dict_match_mocks(d1).items())
match_d2 = sorted(self.dict_match_mocks(d1).items())
print("== IS DICT EQUAL ==")
self.pp(match_d1)
print("\n----------\n")
self.pp(match_d2)
print("==============")
return match_d1 == match_d2
def list_equal(self, l1, l2):
list_one = sorted(l1)
list_two = sorted(l2)
return list_one == list_two
def load_json(self, path):
import json
return json.loads(path.read_text())
def json_equal_dict(self, path, d2):
data = self.load_json(path)
return self.dict_equal(data, d2)
def str_path(self, path, absolute=False):
"""x-platform path strings helper"""
path = Path(path)
if absolute:
path = path.absolute()
return str(path)
def get_rand_str(self, length=10):
import random
import string
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(length))
@pytest.fixture
def utils():
return AssertUtils()
================================================
FILE: tests/data/esp32_test_stub/frozen/ntptime.py
================================================
try:
import usocket as socket
except:
import socket
try:
import ustruct as struct
except:
import struct
# (date(2000, 1, 1) - date(1900, 1, 1)).days * 24*60*60
NTP_DELTA = 3155673600
host = "pool.ntp.org"
def time():
NTP_QUERY = bytearray(48)
NTP_QUERY[0] = 0x1B
addr = socket.getaddrinfo(host, 123)[0][-1]
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(1)
res = s.sendto(NTP_QUERY, addr)
msg = s.recv(48)
s.close()
val = struct.unpack("!I", msg[40:44])[0]
return val - NTP_DELTA
# There's currently no timezone support in MicroPython, so
# utime.localtime() will return UTC time (as if it was .gmtime())
def settime():
t = time()
import machine
import utime
tm = utime.localtime(t)
tm = tm[0:3] + (0,) + tm[3:6] + (0,)
machine.RTC().datetime(tm)
print(utime.localtime())
================================================
FILE: tests/data/esp32_test_stub/frozen/ntptime.pyi
================================================
# make_stub_files: Fri 21 Jun 2019 at 00:44:00
from typing import Any, Dict, Optional, Sequence, Tuple, Union
Node = Any
def time() -> Any: ...
# 0: return val-NTP_DELTA
# ? 0: return val-NTP_DELTA
def settime() -> None: ...
================================================
FILE: tests/data/esp32_test_stub/info.json
================================================
{
"firmware": {
"machine": "ESP32 module with ESP32",
"firmware": "esp32 1.11.0",
"nodename": "esp8266",
"version": "1.11.0",
"release": "1.11.0",
"sysname": "esp32",
"name": "micropython"
},
"stubber": { "version": "1.2.0" },
"modules": [
{
"file": "/stubs/esp32_1_11_0/umqtt/robust.py",
"module": "umqtt.robust"
},
{
"file": "/stubs/esp32_1_11_0/umqtt/simple.py",
"module": "umqtt.simple"
}
]
}
================================================
FILE: tests/data/esp32_test_stub/stubs/machine.py
================================================
"""
Module: 'machine' on esp8266 v1.9.4
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.9.4-8-ga9a3caad0 on 2018-05-11', machine='ESP module with ESP8266')
# Stubber: 1.1.2
class ADC:
""""""
def read():
pass
DEEPSLEEP = 4
DEEPSLEEP_RESET = 5
HARD_RESET = 6
class I2C:
""""""
================================================
FILE: tests/data/esp32_test_stub/stubs/modules.json
================================================
[
{
"nodename": "esp8266",
"release": "2.2.0-dev(9422289)",
"version": "v1.9.4-8-ga9a3caad0 on 2018-05-11",
"machine": "ESP module with ESP8266",
"sysname": "esp8266"
},
{ "stubber": "1.1.2" },
{
"file": "/stubs/esp8266_v1_9_4/uasyncio/core.py",
"module": "uasyncio.core"
},
{ "file": "/stubs/esp8266_v1_9_4/umqtt/robust.py", "module": "umqtt.robust" },
{ "file": "/stubs/esp8266_v1_9_4/umqtt/simple.py", "module": "umqtt.simple" },
{
"file": "/stubs/esp8266_v1_9_4/urllib/urequest.py",
"module": "urllib.urequest"
},
{ "file": "/stubs/esp8266_v1_9_4/upip.py", "module": "upip" },
{ "file": "/stubs/esp8266_v1_9_4/_boot.py", "module": "_boot" },
{ "file": "/stubs/esp8266_v1_9_4/_onewire.py", "module": "_onewire" },
{ "file": "/stubs/esp8266_v1_9_4/apa102.py", "module": "apa102" },
{ "file": "/stubs/esp8266_v1_9_4/array.py", "module": "array" },
{ "file": "/stubs/esp8266_v1_9_4/btree.py", "module": "btree" },
{ "file": "/stubs/esp8266_v1_9_4/dht.py", "module": "dht" },
{ "file": "/stubs/esp8266_v1_9_4/ds18x20.py", "module": "ds18x20" },
{ "file": "/stubs/esp8266_v1_9_4/errno.py", "module": "errno" },
{ "file": "/stubs/esp8266_v1_9_4/esp.py", "module": "esp" },
{
"file": "/stubs/esp8266_v1_9_4/example_pub_button.py",
"module": "example_pub_button"
},
{
"file": "/stubs/esp8266_v1_9_4/example_sub_led.py",
"module": "example_sub_led"
},
{ "file": "/stubs/esp8266_v1_9_4/flashbdev.py", "module": "flashbdev" },
{ "file": "/stubs/esp8266_v1_9_4/framebuf.py", "module": "framebuf" },
{ "file": "/stubs/esp8266_v1_9_4/gc.py", "module": "gc" },
{ "file": "/stubs/esp8266_v1_9_4/inisetup.py", "module": "inisetup" },
{ "file": "/stubs/esp8266_v1_9_4/json.py", "module": "json" },
{ "file": "/stubs/esp8266_v1_9_4/math.py", "module": "math" },
{ "file": "/stubs/esp8266_v1_9_4/micropython.py", "module": "micropython" },
{ "file": "/stubs/esp8266_v1_9_4/neopixel.py", "module": "neopixel" },
{ "file": "/stubs/esp8266_v1_9_4/network.py", "module": "network" },
{ "file": "/stubs/esp8266_v1_9_4/ntptime.py", "module": "ntptime" },
{ "file": "/stubs/esp8266_v1_9_4/onewire.py", "module": "onewire" },
{ "file": "/stubs/esp8266_v1_9_4/select.py", "module": "select" },
{ "file": "/stubs/esp8266_v1_9_4/sys.py", "module": "sys" },
{ "file": "/stubs/esp8266_v1_9_4/time.py", "module": "time" },
{ "file": "/stubs/esp8266_v1_9_4/ubinascii.py", "module": "ubinascii" },
{ "file": "/stubs/esp8266_v1_9_4/uhashlib.py", "module": "uhashlib" },
{ "file": "/stubs/esp8266_v1_9_4/uheapq.py", "module": "uheapq" },
{ "file": "/stubs/esp8266_v1_9_4/ujson.py", "module": "ujson" },
{ "file": "/stubs/esp8266_v1_9_4/urandom.py", "module": "urandom" },
{ "file": "/stubs/esp8266_v1_9_4/ure.py", "module": "ure" },
{ "file": "/stubs/esp8266_v1_9_4/uselect.py", "module": "uselect" },
{ "file": "/stubs/esp8266_v1_9_4/ussl.py", "module": "ussl" },
{ "file": "/stubs/esp8266_v1_9_4/ustruct.py", "module": "ustruct" },
{ "file": "/stubs/esp8266_v1_9_4/utime.py", "module": "utime" },
{ "file": "/stubs/esp8266_v1_9_4/utimeq.py", "module": "utimeq" },
{ "file": "/stubs/esp8266_v1_9_4/uzlib.py", "module": "uzlib" },
{
"file": "/stubs/esp8266_v1_9_4/websocket_helper.py",
"module": "websocket_helper"
}
]
================================================
FILE: tests/data/esp8266_invalid_stub/info.json
================================================
[
{
"nodename": "esp32",
"release": "1.10.0"
},
{
"pathtofile": "/foobar/foo/bar.py",
"something": "bar"
}
]
================================================
FILE: tests/data/esp8266_test_stub/frozen/ntptime.py
================================================
try:
import usocket as socket
except:
import socket
try:
import ustruct as struct
except:
import struct
# (date(2000, 1, 1) - date(1900, 1, 1)).days * 24*60*60
NTP_DELTA = 3155673600
host = "pool.ntp.org"
def time():
NTP_QUERY = bytearray(48)
NTP_QUERY[0] = 0x1B
addr = socket.getaddrinfo(host, 123)[0][-1]
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(1)
res = s.sendto(NTP_QUERY, addr)
msg = s.recv(48)
s.close()
val = struct.unpack("!I", msg[40:44])[0]
return val - NTP_DELTA
# There's currently no timezone support in MicroPython, so
# utime.localtime() will return UTC time (as if it was .gmtime())
def settime():
t = time()
import machine
import utime
tm = utime.localtime(t)
tm = tm[0:3] + (0,) + tm[3:6] + (0,)
machine.RTC().datetime(tm)
print(utime.localtime())
================================================
FILE: tests/data/esp8266_test_stub/frozen/ntptime.pyi
================================================
# make_stub_files: Fri 21 Jun 2019 at 00:44:00
from typing import Any, Dict, Optional, Sequence, Tuple, Union
Node = Any
def time() -> Any: ...
# 0: return val-NTP_DELTA
# ? 0: return val-NTP_DELTA
def settime() -> None: ...
================================================
FILE: tests/data/esp8266_test_stub/info.json
================================================
{
"firmware": {
"machine": "ESP module with ESP8266",
"firmware": "esp8266 v1.9.4",
"nodename": "esp8266",
"version": "1.9.4",
"release": "2.2.0-dev(9422289)",
"sysname": "esp8266",
"name": "micropython"
},
"stubber": { "version": "1.2.0" },
"modules": [
{
"file": "/stubs/esp8266_1_9_4/umqtt/robust.py",
"module": "umqtt.robust"
},
{
"file": "/stubs/esp8266_1_9_4/umqtt/simple.py",
"module": "umqtt.simple"
}
]
}
================================================
FILE: tests/data/esp8266_test_stub/stubs/machine.py
================================================
"""
Module: 'machine' on esp8266 v1.9.4
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.9.4-8-ga9a3caad0 on 2018-05-11', machine='ESP module with ESP8266')
# Stubber: 1.1.2
class ADC:
""""""
def read():
pass
DEEPSLEEP = 4
DEEPSLEEP_RESET = 5
HARD_RESET = 6
class I2C:
""""""
================================================
FILE: tests/data/esp8266_test_stub/stubs/modules.json
================================================
[
{
"nodename": "esp8266",
"release": "2.2.0-dev(9422289)",
"version": "v1.9.4-8-ga9a3caad0 on 2018-05-11",
"machine": "ESP module with ESP8266",
"sysname": "esp8266"
},
{ "stubber": "1.1.2" },
{
"file": "/stubs/esp8266_v1_9_4/uasyncio/core.py",
"module": "uasyncio.core"
},
{ "file": "/stubs/esp8266_v1_9_4/umqtt/robust.py", "module": "umqtt.robust" },
{ "file": "/stubs/esp8266_v1_9_4/umqtt/simple.py", "module": "umqtt.simple" },
{
"file": "/stubs/esp8266_v1_9_4/urllib/urequest.py",
"module": "urllib.urequest"
},
{ "file": "/stubs/esp8266_v1_9_4/upip.py", "module": "upip" },
{ "file": "/stubs/esp8266_v1_9_4/_boot.py", "module": "_boot" },
{ "file": "/stubs/esp8266_v1_9_4/_onewire.py", "module": "_onewire" },
{ "file": "/stubs/esp8266_v1_9_4/apa102.py", "module": "apa102" },
{ "file": "/stubs/esp8266_v1_9_4/array.py", "module": "array" },
{ "file": "/stubs/esp8266_v1_9_4/btree.py", "module": "btree" },
{ "file": "/stubs/esp8266_v1_9_4/dht.py", "module": "dht" },
{ "file": "/stubs/esp8266_v1_9_4/ds18x20.py", "module": "ds18x20" },
{ "file": "/stubs/esp8266_v1_9_4/errno.py", "module": "errno" },
{ "file": "/stubs/esp8266_v1_9_4/esp.py", "module": "esp" },
{
"file": "/stubs/esp8266_v1_9_4/example_pub_button.py",
"module": "example_pub_button"
},
{
"file": "/stubs/esp8266_v1_9_4/example_sub_led.py",
"module": "example_sub_led"
},
{ "file": "/stubs/esp8266_v1_9_4/flashbdev.py", "module": "flashbdev" },
{ "file": "/stubs/esp8266_v1_9_4/framebuf.py", "module": "framebuf" },
{ "file": "/stubs/esp8266_v1_9_4/gc.py", "module": "gc" },
{ "file": "/stubs/esp8266_v1_9_4/inisetup.py", "module": "inisetup" },
{ "file": "/stubs/esp8266_v1_9_4/json.py", "module": "json" },
{ "file": "/stubs/esp8266_v1_9_4/math.py", "module": "math" },
{ "file": "/stubs/esp8266_v1_9_4/micropython.py", "module": "micropython" },
{ "file": "/stubs/esp8266_v1_9_4/neopixel.py", "module": "neopixel" },
{ "file": "/stubs/esp8266_v1_9_4/network.py", "module": "network" },
{ "file": "/stubs/esp8266_v1_9_4/ntptime.py", "module": "ntptime" },
{ "file": "/stubs/esp8266_v1_9_4/onewire.py", "module": "onewire" },
{ "file": "/stubs/esp8266_v1_9_4/select.py", "module": "select" },
{ "file": "/stubs/esp8266_v1_9_4/sys.py", "module": "sys" },
{ "file": "/stubs/esp8266_v1_9_4/time.py", "module": "time" },
{ "file": "/stubs/esp8266_v1_9_4/ubinascii.py", "module": "ubinascii" },
{ "file": "/stubs/esp8266_v1_9_4/uhashlib.py", "module": "uhashlib" },
{ "file": "/stubs/esp8266_v1_9_4/uheapq.py", "module": "uheapq" },
{ "file": "/stubs/esp8266_v1_9_4/ujson.py", "module": "ujson" },
{ "file": "/stubs/esp8266_v1_9_4/urandom.py", "module": "urandom" },
{ "file": "/stubs/esp8266_v1_9_4/ure.py", "module": "ure" },
{ "file": "/stubs/esp8266_v1_9_4/uselect.py", "module": "uselect" },
{ "file": "/stubs/esp8266_v1_9_4/ussl.py", "module": "ussl" },
{ "file": "/stubs/esp8266_v1_9_4/ustruct.py", "module": "ustruct" },
{ "file": "/stubs/esp8266_v1_9_4/utime.py", "module": "utime" },
{ "file": "/stubs/esp8266_v1_9_4/utimeq.py", "module": "utimeq" },
{ "file": "/stubs/esp8266_v1_9_4/uzlib.py", "module": "uzlib" },
{
"file": "/stubs/esp8266_v1_9_4/websocket_helper.py",
"module": "websocket_helper"
}
]
================================================
FILE: tests/data/fware_test_stub/frozen/utarfile.py
================================================
import uctypes
# http://www.gnu.org/software/tar/manual/html_node/Standard.html
TAR_HEADER = {
"name": (uctypes.ARRAY | 0, uctypes.UINT8 | 100),
"size": (uctypes.ARRAY | 124, uctypes.UINT8 | 12),
}
DIRTYPE = "dir"
REGTYPE = "file"
def roundup(val, align):
return (val + align - 1) & ~(align - 1)
class FileSection:
def __init__(self, f, content_len, aligned_len):
self.f = f
self.content_len = content_len
self.align = aligned_len - content_len
def read(self, sz=65536):
if self.content_len == 0:
return b""
if sz > self.content_len:
sz = self.content_len
data = self.f.read(sz)
sz = len(data)
self.content_len -= sz
return data
def readinto(self, buf):
if self.content_len == 0:
return 0
if len(buf) > self.content_len:
buf = memoryview(buf)[: self.content_len]
sz = self.f.readinto(buf)
self.content_len -= sz
return sz
def skip(self):
self.f.read(self.content_len + self.align)
class TarInfo:
def __str__(self):
return "TarInfo(%r, %s, %d)" % (self.name, self.type, self.size)
class TarFile:
def __init__(self, name=None, fileobj=None):
if fileobj:
self.f = fileobj
else:
self.f = open(name, "rb")
self.subf = None
def next(self):
if self.subf:
self.subf.skip()
buf = self.f.read(512)
if not buf:
return None
h = uctypes.struct(uctypes.addressof(buf), TAR_HEADER, uctypes.LITTLE_ENDIAN)
# Empty block means end of archive
if h.name[0] == 0:
return None
d = TarInfo()
d.name = str(h.name, "utf-8").rstrip()
d.size = int(bytes(h.size).rstrip(), 8)
d.type = [REGTYPE, DIRTYPE][d.name[-1] == "/"]
self.subf = d.subf = FileSection(self.f, d.size, roundup(d.size, 512))
return d
def __iter__(self):
return self
def __next__(self):
v = self.next()
if v is None:
raise StopIteration
return v
def extractfile(self, tarinfo):
return tarinfo.subf
================================================
FILE: tests/data/fware_test_stub/frozen/utarfile.pyi
================================================
# make_stub_files: Thu 20 Jun 2019 at 23:08:04
from typing import Any, Dict, Optional, Sequence, Tuple, Union
Node = Any
def roundup(val: Any, align: Any) -> Any: ...
# 0: return val+align-1&~align-1
# ? 0: return val+align-number&align-number
class FileSection:
def __init__(self, f: Any, content_len: smallint, aligned_len: smallint) -> None: ...
def read(self, sz: Any = 65536) -> Union[Any, bytes]: ...
# 0: return b''
# 0: return bytes
# 1: return data
# ? 1: return data
def readinto(self, buf: Any) -> Union[Any, number]: ...
# 0: return 0
# 0: return number
# 1: return sz
# ? 1: return sz
def skip(self) -> None: ...
class TarInfo:
def __str__(self) -> str: ...
class TarFile:
def __init__(self, name: str = None, fileobj: Any = None) -> None: ...
def next(self) -> Optional[Any]: ...
# 0: return None
# 0: return None
# 1: return None
# 1: return None
# 2: return d
# ? 2: return d
def __iter__(self) -> Any: ...
# 0: return self
# ? 0: return self
def __next__(self) -> Any: ...
# 0: return v
# ? 0: return v
def extractfile(self, tarinfo: Any) -> Any: ...
# 0: return tarinfo.subf
# ? 0: return tarinfo.subf
================================================
FILE: tests/data/fware_test_stub/frozen/utokenize.py
================================================
# (c) 2019 Paul Sokolovsky, MIT license
from token import *
from ucollections import namedtuple
NL = 55
ENCODING = 56
tok_name[NL] = "NL"
tok_name[ENCODING] = "ENCODING"
class TokenInfo(namedtuple("TokenInfo", ("type", "string", "start", "end", "line"))):
def __str__(self):
return "TokenInfo(type=%d (%s), string=%r, line=%r)" % (
self.type,
tok_name[self.type],
self.string,
self.line,
)
def get_indent(l):
for i in range(len(l)):
if l[i] != " ":
return i, l[i:]
def tokenize(readline):
indent = 0
yield TokenInfo(ENCODING, "utf-8", 0, 0, "")
while True:
l = readline()
org_l = l
if not l:
break
i, l = get_indent(l)
if l == "\n":
yield TokenInfo(NL, l, 0, 0, org_l)
continue
if i > indent:
yield TokenInfo(INDENT, " " * (i - indent), 0, 0, org_l)
elif i < indent:
yield TokenInfo(DEDENT, "", 0, 0, org_l)
indent = i
while l:
if l[0].isdigit():
t = ""
while l and (l[0].isdigit() or l[0] == "."):
t += l[0]
l = l[1:]
yield TokenInfo(NUMBER, t, 0, 0, org_l)
elif l[0].isalpha():
name = ""
while l and (l[0].isalpha() or l[0].isdigit()):
name += l[0]
l = l[1:]
yield TokenInfo(NAME, name, 0, 0, org_l)
elif l[0] == "\n":
yield TokenInfo(NEWLINE, "\n", 0, 0, org_l)
break
elif l[0].isspace():
l = l[1:]
else:
yield TokenInfo(OP, l[0], 0, 0, org_l)
l = l[1:]
yield TokenInfo(ENDMARKER, "", 0, 0, "")
================================================
FILE: tests/data/fware_test_stub/frozen/utokenize.pyi
================================================
# make_stub_files: Thu 20 Jun 2019 at 23:08:04
from typing import Any, Dict, Optional, Sequence, Tuple, Union
Node = Any
class TokenInfo(namedtuple(str, Tuple[str, str, str, str, str])):
def __str__(self) -> str: ...
def get_indent(l: Any) -> Tuple[int, Any]: ...
def tokenize(readline: Any) -> None: ...
================================================
FILE: tests/data/fware_test_stub/info.json
================================================
{
"scope": "firmware",
"name": "MicroPython Official",
"repo": "micropython/micropython",
"module_path": "ports/{}/modules",
"firmware": "micropython",
"excluded_modules": [
"_boot.py",
"inisetup.py",
"upip.py",
"upip_utarfile.py"
],
"modules": ["utarfile", "utokenize"],
"devices": ["esp8266", "esp32"],
"path": "packages/micropython-official/info.json",
"versions": [
{
"version": "1.11.0",
"git_tag": "v1.11",
"sha": "6f75c4f3cd393131579db70cdf0b35d1fe5b95ab",
"latest": true,
"devices": ["esp8266", "esp32"]
},
{
"version": "1.10.0",
"git_tag": "v1.10",
"sha": "3e25d611ef3185b68558a20057d50b0d18dc67a0",
"latest": false,
"devices": ["esp8266", "esp32"]
},
{
"version": "1.9.4",
"git_tag": "v1.9.4",
"sha": "421b84af9968e582f324899934f52b3df60381ee",
"latest": false,
"devices": ["esp8266", "esp32"]
},
{
"version": "1.9.3",
"git_tag": "v1.9.3",
"sha": "fe45d78b1edd6d2202c3544797885cb0b12d4f03",
"latest": false,
"devices": ["esp8266"]
}
]
}
================================================
FILE: tests/data/project_test/.pylintrc
================================================
[MASTER]
# Loaded Stubs: esp32-micropython-1.11.0 esp8266-micropython-1.11.0
init-hook='import sys;sys.path.insert(1, ".micropy/micropython/frozen");sys.path.insert(1, ".micropy/micropython/frozen");sys.path.insert(1, ".micropy/esp32-micropython-1.11.0/frozen");sys.path.insert(1, ".micropy/esp8266-micropython-1.11.0/frozen");sys.path.insert(1, ".micropy/esp32-micropython-1.11.0/stubs");sys.path.insert(1, ".micropy/esp8266-micropython-1.11.0/stubs");sys.path.insert(1,"./lib")'
================================================
FILE: tests/data/project_test/.vscode/settings.json
================================================
{
"python.linting.enabled": true,
// Loaded Stubs: esp32-micropython-1.11.0 esp8266-micropython-1.11.0
"python.autoComplete.extraPaths": [
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/stubs",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/stubs"
],
"python.autoComplete.typeshedPaths": [
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/stubs",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/stubs"
],
"python.analysis.typeshedPaths": [
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/stubs",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/stubs"
],
"python.analysis.extraPaths": [
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/micropython/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/frozen",
"${workspaceRoot}/.micropy/esp32-micropython-1.11.0/stubs",
"${workspaceRoot}/.micropy/esp8266-micropython-1.11.0/stubs"
],
"python.linting.pylintEnabled": true
}
================================================
FILE: tests/data/project_test/micropy.json
================================================
{
"name": "NewProject",
"stubs": {
"esp32-micropython-1.11.0": "1.2.0",
"esp8266-micropython-1.11.0": "1.2.0",
"custom-stub": "../esp32_test_stub"
},
"config": {
"vscode": true
},
"packages": {
"some_pkg": "*"
},
"dev-packages": {
"dev_pkg": "*"
}
}
================================================
FILE: tests/data/stubber_test_stub/micropython.py
================================================
"""
Module: 'micropython' on esp32 1.11.0
"""
# MCU: (sysname='esp32', nodename='esp32', release='1.11.0', version='v1.11-132-gc24d81119 on 2019-07-08', machine='ESP32 module with ESP32')
# Stubber: 1.2.0
def alloc_emergency_exception_buf():
pass
def const():
pass
def heap_lock():
pass
def heap_unlock():
pass
def kbd_intr():
pass
def mem_info():
pass
def opt_level():
pass
def qstr_info():
pass
def schedule():
pass
def stack_use():
pass
================================================
FILE: tests/data/stubber_test_stub/modules.json
================================================
{
"firmware": {
"machine": "ESP32 module with ESP32",
"firmware": "esp32 1.11.0",
"nodename": "esp32",
"version": "1.11.0",
"release": "1.11.0",
"sysname": "esp32"
},
"stubber": { "version": "1.2.0" },
"modules": [
{
"file": "/stubs/esp32_1_11_0/micropython.py",
"module": "micropython"
}
]
}
================================================
FILE: tests/data/test_repo.json
================================================
{
"name": "Test Repo",
"location": "www.google.com",
"source": "www.google.com/file.json",
"path": "tarball/pkg/",
"packages": [
{
"name": "esp32-micropython-1.11.0",
"type": "device",
"sha256sum": "abc123"
},
{
"name": "esp32_LoBo-esp32_LoBo-3.2.24",
"type": "device",
"sha256sum": "123abc"
},
{
"name": "esp8266-micropython-1.9.4",
"type": "device",
"sha256sum": "1ab2c3"
}
]
}
================================================
FILE: tests/data/test_source.xml
================================================
test-repo
1000
false
packages/esp32-micropython-1.10.0.tar.gz
2019-07-10T09:16:54.000Z
"06290f2d047c0a8ffd7f7b79c98e7c33"
12819
STANDARD
packages/esp32-micropython-1.11.0.tar.gz
2019-07-10T09:16:54.000Z
"23987290858e648effa0d150c7fdd323"
13754
STANDARD
================================================
FILE: tests/data/test_sources.json
================================================
[
{
"name": "Test Repo",
"source": "https://google.com/repo.json"
}
]
================================================
FILE: tests/test_checks.py
================================================
import subprocess
from micropy.project import checks
def test_vscode_ext_min_version(mock_checks, mocker):
"""Test VSCode Extension Template Checks"""
assert checks.vscode_ext_min_version("ms-python.python")
assert not checks.vscode_ext_min_version("ms-python.python", min_version="2019.9.34911")
mock_subproc = mocker.patch.object(subprocess, "run")
mock_subproc.side_effect = [Exception]
assert checks.vscode_ext_min_version("ms-python.python")
================================================
FILE: tests/test_config.py
================================================
import json
import pytest
from micropy import config
class TestConfig:
default = {"one": 1, "two": 2, "sub": {"items": ["foo", "bar"], "bool": True}}
@pytest.fixture
def test_config(self, tmp_path):
cfg_file = tmp_path / "conf.json"
conf = config.Config(cfg_file, default=self.default)
return conf
def get_file_data(self, conf):
path = conf.source.file_path
return json.loads(path.read_text())
def test_default(self, tmp_path):
cfg_file = tmp_path / "conf.json"
conf = config.Config(cfg_file, default=self.default)
# should not create source file until first change
assert not cfg_file.exists()
# make change
conf.set("one", 2)
assert cfg_file.exists()
assert self.get_file_data(conf) == conf.raw()
def test_load_from_file(self, tmp_path, utils):
cfg_file = tmp_path / "conf.json"
cfg_file.write_text(json.dumps(self.default))
conf = config.Config(cfg_file, default={})
assert conf.config == self.default
# default should be overridden
conf = config.Config(cfg_file, default={"one": 1})
assert utils.dict_equal(conf.raw(), self.default)
def test_override(self, test_config, tmp_path):
conf = test_config
new_cfg = tmp_path / "newcfg.json"
conf.source = new_cfg
assert isinstance(conf.source, config.JSONConfigSource)
diff_cfg = tmp_path / "diffcfg.json"
conf.source.file_path = diff_cfg
conf.sync()
assert self.get_file_data(conf) == conf.config
def test_get(self, test_config):
conf = test_config
assert conf.get("one") == 1
assert conf.get("sub/bool")
assert conf.get("sub/items/0") == "foo"
assert conf.get("sub/items") == ["foo", "bar"]
def test_set(self, test_config):
conf = test_config
conf.set("one", 1)
conf.set("one/sub/items.0", "foobar")
data = json.loads(conf.source.file_path.read_text())
assert data == conf.config
def test_update_from_file(self, test_config):
conf = test_config
cfg_file = conf.source.file_path
new = self.default.copy()
new["one"] = 45
new["section"] = {"value": "foo"}
cfg_file.write_text(json.dumps(new))
conf = config.Config(cfg_file, default=self.default)
assert conf.get("one") == 45
assert conf.get("section/value") == "foo"
assert conf.config == new
def test_extend(self, test_config):
conf = test_config
conf.extend("sub/items", ["foobar", "barfoo"])
file_data = json.loads(conf.source.file_path.read_text())
print(file_data)
assert file_data["sub"]["items"] == ["foo", "bar", "foobar", "barfoo"]
assert conf.get("sub/items") == ["foo", "bar", "foobar", "barfoo"]
def test_upsert(self, test_config):
conf = test_config
conf.upsert("sub/items", ["barfoo", "foobar", "bar", "foo"])
file_data = self.get_file_data(conf)
assert file_data["sub"]["items"] == ["barfoo", "foobar", "bar", "foo"]
assert conf.get("sub/items") == ["barfoo", "foobar", "bar", "foo"]
def test_dict(self):
conf = config.Config(source_format=config.DictConfigSource, default=self.default)
assert conf.get("one") == 1
conf.set("sub/bool", False)
assert not conf.get("sub/bool")
================================================
FILE: tests/test_highlevel.py
================================================
import json
import shutil
from copy import deepcopy
from pathlib import Path
import pytest
from micropy import project
@pytest.fixture
def mock_requests(mocker, requests_mock, test_archive):
mock_source = {
"name": "Micropy Stubs",
"location": "https://codeload.github.com/BradenM/micropy-stubs",
"source": "https://raw.githubusercontent.com/bradenm/micropy-stubs/source.json",
"path": "legacy.tar.gz/pkg/",
"packages": [
{
"name": "micropython",
"type": "firmware",
"sha256sum": "7ff2cce0237268cd52164b77b6c2df6be6249a67ee285edc122960af869b8ed2",
},
],
}
requests_mock.get(
"https://raw.githubusercontent.com/BradenM/micropy-stubs/master/source.json",
json=mock_source,
)
requests_mock.get(
"https://codeload.github.com/BradenM/micropy-stubs/legacy.tar.gz/pkg/micropython",
content=test_archive,
)
@pytest.mark.skip(reason="Tests need some serious cleanup before something like this could work.")
@pytest.mark.usefixtures("mock_requests")
class TestCreateProject:
mp = None
expect_mp_data = staticmethod(
lambda name: {
"name": "NewProject",
"stubs": {name: "1.2.0"},
"packages": {},
"dev-packages": {"micropy-cli": "*"},
"config": {"vscode": True, "pylint": True},
}
)
expect_vsc_data = staticmethod(
lambda name: [
str(Path(f".micropy/{name}/frozen")),
str(Path(".micropy/fware_test_stub/frozen")),
str(Path(f".micropy/{name}/stubs")),
str(Path(".micropy/NewProject")),
]
)
def build_project(self, mpy, path):
proj_path = path / "highlevel_new_project"
if proj_path.exists():
shutil.rmtree(proj_path, ignore_errors=True)
proj = project.Project(proj_path)
proj_stub = list(mpy.stubs)[0]
proj.add(project.modules.StubsModule, mpy.stubs, stubs=[proj_stub])
proj.add(project.modules.PackagesModule, "requirements.txt")
proj.add(project.modules.DevPackagesModule, "dev-requirements.txt")
proj.add(project.modules.TemplatesModule, ("vscode", "pylint"))
return (proj, mpy, proj_stub)
def check_mp_data(self, path, utils, name="esp32", expect=None):
expect_data = expect or self.expect_mp_data(name)
micropy_file = path
assert micropy_file.exists()
mp_data = json.loads(micropy_file.read_text())
assert utils.dict_equal(mp_data, expect_data)
def check_vscode(self, path, name="esp32", expect=None):
vsc_path = path / ".vscode" / "settings.json"
assert vsc_path.exists()
with vsc_path.open() as f:
lines = [line.strip() for line in f.readlines() if line]
valid = [line for line in lines if "//" not in line[:2]]
vsc_data = json.loads("\n".join(valid))
expect_data = expect or self.expect_vsc_data(name)
assert vsc_data["python.analysis.typeshedPaths"] == expect_data
def test_setup_stubs(self, mock_micropy, get_stub_paths, shared_datadir):
mpy = mock_micropy
stub_path = shared_datadir / "esp32_test_stub"
mpy.stubs.add(stub_path)
def test_create_project(self, micropy_stubs, tmp_path, utils):
proj, mpy, proj_stub = self.build_project(micropy_stubs(), tmp_path)
proj.create()
self.check_mp_data(proj.info_path, utils, name=proj_stub.path.name)
self.check_vscode(proj.path, name=proj_stub.path.name)
def test_add_package(self, mock_pkg, micropy_stubs, tmp_path, utils):
proj, mpy, proj_stub = self.build_project(micropy_stubs(), tmp_path)
proj.create()
proj.add_package("newpackage")
expect_data = deepcopy(self.expect_mp_data(proj_stub.path.name))
expect_data["packages"]["newpackage"] = "*"
self.check_mp_data(proj.info_path, utils, expect=expect_data)
@pytest.mark.parametrize("local_pkg", ["src/lib/coolpackage", "/tmp/absolute/package"])
def test_add_local_package(self, tmp_path, local_pkg, micropy_stubs, utils):
proj, mpy, proj_stub = self.build_project(micropy_stubs(), tmp_path)
proj.create()
local_package = Path(local_pkg)
if not local_package.is_absolute():
local_package = proj.path / Path(local_pkg)
local_package.mkdir(parents=True, exist_ok=True)
(local_package / "__init__.py").touch()
local_path = utils.str_path(local_pkg)
proj.add_package(f"-e {local_path}")
# check micropy.json
expect_data = deepcopy(self.expect_mp_data(proj_stub.path.name))
expect_data["packages"][local_package.name] = f"-e {local_path}"
self.check_mp_data(proj.info_path, utils, expect=expect_data)
# check vscode settings
expect_vscode = deepcopy(self.expect_vsc_data(proj_stub.path.name))
expect_vscode.append(local_path)
self.check_vscode(proj.path, expect=expect_vscode)
shutil.rmtree(proj.path)
================================================
FILE: tests/test_main.py
================================================
import micropy.exceptions as exc
import pytest
from micropy import data, main
def test_setup(mock_micropy_path):
"""Tests MicroPy Initial Setup"""
expect_mp_dir = mock_micropy_path
expect_stubs_dir = mock_micropy_path / "stubs"
config = main.MicroPyOptions(root_dir=mock_micropy_path)
mp = main.MicroPy(options=config)
assert expect_mp_dir.exists()
assert expect_stubs_dir.exists()
# Test after initial setup
mp_ = main.MicroPy(options=config)
assert len(mp_.stubs) == len(mp.stubs)
def test_add_stub(mock_micropy, shared_datadir):
"""Test Adding Valid Stub"""
fware_path = shared_datadir / "fware_test_stub"
stub_path = shared_datadir / "esp8266_test_stub"
stubs = mock_micropy.stubs
fware_stub = stubs.add(fware_path, data.STUB_DIR)
stub = stubs.add(stub_path, data.STUB_DIR)
assert stub in list(mock_micropy.stubs)
assert stub.path in data.STUB_DIR.iterdir()
assert stub.path.exists()
assert fware_stub in list(mock_micropy.stubs._firmware)
def test_stub_error():
with pytest.raises(exc.StubError):
raise exc.StubError(None)
def test_resolve_project(mocker, mock_micropy):
mock_proj = mocker.patch.object(main, "Project").return_value
mock_proj.exists = False
assert not mock_micropy.resolve_project(".").exists
mock_proj.exists = True
assert mock_micropy.resolve_project(".")
================================================
FILE: tests/test_packages.py
================================================
from pathlib import Path
import pytest
from boltons.namedutils import namedlist
from micropy import packages
EXPPKG = namedlist("ExpectPackage", ["name", "specs", "full_name"])
class TestPackages:
class MockSource:
def __init__(self, pkg, has_init):
self.pkg = pkg
self.has_init = has_init
@pytest.fixture(params=[True, False], ids=["package", "module"])
def mock_source(self, request, mock_pkg):
if request.param:
# true packages vs file
(mock_pkg / "__init__.py").touch()
return self.MockSource(mock_pkg, request.param)
@pytest.fixture(params=[True, False], ids=["package", "module"])
def mock_source_path(self, request, tmp_path):
path = tmp_path / "file.py"
if request.param is True:
pkg = tmp_path
path = tmp_path / "__init__.py"
else:
pkg = tmp_path
path.touch()
return self.MockSource(pkg, request.param)
@pytest.mark.parametrize(
"package,expect",
[
(
"git+https://github.com/jczic/MicroWebSrv2.git@master#egg=MicroWebSrv2",
packages.VCSDependencySource,
),
("picoweb", packages.PackageDependencySource),
("-e /foobar/pkg", packages.LocalDependencySource),
],
)
def test_factory(self, package, expect):
source = packages.create_dependency_source(package)
assert isinstance(source, expect)
@pytest.mark.parametrize(
"requirement,expect",
[
(
["git+https://github.com/jczic/MicroWebSrv2.git@master#egg=MicroWebSrv2"],
[
"microwebsrv2",
"git+https://github.com/jczic/MicroWebSrv2.git@master#egg=MicroWebSrv2",
"git+https://github.com/jczic/MicroWebSrv2.git@master#egg=MicroWebSrv2",
],
),
(["picoweb"], ["picoweb", "*", "picoweb"]),
(["picoweb==^7.1"], ["picoweb", "==^7.1", "picoweb==^7.1"]),
(["BlynkLib==0.0.0"], ["blynklib", "==0.0.0", "blynklib==0.0.0"]),
(
["-e /foobar/somepkg", "somepackage"],
["somepackage", "-e /foobar/somepkg", "-e /foobar/somepkg"],
),
(["-e /foobar/somepkg"], ["somepkg", "-e /foobar/somepkg", "-e /foobar/somepkg"]),
],
)
def test_package(self, mock_pkg, requirement, expect):
source = packages.create_dependency_source(*requirement)
pkg = source.package
assert pkg.name == expect[0] # name
assert pkg.pretty_specs == expect[1] # specs
assert str(pkg) == expect[2] # full name ()
def test_package_source(self, mock_source):
def format_desc(x):
return f"desc{x}"
source = packages.create_dependency_source("blynklib", format_desc=format_desc)
with source as files:
if mock_source.has_init:
assert isinstance(files, Path)
assert mock_source.pkg.name == files.name
else:
assert isinstance(files, list)
file_names = [(p.name, s.name) for p, s in files]
file_names = list(sum(file_names, ()))
assert sorted(["file.py", "file.pyi", "module.py", "module.pyi"]) == sorted(
file_names
)
def test_package_path(self, mock_source_path):
source = packages.create_dependency_source(f"-e {mock_source_path.pkg}")
with source as files:
if mock_source_path.has_init:
# is proper package
assert files.is_dir()
if mock_source_path.has_init is False:
# is module
assert files.is_dir()
assert files == source.package.path
@pytest.mark.parametrize(
"pkg,expect",
[
(("micropy-cli", "*"), EXPPKG("micropy-cli", "*", "micropy-cli")),
(("blynklib", "==0.0.0"), EXPPKG("blynklib", "==0.0.0", "blynklib==0.0.0")),
(
("custompkg", "-e src/lib/custompackage"),
EXPPKG("custompkg", "-e src/lib/custompackage", "-e src/lib/custompackage"),
),
],
)
def test_package_from_text(self, pkg, expect, utils):
pkg = packages.Package.from_text(*pkg)
assert pkg.name == expect.name
assert pkg.full_name == pkg.full_name
assert pkg.pretty_specs == expect.specs
is_e = "-e" in pkg.full_name
assert pkg.editable == is_e
if pkg.editable:
assert pkg.path == Path("src/lib/custompackage")
else:
assert pkg.path is None
================================================
FILE: tests/test_project.py
================================================
import shutil
import pytest
from boltons import setutils
from micropy import config, packages, project
from micropy.exceptions import RequirementException
from micropy.project import modules
from requests import RequestException
@pytest.fixture
def get_module(tmp_path):
def _get_module(names, mp, **kwargs):
_templates = list(modules.TemplatesModule.TEMPLATES.keys())
mods = {
"stubs": (
(
modules.StubsModule,
mp.stubs,
),
{"stubs": list(mp.stubs)[:2]},
),
"template": ((modules.TemplatesModule, _templates), {}),
"reqs": ((modules.PackagesModule, "requirements.txt"), {}),
"dev-reqs": ((modules.DevPackagesModule, "dev-requirements.txt"), {}),
}
if names == "all":
names = ",".join(list(mods.keys()))
_mods = [mods[n.strip()] for n in names.split(",") if n]
yield from _mods
return _get_module
@pytest.fixture
def get_config():
def _get_config(request, name="NewProject", stubs=None, templates=None, packages=None):
packages = packages or dict()
templates = templates or ["vscode", "pylint"]
stubs = stubs or []
_mods = {
"base": {
"name": name,
},
"stubs": {"stubs": {s.name: s.stub_version for s in stubs}},
"template": {"config": {t: (t in templates) for t in templates}},
"reqs": {"packages": packages.get("reqs", {})},
"dev-reqs": {"dev-packages": packages.get("dev-reqs", {"micropy-cli": "*"})},
}
if request == "all":
request = ",".join(list(_mods.keys()))
mods = request.split(",")
test_config = _mods["base"].copy()
for m in mods:
test_config = {**test_config, **_mods[m or "base"]}
return test_config
return _get_config
@pytest.fixture
def get_context():
def _get_context(request, stubs=None, pkg_path=None, data_dir=None):
stubs = stubs or []
_frozen = [s.frozen for s in stubs]
_fware = [s.firmware.frozen for s in stubs if s.firmware is not None]
_stub_paths = [s.stubs for s in stubs]
_paths = setutils.IndexedSet([*_frozen, *_fware, *_stub_paths])
_context = {
"base": {},
"stubs": {
"stubs": set(stubs),
"paths": _paths,
"datadir": data_dir,
},
"reqs": {"paths": setutils.IndexedSet([pkg_path]), "local_paths": set()},
}
if request == "all":
request = ",".join(list(_context.keys()))
mods = request.split(",")
if "reqs" in mods and "stubs" in mods:
_ctx = _context["stubs"].copy()
_ctx["paths"].update(_context["reqs"]["paths"])
_ctx["local_paths"] = _context["reqs"]["local_paths"]
return _ctx
context = {}
for m in mods:
context = {**context, **_context.get(m, {})}
return context
return _get_context
@pytest.fixture
def test_project(micropy_stubs, tmp_path, get_module):
def _test_project(mods="", path=None):
mp = micropy_stubs()
proj_path = path if path else tmp_path / "NewProject"
proj = project.Project(proj_path)
mods = get_module(mods, mp)
for m in mods:
proj.add(*m[0], **m[1])
yield proj, mp
return _test_project
@pytest.fixture
def tmp_project(tmp_path, shared_datadir):
path = shared_datadir / "project_test"
proj_path = tmp_path / "NewProject"
shutil.copytree(path, proj_path)
return proj_path
def test_implementation(mocker):
"""Test Abstract Base Class"""
mocker.patch.object(modules.ProjectModule, "__abstractmethods__", new_callable=set)
inst = modules.ProjectModule()
inst.config
inst.load()
inst.create()
inst.update()
inst.add([])
inst.remove([])
def test_project_queue(tmp_path, mock_cwd, mocker):
mocker.patch("micropy.project.project.Config")
path = tmp_path / "project_path"
proj = project.Project(path)
# should create/load projects based on .PRIORITY,
# not based on order added
mock_parent = mocker.Mock()
mock_parent.m1.return_value = mocker.Mock(PRIORITY=8)
mock_parent.m2.return_value = mocker.Mock(PRIORITY=0)
mock_parent.m3.return_value = mocker.Mock(PRIORITY=9)
proj.add(mock_parent.m1)
proj.add(mock_parent.m2)
proj.add(mock_parent.m3)
proj.create()
# should be called in order of priority (9 being highest)
mock_parent.assert_has_calls(
[
# adding them doesn't matter
mocker.call.m1(log=mocker.ANY, parent=mocker.ANY),
mocker.call.m2(log=mocker.ANY, parent=mocker.ANY),
mocker.call.m3(log=mocker.ANY, parent=mocker.ANY),
# create order should differ
mocker.call.m3().create(),
mocker.call.m1().create(),
mocker.call.m2().create(),
]
)
@pytest.mark.parametrize("mods", ["", "stubs", "template", "reqs", "dev-reqs", "all"])
class TestProject:
def test_create(self, test_project, mock_checks, mods, utils):
test_proj, _ = next(test_project(mods))
assert test_proj.config.get("name") == "NewProject"
resp = test_proj.create()
assert str(resp) == utils.str_path(test_proj.path)
assert test_proj.exists
assert test_proj.info_path.exists()
if test_proj._children:
test_proj.remove(type(test_proj._children[-1]))
def test_config(self, test_project, get_config, mods, utils):
test_proj, mp = next(test_project(mods))
expect_config = get_config(mods, stubs=list(mp.stubs)[:2])
assert test_proj.config._config == {"name": "NewProject"}
test_proj.create()
assert utils.dict_equal(test_proj.config.raw(), expect_config)
# should be the same post-load
test_proj.load()
assert utils.dict_equal(test_proj.config.raw(), expect_config)
def test_context(self, test_project, get_context, mods, tmp_path, utils):
proj_path = tmp_path / "tmpprojpath"
test_proj, mp = next(test_project(mods, path=proj_path))
test_proj.create()
pkg_path = test_proj.data_path / test_proj.name
expect_context = get_context(
mods, stubs=mp.stubs, pkg_path=pkg_path, data_dir=test_proj.data_path
)
assert utils.dict_equal(test_proj.context.raw(), expect_context)
test_proj.load() # should be the same post load
assert utils.dict_equal(test_proj.context.raw(), expect_context)
def test_load(self, mock_pkg, tmp_project, mock_checks, test_project, mods, utils):
proj, mp = next(test_project(mods, path=tmp_project))
proj.load(run_checks=mp.RUN_CHECKS)
def test_update(self, mock_pkg, tmp_project, mock_checks, test_project, mods, utils):
proj, mp = next(test_project(mods, path=tmp_project))
proj.update()
class TestStubsModule:
@pytest.fixture()
def stub_module(self, mocker, tmp_path, micropy_stubs):
mp = micropy_stubs()
parent_mock = mocker.MagicMock()
parent_mock.data_path = tmp_path / ".micropy"
stub_item = list(mp.stubs)[0]
stub_mod = modules.StubsModule(
mp.stubs, stubs=[stub_item], parent=parent_mock, log=mocker.Mock()
)
return stub_mod, mp
def test_load(self, tmp_project, stub_module, get_stub_paths):
custom_stub = next(get_stub_paths())
stub_mod, mp = stub_module
stub_data = {
"esp32-micropython-1.11.0": "1.2.0",
"esp8266-micropython-1.11.0": "1.2.0",
"custom-stub": str(custom_stub),
}
stub_mod.parent.config.get.return_value = stub_data
stub_mod.stub_manager.add.return_value = mp.stubs
assert stub_mod.load(stub_data=stub_data)
def test_add_stub(self, test_project, get_stub_paths, mocker):
proj, mp = next(test_project("stubs"))
proj.create()
mocker.patch.object(config.config.dpath, "merge")
stub_path = next(get_stub_paths())
stub = mocker.MagicMock()
stub.path = stub_path
stub.frozen = stub_path / "frozen"
stub.stubs = stub_path / "stubs"
stub.firmware = stub
proj.add_stub(stub)
print(proj.stubs)
class TestPackagesModule:
@pytest.fixture
def test_package(self, mocker, tmp_path, mock_pkg, test_project):
new_path = tmp_path / "somepath"
proj, mp = next(test_project("reqs", path=new_path))
proj.create()
return proj
@pytest.mark.flaky
def test_add_package(self, test_project, mock_pkg, tmp_path):
proj, mp = next(test_project("reqs"))
proj.create()
proj.add_package("somepkg==7")
# Shouldn't allow duplicate pkgs
res = proj.add_package("somepkg")
assert res is None
@pytest.mark.flaky()
def test_add_local_package(self, test_project, tmp_path, utils):
proj, mp = next(test_project("reqs"))
pkg = tmp_path / "custompkg"
pkg.mkdir(parents=True)
# Add from path
proj.add_package(f"-e {pkg}")
def test_package_error(self, test_project, mock_pkg, mocker, tmp_path, caplog):
packages.source_package.utils.ensure_valid_url.side_effect = [RequestException]
path = tmp_path / "newdir"
proj, mp = next(test_project("reqs", path=path))
proj.create()
with pytest.raises(RequirementException):
pkgs = proj.add_package("newpackage")
assert proj.config.get("packages/newpackage", None) is None
assert "newpackage" not in pkgs.keys()
def test_add_dev_package(self, mocker, mock_pkg, test_project):
proj, mp = next(test_project("reqs,dev-reqs"))
proj.create()
proj.add_package("somepkg")
proj.add_package("anotha_pkg", dev=True)
================================================
FILE: tests/test_pyd.py
================================================
from __future__ import annotations
import hashlib
import stat
import sys
from pathlib import PurePosixPath
from typing import Literal, Type
from unittest.mock import ANY, MagicMock
import pytest
from micropy.exceptions import PyDeviceError
from micropy.pyd import backend_rshell, backend_upydevice, consumers
from micropy.pyd.abc import DevicePath, MetaPyDeviceBackend, PyDeviceConsumer
from micropy.pyd.pydevice import PyDevice
from pytest_mock import MockFixture
@pytest.fixture
def mock_upy(mocker: MockFixture):
mock_upy = mocker.patch.object(backend_upydevice, "upydevice", autospec=True)
return mock_upy
@pytest.fixture
def mock_upy_uos(mocker: MockFixture):
mock_uos = mocker.patch.object(backend_upydevice, "UOS", autospec=True)
return mock_uos
@pytest.fixture
def mock_upy_retry(mocker: MockFixture):
mock_retry = mocker.patch.object(backend_upydevice, "retry", autospec=True)
return mock_retry
@pytest.fixture
def mock_rsh(mocker: MockFixture):
mock_rsh = mocker.patch.object(backend_rshell, "rsh", autospec=True)
mock_rsh.connect = mocker.Mock()
mock_rsh.find_serial_device_by_port = mocker.Mock()
mock_rsh.cp = mocker.Mock()
return mock_rsh
class MockAdapter:
backend: Literal["upy", "rsh"]
mock: MagicMock
mock_uos: MagicMock
def __init__(self, backend: Literal["upy", "rsh"], mock: MagicMock, mock_uos=None):
self.backend = backend
self.mock = mock
if mock_uos:
self.mock_uos = mock_uos
@property
def is_rsh(self) -> bool:
return self.backend == "rsh"
@property
def is_upy(self) -> bool:
return self.backend == "upy"
@property
def connect(self) -> MagicMock:
return self.mock.Device.return_value.connect if self.is_upy else self.mock.connect
@property
def device(self):
return self.mock.Device.return_value if self.is_upy else self.mock
MOCK_PORT = "/dev/port"
IS_WIN_PY310 = sys.version_info >= (3, 10) and sys.platform.startswith("win")
@pytest.fixture(params=[True, False])
def with_consumer(request: pytest.FixtureRequest, mocker: MockFixture):
if request.param is True:
consumer_mock = mocker.MagicMock(PyDeviceConsumer)
return dict(consumer=consumer_mock)
return dict()
class TestPyDeviceBackend:
backend: Literal["upy", "rsh"]
pyd_cls: Type[MetaPyDeviceBackend]
@pytest.fixture(
params=[
"upy",
pytest.param(
"rsh",
marks=pytest.mark.skipif(
IS_WIN_PY310,
reason="skipping due to rshell/pyreadline broken for >=py310 on windows.",
),
),
]
)
def pymock_setup(self, request: pytest.FixtureRequest):
self.backend = request.param
self.pyd_cls = (
backend_upydevice.UPyDeviceBackend
if self.backend == "upy"
else backend_rshell.RShellPyDeviceBackend
)
@pytest.fixture
def pymock(self, pymock_setup, request: pytest.FixtureRequest, mock_upy_uos):
mod_mock = request.getfixturevalue(f"mock_{self.backend}")
m = MockAdapter(self.backend, mod_mock, mock_upy_uos)
yield m
m.mock.reset_mock()
def test_init(self, pymock):
m = pymock
pyd = self.pyd_cls().establish(MOCK_PORT)
if self.backend == "upy":
m.mock.Device.assert_called_once_with(MOCK_PORT, init=True, autodetect=True)
else:
assert m.mock.ASCII_XFER is False
assert m.mock.QUIET is True
assert pyd.location == MOCK_PORT
def test_init__connect_fail(self, pymock):
m = pymock
m.connect.side_effect = [SystemExit, SystemExit]
with pytest.raises(PyDeviceError):
self.pyd_cls().establish(MOCK_PORT).connect()
def test_connected__default(self, pymock):
m = pymock
pyd = self.pyd_cls()
assert not pyd.connected
if m.is_upy:
with pytest.raises(PyDeviceError):
pyd._ensure_connected()
def test_connected(self, pymock):
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.connect()
assert pyd.connected
def test_disconnect(self, pymock):
m = pymock
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.disconnect()
if m.is_upy:
m.mock.Device.return_value.disconnect.assert_called_once()
def test_reset(self, pymock, mocker: MockFixture):
mocker.patch("time.sleep")
m = pymock
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.reset()
if m.is_upy:
m.device.reset.assert_called_once()
m.device.connect.assert_called_once()
def test_eval(self, pymock, mocker: MockFixture, with_consumer):
m = pymock
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.connect()
pyd._pydevice.exec_raw = mocker.Mock(return_value=["", ""])
pyd.eval("abc", **with_consumer)
has_cons = "consumer" in with_consumer
if m.is_upy:
m.device.cmd.assert_called_once_with("abc", follow=True, pipe=mocker.ANY)
else:
if has_cons:
pyd._pydevice.exec_raw.assert_called_once_with("abc", data_consumer=mocker.ANY)
else:
pyd._pydevice.exec_raw.assert_called_once_with("abc", data_consumer=None)
def test_eval_script(self, pymock, mocker: MockFixture, with_consumer):
m = pymock
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.connect()
pyd._pydevice.exec_raw = mocker.Mock(return_value=[b"", b""])
pyd.eval_script(b"import something", "somefile.py", **with_consumer)
if m.is_upy:
if "consumer" in with_consumer:
with_consumer["consumer"].on_start.assert_called_once()
m.device.cmd.assert_any_call("import ubinascii")
mock_random = mocker.patch("random.sample", return_value="abc.py")
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.connect()
pyd.eval_script(b"import something", None, **with_consumer)
mock_random.assert_called_once()
@property
def read_file_effects(self):
cmd_effects = [
None, # import ubin
None, # open file
8, # content size
0, # seek start
0, # pos
b"Hi there", # read,
8, # pos
None, # close
]
return cmd_effects
def test_read_file(self, mock_upy_retry, pymock):
m = pymock
if m.is_rsh:
# upy only
return
# content size
m.mock_uos.return_value.stat.side_effect = ["ENOENT", [0, 0, 0, 0, 0, 0, 8]]
# chunk size will default to 8/4
m.device.cmd.side_effect = [8, b"Hi", b" t", b"he", b"re"]
pyd = self.pyd_cls().establish(MOCK_PORT)
res = pyd.read_file("/some/path", verify_integrity=False)
assert res == "Hi there"
mock_upy_retry.assert_not_called()
assert m.device.cmd.call_count == 5
def test_read_file__with_integrity(self, mock_upy_retry, pymock):
m = pymock
if m.is_rsh:
# upy only
return
# content size
m.mock_uos.return_value.stat.side_effect = ["ENOENT", [0, 0, 0, 0, 0, 0, 8]]
# chunk size will default to 8/4
chunks = [b"Hi", b" t", b"he", b"re"]
chunks_hash = hashlib.sha256()
for chunk in chunks:
chunks_hash.update(chunk)
m.device.cmd.side_effect = [8, *chunks, chunks_hash.hexdigest()]
pyd = self.pyd_cls().establish(MOCK_PORT)
res = pyd.read_file("/some/path", verify_integrity=True)
assert res == "Hi there"
mock_upy_retry.assert_not_called()
assert m.device.cmd.call_count == 6
def test_read_file__with_integrity_fail(self, mock_upy_retry, pymock, mocker: MockFixture):
m = pymock
if m.is_rsh:
# upy only
return
# content size
m.mock_uos.return_value.stat.side_effect = ["ENOENT", [0, 0, 0, 0, 0, 0, 8]]
# chunk size will default to 8/4
chunks = [b"Hi", b" t", b"he", b"re"]
m.device.cmd.side_effect = [8, *chunks, "notrightsha"]
reset_mock = mocker.MagicMock()
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.reset = reset_mock
pyd.read_file("/some/path", verify_integrity=True)
assert reset_mock.call_count == 4
assert m.device.cmd.call_count == 6
def test_read_file__bad_chunk(self, mock_upy_retry, pymock, mocker: MockFixture):
m = pymock
if m.is_rsh:
# upy only
return
# content size
m.mock_uos.return_value.stat.side_effect = ["ENOENT", [0, 0, 0, 0, 0, 0, 8]]
# chunk size will default to 8/4
chunks = [b"Hi", b"", b" t", b"he", b"re"]
m.device.cmd.side_effect = [8, *chunks]
m.mock.Device.return_value.reset = mocker.MagicMock(return_value=None)
pyd = self.pyd_cls().establish(MOCK_PORT)
res = pyd.read_file("/some/path", verify_integrity=False)
assert res == "Hi there"
mock_upy_retry.assert_not_called()
assert m.device.cmd.call_count == 6
assert m.mock.Device.return_value.reset.call_count == 1
def test_read_file__error_chunk(self, mock_upy_retry, pymock, mocker: MockFixture):
m = pymock
if m.is_rsh:
# upy only
return
# content size
m.mock_uos.return_value.stat.side_effect = ["ENOENT", [0, 0, 0, 0, 0, 0, 8]]
# chunk size will default to 8/4
chunks = [b"Hi", RuntimeError, b" t", b"he", b"re"]
m.device.cmd.side_effect = [8, *chunks]
reset_mock = mocker.MagicMock()
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.reset = reset_mock
pyd.read_file("/some/path", verify_integrity=True)
assert reset_mock.call_count == 5
assert m.device.cmd.call_count == 4
def test_pull_file(self, pymock, tmp_path, mock_upy_retry):
m = pymock
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.connect()
if m.is_upy:
m.mock_uos.return_value.stat.side_effect = [
"ENOENT", # resolve root
"ENOENT", # resolve root
[0, 0, 0, 0, 0, 0, 8],
]
# chunk size will default to 8/4
m.device.cmd.side_effect = [8, b"Hi", b" t", b"he", b"re"]
pyd.pull_file("/some/path", (tmp_path / "out.txt"), verify_integrity=False)
assert (tmp_path / "out.txt").read_text() == "Hi there"
else:
m.mock.find_serial_device_by_port.return_value.name_path = "/"
pyd.pull_file("/some/path", (tmp_path / "out.txt"))
m.mock.cp.assert_called_once_with("/some/path", str(tmp_path / "out.txt"))
def test_iter_files(self, pymock):
m = pymock
if m.is_rsh:
return
pyd = self.pyd_cls().establish(MOCK_PORT)
pyd.connect()
m.device.cmd.side_effect = [
None,
None,
None,
[("name", "stat", "", "")],
None,
[("name", stat.S_IFDIR, "", "")],
None,
[("underName", "", "", "")],
]
assert list(pyd.iter_files("/some/path")) == []
m.device.cmd.assert_any_call("import uos", silent=True)
m.device.cmd.assert_any_call("list(uos.ilistdir('/some/path'))", silent=True, rtn_resp=True)
assert list(pyd.iter_files("/some/path")) == [PurePosixPath("/some/path/name")]
assert list(pyd.iter_files("/some/path")) == [PurePosixPath("/some/path/name/underName")]
class TestPyDevice:
@pytest.fixture
def mock_backend(self, mocker: MockFixture):
mock = mocker.MagicMock(MetaPyDeviceBackend)
mock.return_value.establish.return_value = mock.return_value
return mock
@pytest.fixture(
params=[
["dir", "/some/dir"],
["file", "/some/file.txt"],
["dir", r"c:\\some\\dos\\dir"],
["file", r"c:\\some\\dos\\file.txt"],
]
)
def path_type(self, request: pytest.FixtureRequest):
return request.param
@pytest.mark.parametrize(
"pyd_kwargs",
[
dict(),
dict(auto_connect=False),
dict(delegate_cls=lambda *x: x, stream_consumer="stream", message_consumer="message"),
],
)
def test_init(self, mock_backend, pyd_kwargs):
pyd = PyDevice(MOCK_PORT, backend=mock_backend, **pyd_kwargs)
mock_backend.assert_called_once()
mock_backend.return_value.establish.assert_called_once_with(MOCK_PORT)
if pyd_kwargs.get("auto_connect", True):
mock_backend.return_value.connect.assert_called_once()
if "delegate_cls" in pyd_kwargs:
assert pyd.consumer == (
"stream",
"message",
)
def test_connect(self, mock_backend):
pyd = PyDevice(MOCK_PORT, backend=mock_backend, auto_connect=False)
pyd.connect()
mock_backend.return_value.connect.assert_called_once()
def test_disconnect(self, mock_backend):
pyd = PyDevice(MOCK_PORT, backend=mock_backend)
pyd.disconnect()
mock_backend.return_value.disconnect.assert_called_once()
def test_copy_from(self, mock_backend, path_type, mocker):
pyd = PyDevice(MOCK_PORT, backend=mock_backend)
ptype, p = path_type
pyd.copy_from(p, "/host/path")
if ptype == "dir":
mock_backend.return_value.copy_dir.assert_called_once_with(
p,
"/host/path",
consumer=mocker.ANY,
verify_integrity=mocker.ANY,
exclude_integrity=None,
)
else:
mock_backend.return_value.pull_file.assert_called_once_with(
p,
"/host/path",
consumer=mocker.ANY,
verify_integrity=mocker.ANY,
)
def test_copy_from__integrity(self, mock_backend, path_type, mocker):
pyd = PyDevice(MOCK_PORT, backend=mock_backend)
ptype, p = path_type
if ptype == "dir":
pyd.copy_from(p, "/host/path", verify_integrity=True, exclude_integrity={"abc.py"})
else:
pyd.copy_from(p, "/host/path", verify_integrity=True)
if ptype == "dir":
mock_backend.return_value.copy_dir.assert_called_once_with(
p,
"/host/path",
consumer=mocker.ANY,
verify_integrity=True,
exclude_integrity={"abc.py"},
)
else:
mock_backend.return_value.pull_file.assert_called_once_with(
p, "/host/path", consumer=mocker.ANY, verify_integrity=True
)
def test_copy_to(self, mock_backend, path_type, mocker):
pyd = PyDevice(MOCK_PORT, backend=mock_backend)
ptype, p = path_type
if ptype == "dir":
with pytest.raises(RuntimeError):
pyd.copy_to("/host/path", p)
else:
pyd.copy_to("/host/path/f.txt", p)
mock_backend.return_value.push_file.assert_called_once_with(
"/host/path/f.txt", p, consumer=mocker.ANY
)
def test_remove(self, mock_backend):
pyd = PyDevice(MOCK_PORT, backend=mock_backend)
pyd.remove(DevicePath("/some/path"))
mock_backend.return_value.remove.assert_called_once_with("/some/path")
class TestConsumers:
@pytest.mark.parametrize(
"on_desc,expected_tqdm",
[
[None, dict(total=5, unit="B", unit_scale=True, unit_divisor=1024, bar_format=ANY)],
[
lambda n, cfg: ("other", dict(override=True)),
dict(
total=5,
unit="B",
unit_scale=True,
unit_divisor=1024,
bar_format=ANY,
override=True,
),
],
],
)
def test_progress_consumer(self, mocker: MockFixture, on_desc, expected_tqdm):
tqdm_mock = mocker.patch.object(consumers, "tqdm")
cons = consumers.ProgressStreamConsumer(on_desc)
cons.on_start(name="abc", size=5)
tqdm_mock.assert_called_once_with(**expected_tqdm)
cons.on_update(size=1)
tqdm_mock.return_value.update.assert_called_once_with(1)
cons.on_end()
tqdm_mock.return_value.close.assert_called_once()
def test_delegate(self):
delegate = consumers.ConsumerDelegate()
assert delegate.on_message("") is None
delegate = consumers.ConsumerDelegate(consumers.MessageHandlers(on_message=lambda m: m))
assert delegate.on_message("a") == "a"
@pytest.mark.skipif(
IS_WIN_PY310, reason="skipping due to rshell/pyreadline broken for >=py310 on windows."
)
def test_rsh_consumer(self):
calls = []
message_cons = consumers.MessageHandlers(on_message=lambda m: calls.append(m))
rsh_cons = backend_rshell.RShellConsumer(message_cons.on_message)
chars = iter(list("a line.\nnext line.\n"))
for i in chars:
rsh_cons.on_message(i.encode())
assert len(calls) == 2
assert calls == ["a line.", "next line."]
================================================
FILE: tests/test_stub_source.py
================================================
from micropy.stubs import source
from tests.test_stubs_repo import stub_repo # noqa
def test_stub_info_spec_locator(shared_datadir):
test_path = shared_datadir / "esp8266_test_stub"
assert source.StubInfoSpecLocator().prepare(test_path) == test_path.absolute()
def test_stub_info_spec_locator__returns_location_on_fail(tmp_path):
assert source.StubInfoSpecLocator().prepare(tmp_path) == tmp_path
def test_source_ready(shared_datadir, test_urls, tmp_path, mocker, test_archive):
"""should prepare and resolve stub"""
# Test LocalStub ready
test_path = shared_datadir / "esp8266_test_stub"
local_stub = source.get_source(test_path)
expected_path = local_stub.location.resolve()
with local_stub.ready() as source_path:
assert source_path == expected_path
# Setup RemoteStub
test_parent = tmp_path / "tmpdir"
test_parent.mkdir()
expected_path = (test_parent / "archive_test_stub").resolve()
mocker.patch.object(source.tempfile, "mkdtemp", return_value=test_parent)
mocker.patch.object(source.utils, "stream_download", return_value=test_archive)
# Test Remote Stub
remote_stub = source.get_source(test_urls["download"])
with remote_stub.ready() as source_path:
print(list(source_path.parent.iterdir()))
assert (source_path / "info.json").exists()
assert len(list(source_path.iterdir())) == 3
def test_stub_repo_locator(stub_repo): # noqa
locator = source.RepoStubLocator(stub_repo)
assert locator.prepare("stub1-foo") == "https://test-manifest/stub1-foo"
================================================
FILE: tests/test_stubs/bad_test_stub/modules.json
================================================
[
{
"nodename": "esp32",
"release": "1.10.0"
},
{
"pathtofile": "/foobar/foo/bar.py",
"something": "bar"
}
]
================================================
FILE: tests/test_stubs/esp32_test_stub/frozen/ntptime.py
================================================
try:
import usocket as socket
except:
import socket
try:
import ustruct as struct
except:
import struct
# (date(2000, 1, 1) - date(1900, 1, 1)).days * 24*60*60
NTP_DELTA = 3155673600
host = "pool.ntp.org"
def time():
NTP_QUERY = bytearray(48)
NTP_QUERY[0] = 0x1B
addr = socket.getaddrinfo(host, 123)[0][-1]
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(1)
res = s.sendto(NTP_QUERY, addr)
msg = s.recv(48)
s.close()
val = struct.unpack("!I", msg[40:44])[0]
return val - NTP_DELTA
# There's currently no timezone support in MicroPython, so
# utime.localtime() will return UTC time (as if it was .gmtime())
def settime():
t = time()
import machine
import utime
tm = utime.localtime(t)
tm = tm[0:3] + (0,) + tm[3:6] + (0,)
machine.RTC().datetime(tm)
print(utime.localtime())
================================================
FILE: tests/test_stubs/esp32_test_stub/frozen/ntptime.pyi
================================================
# make_stub_files: Fri 21 Jun 2019 at 00:44:00
from typing import Any, Dict, Optional, Sequence, Tuple, Union
Node = Any
def time() -> Any: ...
# 0: return val-NTP_DELTA
# ? 0: return val-NTP_DELTA
def settime() -> None: ...
================================================
FILE: tests/test_stubs/esp32_test_stub/info.json
================================================
{
"firmware": {
"machine": "ESP32 module with ESP32",
"firmware": "esp32 1.11.0",
"nodename": "esp8266",
"version": "1.11.0",
"release": "1.11.0",
"sysname": "esp32",
"name": "micropython"
},
"stubber": { "version": "1.2.0" },
"modules": [
{
"file": "/stubs/esp32_1_11_0/umqtt/robust.py",
"module": "umqtt.robust"
},
{
"file": "/stubs/esp32_1_11_0/umqtt/simple.py",
"module": "umqtt.simple"
}
]
}
================================================
FILE: tests/test_stubs/esp32_test_stub/stubs/machine.py
================================================
"""
Module: 'machine' on esp8266 v1.9.4
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.9.4-8-ga9a3caad0 on 2018-05-11', machine='ESP module with ESP8266')
# Stubber: 1.1.2
class ADC:
""""""
def read():
pass
DEEPSLEEP = 4
DEEPSLEEP_RESET = 5
HARD_RESET = 6
class I2C:
""""""
================================================
FILE: tests/test_stubs/esp32_test_stub/stubs/modules.json
================================================
[
{
"nodename": "esp8266",
"release": "2.2.0-dev(9422289)",
"version": "v1.9.4-8-ga9a3caad0 on 2018-05-11",
"machine": "ESP module with ESP8266",
"sysname": "esp8266"
},
{ "stubber": "1.1.2" },
{
"file": "/stubs/esp8266_v1_9_4/uasyncio/core.py",
"module": "uasyncio.core"
},
{ "file": "/stubs/esp8266_v1_9_4/umqtt/robust.py", "module": "umqtt.robust" },
{ "file": "/stubs/esp8266_v1_9_4/umqtt/simple.py", "module": "umqtt.simple" },
{
"file": "/stubs/esp8266_v1_9_4/urllib/urequest.py",
"module": "urllib.urequest"
},
{ "file": "/stubs/esp8266_v1_9_4/upip.py", "module": "upip" },
{ "file": "/stubs/esp8266_v1_9_4/_boot.py", "module": "_boot" },
{ "file": "/stubs/esp8266_v1_9_4/_onewire.py", "module": "_onewire" },
{ "file": "/stubs/esp8266_v1_9_4/apa102.py", "module": "apa102" },
{ "file": "/stubs/esp8266_v1_9_4/array.py", "module": "array" },
{ "file": "/stubs/esp8266_v1_9_4/btree.py", "module": "btree" },
{ "file": "/stubs/esp8266_v1_9_4/dht.py", "module": "dht" },
{ "file": "/stubs/esp8266_v1_9_4/ds18x20.py", "module": "ds18x20" },
{ "file": "/stubs/esp8266_v1_9_4/errno.py", "module": "errno" },
{ "file": "/stubs/esp8266_v1_9_4/esp.py", "module": "esp" },
{
"file": "/stubs/esp8266_v1_9_4/example_pub_button.py",
"module": "example_pub_button"
},
{
"file": "/stubs/esp8266_v1_9_4/example_sub_led.py",
"module": "example_sub_led"
},
{ "file": "/stubs/esp8266_v1_9_4/flashbdev.py", "module": "flashbdev" },
{ "file": "/stubs/esp8266_v1_9_4/framebuf.py", "module": "framebuf" },
{ "file": "/stubs/esp8266_v1_9_4/gc.py", "module": "gc" },
{ "file": "/stubs/esp8266_v1_9_4/inisetup.py", "module": "inisetup" },
{ "file": "/stubs/esp8266_v1_9_4/json.py", "module": "json" },
{ "file": "/stubs/esp8266_v1_9_4/math.py", "module": "math" },
{ "file": "/stubs/esp8266_v1_9_4/micropython.py", "module": "micropython" },
{ "file": "/stubs/esp8266_v1_9_4/neopixel.py", "module": "neopixel" },
{ "file": "/stubs/esp8266_v1_9_4/network.py", "module": "network" },
{ "file": "/stubs/esp8266_v1_9_4/ntptime.py", "module": "ntptime" },
{ "file": "/stubs/esp8266_v1_9_4/onewire.py", "module": "onewire" },
{ "file": "/stubs/esp8266_v1_9_4/select.py", "module": "select" },
{ "file": "/stubs/esp8266_v1_9_4/sys.py", "module": "sys" },
{ "file": "/stubs/esp8266_v1_9_4/time.py", "module": "time" },
{ "file": "/stubs/esp8266_v1_9_4/ubinascii.py", "module": "ubinascii" },
{ "file": "/stubs/esp8266_v1_9_4/uhashlib.py", "module": "uhashlib" },
{ "file": "/stubs/esp8266_v1_9_4/uheapq.py", "module": "uheapq" },
{ "file": "/stubs/esp8266_v1_9_4/ujson.py", "module": "ujson" },
{ "file": "/stubs/esp8266_v1_9_4/urandom.py", "module": "urandom" },
{ "file": "/stubs/esp8266_v1_9_4/ure.py", "module": "ure" },
{ "file": "/stubs/esp8266_v1_9_4/uselect.py", "module": "uselect" },
{ "file": "/stubs/esp8266_v1_9_4/ussl.py", "module": "ussl" },
{ "file": "/stubs/esp8266_v1_9_4/ustruct.py", "module": "ustruct" },
{ "file": "/stubs/esp8266_v1_9_4/utime.py", "module": "utime" },
{ "file": "/stubs/esp8266_v1_9_4/utimeq.py", "module": "utimeq" },
{ "file": "/stubs/esp8266_v1_9_4/uzlib.py", "module": "uzlib" },
{
"file": "/stubs/esp8266_v1_9_4/websocket_helper.py",
"module": "websocket_helper"
}
]
================================================
FILE: tests/test_stubs/esp8266_test_stub/frozen/ntptime.py
================================================
try:
import usocket as socket
except:
import socket
try:
import ustruct as struct
except:
import struct
# (date(2000, 1, 1) - date(1900, 1, 1)).days * 24*60*60
NTP_DELTA = 3155673600
host = "pool.ntp.org"
def time():
NTP_QUERY = bytearray(48)
NTP_QUERY[0] = 0x1B
addr = socket.getaddrinfo(host, 123)[0][-1]
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.settimeout(1)
res = s.sendto(NTP_QUERY, addr)
msg = s.recv(48)
s.close()
val = struct.unpack("!I", msg[40:44])[0]
return val - NTP_DELTA
# There's currently no timezone support in MicroPython, so
# utime.localtime() will return UTC time (as if it was .gmtime())
def settime():
t = time()
import machine
import utime
tm = utime.localtime(t)
tm = tm[0:3] + (0,) + tm[3:6] + (0,)
machine.RTC().datetime(tm)
print(utime.localtime())
================================================
FILE: tests/test_stubs/esp8266_test_stub/frozen/ntptime.pyi
================================================
# make_stub_files: Fri 21 Jun 2019 at 00:44:00
from typing import Any, Dict, Optional, Sequence, Tuple, Union
Node = Any
def time() -> Any: ...
# 0: return val-NTP_DELTA
# ? 0: return val-NTP_DELTA
def settime() -> None: ...
================================================
FILE: tests/test_stubs/esp8266_test_stub/info.json
================================================
{
"firmware": {
"machine": "ESP module with ESP8266",
"firmware": "esp8266 v1.9.4",
"nodename": "esp8266",
"version": "1.9.4",
"release": "2.2.0-dev(9422289)",
"sysname": "esp8266",
"name": "micropython"
},
"stubber": { "version": "1.2.0" },
"modules": [
{
"file": "/stubs/esp8266_1_9_4/umqtt/robust.py",
"module": "umqtt.robust"
},
{
"file": "/stubs/esp8266_1_9_4/umqtt/simple.py",
"module": "umqtt.simple"
}
]
}
================================================
FILE: tests/test_stubs/esp8266_test_stub/stubs/machine.py
================================================
"""
Module: 'machine' on esp8266 v1.9.4
"""
# MCU: (sysname='esp8266', nodename='esp8266', release='2.2.0-dev(9422289)', version='v1.9.4-8-ga9a3caad0 on 2018-05-11', machine='ESP module with ESP8266')
# Stubber: 1.1.2
class ADC:
""""""
def read():
pass
DEEPSLEEP = 4
DEEPSLEEP_RESET = 5
HARD_RESET = 6
class I2C:
""""""
================================================
FILE: tests/test_stubs/esp8266_test_stub/stubs/modules.json
================================================
[
{
"nodename": "esp8266",
"release": "2.2.0-dev(9422289)",
"version": "v1.9.4-8-ga9a3caad0 on 2018-05-11",
"machine": "ESP module with ESP8266",
"sysname": "esp8266"
},
{ "stubber": "1.1.2" },
{
"file": "/stubs/esp8266_v1_9_4/uasyncio/core.py",
"module": "uasyncio.core"
},
{ "file": "/stubs/esp8266_v1_9_4/umqtt/robust.py", "module": "umqtt.robust" },
{ "file": "/stubs/esp8266_v1_9_4/umqtt/simple.py", "module": "umqtt.simple" },
{
"file": "/stubs/esp8266_v1_9_4/urllib/urequest.py",
"module": "urllib.urequest"
},
{ "file": "/stubs/esp8266_v1_9_4/upip.py", "module": "upip" },
{ "file": "/stubs/esp8266_v1_9_4/_boot.py", "module": "_boot" },
{ "file": "/stubs/esp8266_v1_9_4/_onewire.py", "module": "_onewire" },
{ "file": "/stubs/esp8266_v1_9_4/apa102.py", "module": "apa102" },
{ "file": "/stubs/esp8266_v1_9_4/array.py", "module": "array" },
{ "file": "/stubs/esp8266_v1_9_4/btree.py", "module": "btree" },
{ "file": "/stubs/esp8266_v1_9_4/dht.py", "module": "dht" },
{ "file": "/stubs/esp8266_v1_9_4/ds18x20.py", "module": "ds18x20" },
{ "file": "/stubs/esp8266_v1_9_4/errno.py", "module": "errno" },
{ "file": "/stubs/esp8266_v1_9_4/esp.py", "module": "esp" },
{
"file": "/stubs/esp8266_v1_9_4/example_pub_button.py",
"module": "example_pub_button"
},
{
"file": "/stubs/esp8266_v1_9_4/example_sub_led.py",
"module": "example_sub_led"
},
{ "file": "/stubs/esp8266_v1_9_4/flashbdev.py", "module": "flashbdev" },
{ "file": "/stubs/esp8266_v1_9_4/framebuf.py", "module": "framebuf" },
{ "file": "/stubs/esp8266_v1_9_4/gc.py", "module": "gc" },
{ "file": "/stubs/esp8266_v1_9_4/inisetup.py", "module": "inisetup" },
{ "file": "/stubs/esp8266_v1_9_4/json.py", "module": "json" },
{ "file": "/stubs/esp8266_v1_9_4/math.py", "module": "math" },
{ "file": "/stubs/esp8266_v1_9_4/micropython.py", "module": "micropython" },
{ "file": "/stubs/esp8266_v1_9_4/neopixel.py", "module": "neopixel" },
{ "file": "/stubs/esp8266_v1_9_4/network.py", "module": "network" },
{ "file": "/stubs/esp8266_v1_9_4/ntptime.py", "module": "ntptime" },
{ "file": "/stubs/esp8266_v1_9_4/onewire.py", "module": "onewire" },
{ "file": "/stubs/esp8266_v1_9_4/select.py", "module": "select" },
{ "file": "/stubs/esp8266_v1_9_4/sys.py", "module": "sys" },
{ "file": "/stubs/esp8266_v1_9_4/time.py", "module": "time" },
{ "file": "/stubs/esp8266_v1_9_4/ubinascii.py", "module": "ubinascii" },
{ "file": "/stubs/esp8266_v1_9_4/uhashlib.py", "module": "uhashlib" },
{ "file": "/stubs/esp8266_v1_9_4/uheapq.py", "module": "uheapq" },
{ "file": "/stubs/esp8266_v1_9_4/ujson.py", "module": "ujson" },
{ "file": "/stubs/esp8266_v1_9_4/urandom.py", "module": "urandom" },
{ "file": "/stubs/esp8266_v1_9_4/ure.py", "module": "ure" },
{ "file": "/stubs/esp8266_v1_9_4/uselect.py", "module": "uselect" },
{ "file": "/stubs/esp8266_v1_9_4/ussl.py", "module": "ussl" },
{ "file": "/stubs/esp8266_v1_9_4/ustruct.py", "module": "ustruct" },
{ "file": "/stubs/esp8266_v1_9_4/utime.py", "module": "utime" },
{ "file": "/stubs/esp8266_v1_9_4/utimeq.py", "module": "utimeq" },
{ "file": "/stubs/esp8266_v1_9_4/uzlib.py", "module": "uzlib" },
{
"file": "/stubs/esp8266_v1_9_4/websocket_helper.py",
"module": "websocket_helper"
}
]
================================================
FILE: tests/test_stubs.py
================================================
import shutil
from pathlib import Path
import pytest
from micropy import exceptions, stubs
@pytest.fixture
def mock_fware(mocker, shared_datadir):
fware_stub = shared_datadir / "fware_test_stub"
mock_remote = mocker.patch.object(stubs.source.RemoteStubLocator, "prepare").return_value
mock_remote.__enter__.return_value = fware_stub
def test_stub_validation(shared_datadir):
"""should pass validation"""
stub_path = shared_datadir / "esp8266_test_stub"
manager = stubs.StubManager()
manager.validate(stub_path)
assert manager.is_valid(stub_path)
assert not manager.is_valid(Path("/foobar/bar"))
def test_bad_stub_validation(shared_datadir, mocker):
"""should fail validation"""
stub_path = shared_datadir / "esp8266_test_stub"
manager = stubs.StubManager()
mock_validate = mocker.patch.object(stubs.stubs.utils, "Validator")
mock_validate.return_value.validate.side_effect = [Exception, FileNotFoundError]
with pytest.raises(exceptions.StubValidationError):
manager.validate(stub_path)
with pytest.raises(exceptions.StubError):
manager.validate(Path("/foobar/foo"))
def test_bad_stub(tmp_path):
"""should raise exception on invalid stub"""
with pytest.raises(FileNotFoundError):
stubs.stubs.DeviceStub(tmp_path)
def test_valid_stub(shared_datadir):
"""should have all attributes"""
stub_path = shared_datadir / "esp8266_test_stub"
stub = stubs.stubs.DeviceStub(stub_path)
stub_2 = stubs.stubs.DeviceStub(stub_path)
fware = stubs.stubs.FirmwareStub(shared_datadir / "fware_test_stub")
assert stub == stub_2
expect_fware = {
"machine": "ESP module with ESP8266",
"firmware": "esp8266 v1.9.4",
"nodename": "esp8266",
"version": "1.9.4",
"release": "2.2.0-dev(9422289)",
"sysname": "esp8266",
"name": "micropython",
}
expect_repr = (
"DeviceStub(sysname=esp8266, firmware=micropython," f" version=1.9.4, path={stub_path})"
)
assert stub.path.exists()
assert stub.stubs.exists()
assert stub.frozen.exists()
assert stub.version == "1.9.4"
assert stub.firm_info == expect_fware
assert repr(stub) == expect_repr
assert str(stub) == "esp8266-1.9.4"
assert stub.firmware_name == "micropython"
del stub.firm_info["name"]
assert stub.firmware_name == "esp8266 v1.9.4"
stub.firmware = fware
assert stub.firmware_name == "micropython"
assert str(stub) == "esp8266-micropython-1.9.4"
def test_valid_fware_stub(shared_datadir):
stub_path = shared_datadir / "fware_test_stub"
stub = stubs.stubs.FirmwareStub(stub_path)
assert str(stub) == "micropython"
assert stub.frozen.exists()
assert repr(stub) == ("FirmwareStub(firmware=micropython," " repo=micropython/micropython)")
def test_resolve_stub(shared_datadir):
"""should resolve correct stub type"""
device_stub = shared_datadir / "esp8266_test_stub"
fware_stub = shared_datadir / "fware_test_stub"
invalid_stub = shared_datadir / "esp8266_invalid_stub"
manager = stubs.StubManager()
stub_type = manager._get_stubtype(device_stub)
assert stub_type == stubs.stubs.DeviceStub
stub_type = manager._get_stubtype(fware_stub)
assert stub_type == stubs.stubs.FirmwareStub
with pytest.raises(exceptions.StubError):
manager._get_stubtype(Path("/foobar/foo"))
with pytest.raises(exceptions.StubValidationError):
manager._get_stubtype(invalid_stub)
def test_resolve_firmware(tmp_path, shared_datadir):
"""should resolve firmware"""
device_stub = shared_datadir / "esp8266_test_stub"
fware_stub_path = shared_datadir / "fware_test_stub"
manager = stubs.StubManager(resource=tmp_path)
fware_stub = manager.add(fware_stub_path)
dev_stub = stubs.stubs.DeviceStub(device_stub)
resolved = manager.resolve_firmware(dev_stub)
assert fware_stub == resolved
def test_add_single_stub(shared_datadir, tmp_path):
"""should add a single stub"""
stub_path = shared_datadir / "esp8266_test_stub"
manager = stubs.StubManager()
manager.add(stub_path, dest=tmp_path)
assert len(manager) == 1
assert stub_path.name in [d.name for d in tmp_path.iterdir()]
def test_add_stubs_from_dir(datadir, tmp_path):
"""should add all valid stubs in directory"""
manager = stubs.StubManager()
manager.add(datadir, dest=tmp_path)
assert len(manager) == 2
assert len(list(tmp_path.iterdir())) - 1 == len(manager)
assert manager._should_recurse(datadir)
with pytest.raises(exceptions.StubError):
empty_path = tmp_path / "empty"
empty_path.mkdir()
manager._should_recurse(empty_path)
def test_add_with_resource(datadir, mock_fware, tmp_path, mocker):
"""should not require dest kwarg"""
resource = tmp_path / "tmp_resource"
resource.mkdir()
load_spy = mocker.spy(stubs.StubManager, "_load")
manager = stubs.StubManager(resource=resource)
manager.add(datadir)
assert len(manager) == 2
assert "esp8266_test_stub" in [p.name for p in resource.iterdir()]
assert load_spy.call_count == 5
# Should not add any new stubs
assert manager.add(datadir)
assert load_spy.call_count == 5
# Should force load
assert manager.add(datadir, force=True)
assert load_spy.call_count == 10
def test_add_no_resource_no_dest(datadir, mock_fware):
"""should fail with typeerror"""
manager = stubs.StubManager()
with pytest.raises(TypeError):
manager.add(datadir)
def test_loads_from_resource(datadir, mock_fware):
"""should load from resource if provided"""
manager = stubs.StubManager(resource=datadir)
assert len(manager) == 2
def test_name_property(shared_datadir):
"""should raise error if name is not overridden"""
test_stub = shared_datadir / "esp8266_test_stub"
class ErrorStub(stubs.stubs.Stub):
def __init__(self, path, copy_to=None, **kwargs):
return super().__init__(path, copy_to=copy_to, **kwargs)
with pytest.raises(NotImplementedError):
x = ErrorStub(test_stub)
x.name
def test_stub_resolve_link(mock_mp_stubs, tmp_path):
"""should create DeviceStub from symlink"""
stub = list(mock_mp_stubs.stubs)[0]
link_path = tmp_path / "stub_symlink"
linked_stub = stubs.stubs.DeviceStub.resolve_link(stub, link_path)
assert stub == linked_stub
assert stub.path != linked_stub.path
assert linked_stub.path.is_symlink()
assert linked_stub.path.resolve() == stub.path
def test_manager_resolve_subresource(mock_mp_stubs, tmp_path):
"""should create StubManager from subresource symlinks"""
test_stubs = list(mock_mp_stubs.stubs)[:2]
subresource = tmp_path / "stub_subresource"
subresource.mkdir()
manager = mock_mp_stubs.stubs.resolve_subresource(test_stubs, subresource)
linked_stub = list(manager)[0]
assert linked_stub.path.is_symlink()
assert linked_stub in list(mock_mp_stubs.stubs)
def test_load_firmware_first(mocker, tmp_path, shared_datadir):
"""should always load firmware first"""
mock_manager = mocker.patch.object(stubs.StubManager, "_load")
mock_iterdir = mocker.patch.object(stubs.stubs.Path, "iterdir")
# mock_mgr = mock_manager.return_value
tmp_path = tmp_path / "fware_first_test"
tmp_resource = tmp_path / "tmp_resource"
tmp_resource.mkdir(parents=True)
test_stub = shared_datadir / "esp32_test_stub"
test_fware = shared_datadir / "fware_test_stub"
# Ensure Firmware loads first, regardless of how Path.iterdir() orders it
shutil.copytree(test_stub, (tmp_path / "99_esp32_test_stub"))
shutil.copytree(test_fware, (tmp_path / "00_fware_test_stub"))
mock_iterdir.return_value = (test_stub, test_fware)
manager = stubs.StubManager(resource=tmp_resource)
manager.load_from(tmp_path)
# Get First call args
fargs, _ = mock_manager.call_args_list[0]
assert fargs[0].location == test_fware
def test_iter_by_firm_stubs(mocker):
"""should iter stubs by firmware"""
firm_stub = mocker.MagicMock()
dev_stub = mocker.MagicMock()
dev_stub.firmware = firm_stub
unk_stub = mocker.MagicMock()
unk_stub.firmware = None
manager = stubs.StubManager()
manager._loaded = {firm_stub, dev_stub, unk_stub}
manager._firmware = {firm_stub}
stub_iter = list(manager.iter_by_firmware())
assert stub_iter == [(firm_stub, [dev_stub]), ("Unknown", [unk_stub])]
================================================
FILE: tests/test_stubs_repo.py
================================================
import pytest
from micropy.stubs import RepositoryInfo, StubPackage, StubRepository, StubsManifest
class ManifestStub(StubsManifest[StubPackage]):
def resolve_package_url(self, package: StubPackage) -> str:
return f"https://test-manifest/{package.name}"
Test1Manifest = ManifestStub(
repository=RepositoryInfo(
name="Test", display_name="Test Display", source="https://test-manifest.com"
),
packages=frozenset(
[
StubPackage(name="stub1-foo", version="1.0.0"),
StubPackage(name="stub1-foo", version="1.1.0"),
StubPackage(name="stub1-foo", version="2.0.0"),
StubPackage(name="stub2-bar", version="2.0.0"),
]
),
)
Test2Manifest = ManifestStub(
repository=RepositoryInfo(
name="Test2", display_name="Test Display2", source="https://test2-manifest.com"
),
packages=frozenset(
[
StubPackage(name="stub3-thing", version="3.0.0"),
StubPackage(name="stub3-thing", version="3.1.0"),
StubPackage(name="stub4-device", version="4.0.0"),
]
),
)
@pytest.fixture
def stub_repo():
repo = StubRepository(manifests=[Test1Manifest, Test2Manifest])
return repo
def test_repo_inits(stub_repo):
assert len(stub_repo.manifests) == 2
assert len(list(stub_repo.packages)) == 7
@pytest.mark.parametrize(
"query,expect_name,include_versions",
[
("stub1", ["Test/stub1-foo-2.0.0"], False),
("DEvICE", ["Test2/stub4-device-4.0.0"], False),
(
"stub",
[
"Test/stub1-foo-2.0.0",
"Test/stub2-bar-2.0.0",
"Test2/stub3-thing-3.1.0",
"Test2/stub4-device-4.0.0",
],
False,
),
(
"stub",
[
"Test/stub1-foo-1.0.0",
"Test/stub1-foo-1.1.0",
"Test/stub1-foo-2.0.0",
"Test/stub2-bar-2.0.0",
"Test2/stub3-thing-3.0.0",
"Test2/stub3-thing-3.1.0",
"Test2/stub4-device-4.0.0",
],
True,
),
],
)
def test_repo_search(stub_repo, query, expect_name, include_versions):
results = stub_repo.search(query, include_versions=include_versions)
names = [i.absolute_versioned_name for i in results]
print(names)
assert sorted(names) == sorted(expect_name)
================================================
FILE: tests/test_template.py
================================================
import json
from pathlib import Path
import pylint.lint
import pytest
from micropy.project.template import Template, TemplateProvider
@pytest.fixture
def stub_context(mock_mp_stubs):
stubs = list(mock_mp_stubs.stubs)[:3]
stub_paths = [stub.stubs for stub in stubs]
frozen_paths = [stub.frozen for stub in stubs]
fware_paths = [stub.firmware.frozen for stub in stubs]
ctx_paths = [*stub_paths, *frozen_paths, *fware_paths]
return (stubs, (stub_paths, frozen_paths, fware_paths), ctx_paths)
def test_vscode_template(stub_context, shared_datadir, tmp_path, mock_checks):
stubs, paths, ctx_paths = stub_context
prov = TemplateProvider(["vscode"])
ctx_datadir = tmp_path / "ctx_cata"
ctx_datadir.mkdir(exist_ok=True)
# Add test local path
ctx_local = ctx_datadir / "src" / "lib" / "somelib"
ctx_local.mkdir(parents=True)
ctx_absolute = Path("/fakedir/notinprojectdir/somelib")
ctx_local_paths = [ctx_local, ctx_absolute]
prov.render_to(
"vscode",
tmp_path,
stubs=stubs,
paths=ctx_paths,
datadir=ctx_datadir,
local_paths=ctx_local_paths,
)
expected_path = tmp_path / ".vscode" / "settings.json"
out_content = expected_path.read_text()
print(out_content)
# Get rid of comments
with expected_path.open() as f:
lines = [line.strip() for line in f.readlines() if line]
valid = [line for line in lines if "//" not in line[:2]]
# Valid JSON?
expect_paths = [str(p.relative_to(tmp_path)) for p in ctx_paths]
expect_paths.append(str(ctx_local.relative_to(tmp_path))) # add local path (should be relative)
# local path outside of project dir (must be absolute)
expect_paths.append(str(ctx_absolute.absolute()))
content = json.loads("\n".join(valid))
assert sorted(expect_paths) == sorted(content["python.autoComplete.extraPaths"])
assert expected_path.exists()
# Test Update
ctx_paths.append(tmp_path / "foobar" / "foo.py")
prov.update(
"vscode",
tmp_path,
stubs=stubs,
paths=ctx_paths,
datadir=ctx_datadir,
local_paths=ctx_local_paths,
)
content = json.loads(expected_path.read_text())
expect_paths.append(str((tmp_path / "foobar" / "foo.py").relative_to(tmp_path)))
assert sorted(expect_paths) == sorted(content["python.autoComplete.extraPaths"])
# Test update with missing file
expected_path.unlink() # delete file
prov.update("vscode", tmp_path, stubs=stubs, paths=ctx_paths, datadir=ctx_datadir)
assert expected_path.exists()
def test_pylint_template(stub_context, tmp_path):
def test_pylint_load():
try:
lint_args = ["--rcfile", str(expected_path.absolute())]
pylint.lint.Run(lint_args)
except SyntaxError:
pytest.fail(str(SyntaxError))
except: # noqa
pass
stubs, paths, ctx_paths = stub_context
ctx_datadir = tmp_path / "ctx_cata"
ctx_datadir.mkdir(exist_ok=True)
prov = TemplateProvider(["pylint"])
prov.render_to("pylint", tmp_path, stubs=stubs, paths=ctx_paths, datadir=ctx_datadir)
expected_path = tmp_path / ".pylintrc"
assert expected_path.exists()
# Will Pylint load it?
test_pylint_load()
# Test Update
new_path = tmp_path / ".micropy" / "foobar" / "foo"
ctx_paths.append(new_path)
prov.update("pylint", tmp_path, stubs=stubs, paths=ctx_paths, datadir=ctx_datadir)
init_hook = expected_path.read_text().splitlines(True)[2]
hook_imports = init_hook.split(",")
hook_path = str(Path(".micropy/foobar/foo")).replace(
"\\", "/"
) # no need to use \\ on pylint Windows
assert f' "{hook_path}"' in hook_imports
test_pylint_load()
def test_generic_template(mock_mp_stubs, tmp_path):
prov = TemplateProvider(["bootstrap", "pymakr"])
prov.render_to("boot", tmp_path)
expected_path = tmp_path / "src" / "boot.py"
assert expected_path.exists()
expected_content = (prov.TEMPLATE_DIR / "src" / "boot.py").read_text()
out_content = expected_path.read_text()
print(out_content)
assert expected_content.strip() == out_content.strip()
templ = prov.get("boot")
assert templ.update(tmp_path) is None
def test_no_context():
class BadTemplate(Template):
def __init__(self, template, **kwargs):
return super().__init__(template, **kwargs)
with pytest.raises(NotImplementedError):
x = BadTemplate("abc")
print(x.context)
================================================
FILE: tests/test_utils/fail.json
================================================
[
{
"nodename": "esp32",
"release": "1.10.0"
},
{
"pathtofile": "/foobar/foo/bar.py",
"something": "bar"
}
]
================================================
FILE: tests/test_utils/pass.json
================================================
[
{
"nodename": "esp32",
"release": "1.10.0",
"version": "'v1.10-247-g0fb15fc3f on 2019-03-29",
"machine": "ESP32 module with ESP32",
"sysname": "esp32"
},
{
"stubber": "1.1.2"
},
{
"file": "/foobar/foo/bar.py",
"module": "bar"
},
{
"file": "/barfoo/bar/foo.py",
"module": "foo"
}
]
================================================
FILE: tests/test_utils/schema.json
================================================
{
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"required": [
"machine",
"nodename",
"release",
"sysname",
"version"
],
"properties": {
"machine": { "type": "string" },
"nodename": { "type": "string" },
"release": { "type": "string" },
"sysname": { "type": "string" },
"version": { "type": "string" }
}
},
{
"type": "object",
"required": ["stubber"],
"properties": { "stubber": { "type": "string" } }
},
{
"type": "object",
"required": ["file", "module"],
"properties": {
"file": { "type": "string" },
"module": { "type": "string" }
}
}
]
}
}
================================================
FILE: tests/test_utils.py
================================================
import io
import sys
import pytest
import requests
from jsonschema import ValidationError
from micropy import utils
from requests.exceptions import ConnectionError, HTTPError, InvalidURL
@pytest.fixture
def schema(datadir):
file = datadir / "schema.json"
pass_file = datadir / "pass.json"
fail_file = datadir / "fail.json"
return (file, pass_file, fail_file)
def test_validate(schema):
"""Test for successful validation"""
schema, pass_file, _ = schema
val = utils.Validator(schema_path=schema)
val.validate(pass_file)
def test_fail_validate(schema):
"""Test for invalid file"""
schema, _, fail_file = schema
val = utils.Validator(schema_path=schema)
with pytest.raises(ValidationError):
val.validate(fail_file)
def test_is_url(test_urls):
"""should respond true/false for url"""
u = test_urls
assert utils.is_url(u["valid"])
assert utils.is_url(u["valid_https"])
assert not utils.is_url(u["invalid"])
assert not utils.is_url(u["invalid_file"])
def test_ensure_valid_url(mocker, test_urls):
"""should ensure url is valid"""
u = test_urls
with pytest.raises(InvalidURL):
utils.ensure_valid_url(test_urls["invalid"])
with pytest.raises(ConnectionError):
mocker.patch.object(utils, "is_url", return_value=True)
mock_head = mocker.patch.object(requests, "head")
mock_head.side_effect = [ConnectionError]
utils.ensure_valid_url(u["valid"])
mocker.stopall()
with pytest.raises(HTTPError):
utils.ensure_valid_url(u["bad_resp"])
result = utils.ensure_valid_url(u["valid"])
assert result == u["valid"]
def test_ensure_existing_dir(tmp_path):
"""should ensure dir exists"""
not_exist = tmp_path / "i_dont_exist"
file = tmp_path / "file.txt"
file.touch()
with pytest.raises(NotADirectoryError):
utils.ensure_existing_dir(not_exist)
with pytest.raises(NotADirectoryError):
utils.ensure_existing_dir(file)
result = utils.ensure_existing_dir(str(tmp_path))
assert result == tmp_path
assert result.exists()
assert result.is_dir()
def test_is_downloadable(mocker, test_urls):
"""should check if url can be downloaded from"""
u = test_urls
uheaders = u["headers"]
mock_head = mocker.patch.object(requests, "head")
head_mock_val = mocker.PropertyMock(
side_effect=[uheaders["not_download"], uheaders["can_download"]]
)
type(mock_head.return_value).headers = head_mock_val
assert not utils.is_downloadable(u["valid"])
assert not utils.is_downloadable("not-a-real-url")
assert utils.is_downloadable(u["valid"])
def test_get_url_filename(test_urls):
"""should return filename"""
filename = "archive_test_stub.tar.gz"
result = utils.get_url_filename(test_urls["download"])
assert result == filename
def test_is_existing_dir(tmp_path):
bad_path = tmp_path / "not-real-path"
is_file = tmp_path / "file.txt"
is_file.touch()
assert not utils.is_existing_dir(bad_path)
assert not utils.is_existing_dir(is_file)
assert utils.is_existing_dir(tmp_path)
def test_search_xml(mocker, shared_datadir, test_urls):
u = test_urls
test_xml = shared_datadir / "test_source.xml"
mock_get = mocker.patch.object(requests, "get")
with test_xml.open("rb") as f:
type(mock_get.return_value).content = f.read()
results = utils.search_xml(u["valid"], "Key", ignore_cache=True)
assert sorted(results) == sorted(
["packages/esp32-micropython-1.10.0.tar.gz", "packages/esp32-micropython-1.11.0.tar.gz"]
)
@pytest.mark.xfail(sys.version_info >= (3, 8), reason="requires python >= 3.8")
def test_generate_stub__py37(tmp_path):
with pytest.raises(ImportError):
expect_path = tmp_path / "foo.py"
expect_path.touch()
utils.generate_stub(expect_path)
@pytest.mark.xfail(sys.version_info >= (3, 8), reason="requires python >= 3.8")
def test_prepare_create_stubs__py37():
with pytest.raises(ImportError):
utils.stub.prepare_create_stubs()
@pytest.mark.xfail(sys.version_info < (3, 8), reason="requires python >= 3.8", raises=ImportError)
def test_prepare_create_stubs():
create_stubs = utils.stub.prepare_create_stubs()
assert isinstance(create_stubs, io.StringIO)
assert len(create_stubs.getvalue()) > 1
def test_generate_stub(shared_datadir, tmp_path, mocker):
mock_stubber = mocker.patch.object(utils.stub, "stubmaker")
expect_path = tmp_path / "foo.py"
expect_path.touch()
result = utils.generate_stub(expect_path)
mock_stubber.generate_pyi_from_file.assert_called_once()
assert result == (expect_path, expect_path.with_suffix(".pyi"))
# Test print monkeypatch
print_mock = mocker.Mock(return_value=None)
utils.generate_stub(expect_path, log_func=print_mock)
def test_get_package_meta(mocker, requests_mock):
"""should get package meta"""
mock_data = {
"releases": {
"0.0.0": [{"url": "early-version.tar.gz"}],
"0.1.0": [
{
"url": "do-not-return-me",
},
{"url": "return-me.tar.gz"},
],
}
}
requests_mock.get("https://pypi.org/pypi/foobar/json", json=mock_data)
result = utils.get_package_meta("foobar", "https://pypi.org/pypi/foobar/json")
assert result == {"url": "return-me.tar.gz"}
result = utils.get_package_meta("foobar==0.0.0", "https://pypi.org/pypi/foobar/json")
assert result == {"url": "early-version.tar.gz"}
def test_extract_tarbytes(mocker):
"""should extract tar file from memory"""
test_bytes = bytearray("foobar", "utf-8")
mock_io = mocker.patch.object(utils.helpers.io, "BytesIO")
mock_io.return_value = io.BytesIO(test_bytes)
mock_tarfile = mocker.patch.object(utils.helpers, "tarfile")
mock_tar = mock_tarfile.open.return_value.__enter__.return_value
utils.extract_tarbytes(test_bytes, "foobar")
mock_tarfile.open.assert_called_once_with(fileobj=io.BytesIO(test_bytes), mode="r:gz")
mock_tar.extractall.assert_called_once_with("foobar", mocker.ANY, numeric_owner=mocker.ANY)
def test_iter_requirements(mocker, tmp_path):
"""should iter requirements"""
tmp_file = tmp_path / "tmp_reqs.txt"
tmp_file.touch()
tmp_file.write_text("micropy-cli==1.0.0")
result = next(utils.iter_requirements(tmp_file))
assert result.name == "micropy-cli"
assert result.specs == [("==", "1.0.0")]
def test_create_dir_link(mocker, tmp_path):
"""Should create a symlink or directory junction if needed"""
targ_path = tmp_path / "target_dir"
targ_path.mkdir()
link_path = tmp_path / "link_path"
mock_sys = mocker.patch.object(utils.helpers, "sys")
mock_platform = type(mock_sys).platform = mocker.PropertyMock()
mock_subproc = mocker.patch.object(utils.helpers, "subproc")
mock_path = mocker.patch.object(utils.helpers, "Path").return_value
mock_path.symlink_to.side_effect = [mocker.ANY, OSError, OSError, OSError]
mock_platform.return_value = "linux"
# Test POSIX (should not raise exception)
utils.create_dir_link(link_path, targ_path)
mock_path.symlink_to.assert_called_once()
assert mock_subproc.call_count == 0
# Test POSIX failed for unknown reason
with pytest.raises(OSError):
utils.create_dir_link(link_path, targ_path)
# Test Windows (should try to make symlink, fallback on DJ)
mock_platform.return_value = "win32"
mock_subproc.call.return_value = 0
utils.create_dir_link(link_path, targ_path)
assert mock_subproc.call.call_count == 1
# Test Windows fails for some reason
mock_subproc.call.return_value = 1
with pytest.raises(OSError):
utils.create_dir_link(link_path, targ_path)
def test_is_dir_link(mocker, tmp_path):
"""Should test if a path is a symlink or directory junction"""
link_path = tmp_path / "link"
targ_path = tmp_path / "target"
mock_sys = mocker.patch.object(utils.helpers, "sys")
mock_platform = type(mock_sys).platform = mocker.PropertyMock()
mock_path = mocker.patch.object(utils.helpers, "Path").return_value
mock_path.is_symlink.side_effect = [True, False, False, False]
# Test Symlink (POSIX)
mock_platform.return_value = "linux"
assert utils.is_dir_link(link_path)
assert not utils.is_dir_link(link_path)
# Test Directory Junction (Windows)
mock_platform.return_value = "win32"
# From what I can tell, while Path.is_symlink always returns false for DJs.
# However, on a DJ, Path.absolute will return the absolute path to the DJ,
# while Path.resolve will return the absolute path to the source directory.
# With this in mind, this check SHOULD work.
mock_path.resolve.return_value = targ_path
mock_path.absolute.return_value = link_path
assert utils.is_dir_link(link_path)
mock_path.absolute.return_value = targ_path
assert not utils.is_dir_link(link_path)
@pytest.mark.parametrize(
"versions,expect",
[
(["1.0.0rc.1"], False),
(["1.0.0rc.1", "1.0.0"], "1.0.0"),
(["1.0.0rc.1", "1.0.0", "2.0.0rc.1", "2.0.0"], "2.0.0"),
],
)
def test_is_update_available(mocker, requests_mock, versions, expect):
"""Test self-update check method"""
fake_data = {"releases": {k: [] for k in versions}}
requests_mock.get("https://pypi.org/pypi/micropy-cli/json", json=fake_data)
mocker.patch("micropy.utils._compat.metadata.version", return_value="0.0.0")
utils.helpers.get_cached_data.clear_cache()
assert utils.helpers.is_update_available() == expect
def test_stream_download(mocker):
"""Test stream download"""
mock_req = mocker.patch.object(utils.helpers, "requests")
mock_stream = mocker.MagicMock()
mock_stream.headers = {"content-length": "1000"}
mock_req.get.return_value = mock_stream
tqdm_mock = mocker.patch.object(utils.helpers, "tqdm")
utils.stream_download("https://someurl.com/file.ext")
expect_args = {
"unit_scale": True,
"unit_divisor": 1024,
"smoothing": 0.1,
"bar_format": mocker.ANY,
}
tqdm_mock.assert_called_once_with(total=1000, unit="B", **expect_args)