Repository: ansible/ansibullbot
Branch: devel
Commit: 4d7ff8ded98e
Files: 199
Total size: 7.2 MB
Directory structure:
gitextract_zad12un7/
├── .coveragerc
├── .github/
│ ├── .codecov.yml
│ └── CODE_OF_CONDUCT.md
├── .gitignore
├── .gitmodules
├── CONTRIBUTING.md
├── ISSUE_HELP.md
├── LICENSE
├── README.md
├── Vagrantfile
├── ansible.cfg
├── ansibullbot/
│ ├── __init__.py
│ ├── _text_compat.py
│ ├── ansibletriager.py
│ ├── ci/
│ │ ├── __init__.py
│ │ ├── azp.py
│ │ └── base.py
│ ├── constants.py
│ ├── defaulttriager.py
│ ├── exceptions.py
│ ├── ghapiwrapper.py
│ ├── historywrapper.py
│ ├── issuewrapper.py
│ ├── plugins/
│ │ ├── __init__.py
│ │ ├── backports.py
│ │ ├── botstatus.py
│ │ ├── ci_rebuild.py
│ │ ├── collection_facts.py
│ │ ├── community_workgroups.py
│ │ ├── component_matching.py
│ │ ├── contributors.py
│ │ ├── cross_references.py
│ │ ├── deprecation.py
│ │ ├── docs_info.py
│ │ ├── filament.py
│ │ ├── label_commands.py
│ │ ├── needs_contributor.py
│ │ ├── needs_info.py
│ │ ├── needs_revision.py
│ │ ├── notifications.py
│ │ ├── shipit.py
│ │ ├── small_patch.py
│ │ ├── spam.py
│ │ ├── test_support_plugins.py
│ │ └── traceback.py
│ └── utils/
│ ├── __init__.py
│ ├── botmetadata.py
│ ├── component_tools.py
│ ├── extractors.py
│ ├── feature_flags.py
│ ├── galaxy.py
│ ├── gh_gql_client.py
│ ├── git_tools.py
│ ├── github.py
│ ├── logs.py
│ ├── moduletools.py
│ ├── net_tools.py
│ ├── receiver_client.py
│ ├── sentry.py
│ ├── sqlite_utils.py
│ ├── systemtools.py
│ ├── timetools.py
│ └── version_tools.py
├── azure-pipelines.yml
├── ci_output/
│ ├── codecoverage/
│ │ └── .gitdir
│ └── testresults/
│ └── .gitdir
├── constraints.txt
├── docs/
│ ├── collection_migration.md
│ └── contribution_tips.md
├── features.yaml
├── playbooks/
│ ├── bot-on-dev.yml
│ ├── bot-on-prod.yml
│ ├── files/
│ │ └── centos7.vimrc
│ ├── group_vars/
│ │ ├── all.yml
│ │ ├── ansibullbot.yml
│ │ └── tower.yml
│ ├── host_vars/
│ │ ├── ansibullbot-dev.eng.ansible.com.yml
│ │ └── ansibullbot.eng.ansible.com.yml
│ ├── hosts.yml
│ ├── requirements.yml
│ ├── setup-ansibullbot-dev.yml
│ ├── setup-ansibullbot.yml
│ ├── teardown-ansibullbot-dev.yml
│ ├── tower.yml
│ ├── update-ansibullbot.yml
│ └── vagrant.yml
├── pytest.ini
├── requirements.txt
├── scripts/
│ ├── ansibot_actions.py
│ ├── ansibot_receiver.py
│ ├── ansibot_status.cgi
│ ├── generate_issues_by_file.py
│ └── slack-notice.py
├── setup.py
├── templates/
│ ├── bad_pr.j2
│ ├── bot_status.j2
│ ├── collection_migration.j2
│ ├── commit_msg_mentions.j2
│ ├── community_shipit_notify.j2
│ ├── community_workgroups.j2
│ ├── components_banner.j2
│ ├── docs_team_info.j2
│ ├── fork.j2
│ ├── incoming_ref_missing.j2
│ ├── issue_missing_data.j2
│ ├── merge_commit_notify.j2
│ ├── multiple_module_notify.j2
│ ├── needs_info_base.j2
│ ├── notify.j2
│ ├── shippable_test_result.j2
│ ├── test_support_plugins.j2
│ └── waiting_on_contributor_close.j2
├── test-requirements.txt
├── tests/
│ ├── __init__.py
│ ├── bin/
│ │ └── ansibot-test
│ ├── component/
│ │ ├── __init__.py
│ │ ├── module_matching.py
│ │ ├── test_idempotence.py
│ │ └── test_supershipit.py
│ ├── fixtures/
│ │ ├── component_data/
│ │ │ ├── component_errors.json
│ │ │ ├── component_expected_results.json
│ │ │ ├── component_match_map.json
│ │ │ └── component_skip.json
│ │ ├── docs_info/
│ │ │ ├── 0_issue.yml
│ │ │ ├── 1_issue.yml
│ │ │ ├── 2_issue.yml
│ │ │ ├── 3_issue.yml
│ │ │ ├── 4_issue.yml
│ │ │ ├── 5_issue.yml
│ │ │ └── files/
│ │ │ ├── docsite_index.rst
│ │ │ ├── hacks_fail_me.txt
│ │ │ └── lib_ansible_foo.py
│ │ ├── issue_template_meta.json
│ │ ├── needs_contributor/
│ │ │ └── 0_issue.yml
│ │ ├── needs_revision/
│ │ │ ├── 0_issue.yml
│ │ │ ├── 0_prstatus.json
│ │ │ ├── 0_reviews.json
│ │ │ ├── 1_issue.yml
│ │ │ ├── 1_reviews.json
│ │ │ └── 2_issue.yml
│ │ ├── rebuild/
│ │ │ ├── 0_issue.yml
│ │ │ ├── 0_prstatus.json
│ │ │ ├── 1_issue.yml
│ │ │ ├── 1_prstatus.json
│ │ │ ├── 2_issue.yml
│ │ │ ├── 2_prstatus.json
│ │ │ ├── 3_issue.yml
│ │ │ └── 3_prstatus.json
│ │ ├── rebuild_merge/
│ │ │ ├── 0_issue.yml
│ │ │ ├── 0_prstatus.json
│ │ │ ├── 1_issue.yml
│ │ │ ├── 1_prstatus.json
│ │ │ ├── 2_issue.yml
│ │ │ ├── 2_prstatus.json
│ │ │ ├── 3_issue.yml
│ │ │ └── 3_prstatus.json
│ │ └── shipit/
│ │ ├── 0_issue.yml
│ │ ├── 0_prstatus.json
│ │ ├── 1_issue.yml
│ │ ├── 1_prstatus.json
│ │ ├── 2_issue.yml
│ │ └── 2_prstatus.json
│ ├── manual/
│ │ ├── group_errors.py
│ │ └── matching_test.py
│ ├── unit/
│ │ ├── __init__.py
│ │ ├── decorators/
│ │ │ ├── __init__.py
│ │ │ └── test_github.py
│ │ ├── parsers/
│ │ │ ├── __init__.py
│ │ │ ├── metadata_1.yml
│ │ │ └── test_bot_metadata_parser.py
│ │ ├── triagers/
│ │ │ ├── __init__.py
│ │ │ └── plugins/
│ │ │ ├── __init__.py
│ │ │ ├── test_automerge.py
│ │ │ ├── test_docs_info.py
│ │ │ ├── test_needs_contributor.py
│ │ │ ├── test_needs_info.py
│ │ │ ├── test_needs_revision.py
│ │ │ ├── test_notifications.py
│ │ │ ├── test_rebuild.py
│ │ │ ├── test_rebuild_merge.py
│ │ │ └── test_shipit.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── test_component_tools.py
│ │ │ ├── test_extractors_pr_number.py
│ │ │ ├── test_githubid_extractor.py
│ │ │ ├── test_sqlite_tools.py
│ │ │ ├── test_template_extractor.py
│ │ │ ├── test_template_extractor_simple.py
│ │ │ └── test_time_tools.py
│ │ └── wrappers/
│ │ ├── __init__.py
│ │ └── test_history_wrapper.py
│ └── utils/
│ ├── __init__.py
│ ├── componentmocks.py
│ ├── helpers.py
│ ├── issue_mock.py
│ └── repo_mock.py
├── tox.ini
├── triage_ansible.py
└── triage_ansible_mp.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .coveragerc
================================================
[run]
omit = tests/*
================================================
FILE: .github/.codecov.yml
================================================
coverage:
  precision: 2
  round: nearest
  range: "40..100"
  status:
    # Only consider coverage of the code snippet changed in PR
    # https://docs.codecov.io/docs/commit-status
    project: no
    patch: yes
    changes: no

comment:
  layout: "header, diff"
  behavior: default
  require_changes: no

flags:
  library:
    paths:
      - aiohttp/
  configs:
    paths:
      - requirements/
      - ".git*"
      - "*.toml"
      - "*.yml"
  changelog:
    paths:
      - CHANGES/
      - CHANGES.rst
  docs:
    paths:
      - docs/
      - "*.md"
      - "*.rst"
      - "*.txt"
  tests:
    paths:
      - tests/
  tools:
    paths:
      - tools/
  third-party:
    paths:
      - vendor/

ignore:
  - old/
  - tests/
================================================
FILE: .github/CODE_OF_CONDUCT.md
================================================
# Community Code of Conduct
Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
================================================
FILE: .gitignore
================================================
# Created by https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs
# Edit at https://www.gitignore.io/?templates=git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs
### Emacs ###
# -*- mode: gitignore; -*-
*~
\#*\#
/.emacs.desktop
/.emacs.desktop.lock
*.elc
auto-save-list
tramp
.\#*
# Org-mode
.org-id-locations
*_archive
# flymake-mode
*_flymake.*
# eshell files
/eshell/history
/eshell/lastdir
# elpa packages
/elpa/
# reftex files
*.rel
# AUCTeX auto folder
/auto/
# cask packages
.cask/
dist/
# Flycheck
flycheck_*.el
# server auth directory
/server/
# projectiles files
.projectile
# directory configuration
.dir-locals.el
# network security
/network-security.data
### Git ###
# Created by git for backups. To disable backups in Git:
# $ git config --global mergetool.keepBackup false
*.orig
# Created by git when using merge tools for conflicts
*.BACKUP.*
*.BASE.*
*.LOCAL.*
*.REMOTE.*
*_BACKUP_*.txt
*_BASE_*.txt
*_LOCAL_*.txt
*_REMOTE_*.txt
### JupyterNotebook ###
.ipynb_checkpoints
*/.ipynb_checkpoints/*
# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/
#
### Linux ###
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### PyCharm+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### PyCharm+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
.idea/
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
*.iml
modules.xml
.idea/misc.xml
*.ipr
### pydev ###
.pydevproject
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
### Python Patch ###
.venv/
### Vim ###
# Swap
[._]*.s[a-v][a-z]
[._]*.sw[a-p]
[._]s[a-rt-v][a-z]
[._]ss[a-gi-z]
[._]sw[a-p]
# Session
Session.vim
# Temporary
.netrwhist
# Auto-generated tag files
tags
# Persistent undo
[._]*.un~
### WebStorm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
# Generated files
# Sensitive or high-churn files
# Gradle
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# CMake
# Mongo Explorer plugin
# File-based project format
# IntelliJ
# mpeltonen/sbt-idea plugin
# JIRA plugin
# Cursive Clojure plugin
# Crashlytics plugin (for Android Studio and IntelliJ)
# Editor-based Rest Client
# Android studio 3.1+ serialized cache file
### WebStorm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
.idea/sonarlint
### Windows ###
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs
data*
headers*
run*
ansibullbot.egg-info
ansibullbot.cfg
*.sublime-*
ansibullbot/utils/ghostdriver.log
ansible/
*.bak
testrl.py
tests/fixtures/ansible-*.tar.gz
.vagrant
*.retry
*.xml
/issues/
================================================
FILE: .gitmodules
================================================
[submodule "playbooks/roles/ansibullbot"]
path = playbooks/roles/ansibullbot
url = https://github.com/mkrizek/ansible-role-ansibullbot.git
branch = 2.3.3
[submodule "playbooks/roles/firewall"]
path = playbooks/roles/firewall
url = https://github.com/samdoran/ansible-role-firewall.git
branch = 2.1.2
[submodule "playbooks/roles/repo_epel"]
path = playbooks/roles/repo_epel
url = https://github.com/samdoran/ansible-role-repo-epel.git
branch = 1.2.2
[submodule "playbooks/roles/ansibullbot_instance"]
path = playbooks/roles/ansibullbot_instance
url = https://github.com/samdoran/ansible-role-ansibullbot-instance.git
branch = 3.1.2
[submodule "playbooks/roles/mongodb"]
path = playbooks/roles/mongodb
url = https://github.com/samdoran/ansible-role-mongodb.git
branch = 1.1.2
[submodule "playbooks/roles/fail2ban"]
path = playbooks/roles/fail2ban
url = https://github.com/samdoran/ansible-role-fail2ban.git
branch = 1.0.3
[submodule "playbooks/roles/yum_cron"]
path = playbooks/roles/yum_cron
url = https://github.com/samdoran/ansible-role-yum-cron.git
branch = 1.2.1
[submodule "playbooks/roles/caddy"]
path = playbooks/roles/caddy
url = https://github.com/samdoran/ansible-role-caddy.git
branch = 2.1.1
[submodule "playbooks/roles/authorized_keys"]
path = playbooks/roles/authorized_keys
url = https://github.com/samdoran/ansible-role-authorized-keys.git
branch = 1.0.2
================================================
FILE: CONTRIBUTING.md
================================================
# Ansibullbot Contributor's Guide
## Python compatibility
Ansibullbot is compatible with Python 3.8+.
## Getting started
1. Fork this repo
2. Clone your fork
3. Create a feature branch
4. Optionally: create a [Python virtual environment](https://realpython.com/python-virtual-environments-a-primer/)
5. Install the python requirements: `pip install -r requirements.txt`
6. Create the log file:
   * either add `--log path/to/file.log` to the `triage_ansible.py` invocation below
   * or use `sudo touch /var/log/ansibullbot.log && sudo chmod 777 /var/log/ansibullbot.log`
7. Create the config file by copying [`examples/ansibullbot.cfg`](https://github.com/ansible/ansibullbot/blob/devel/examples/ansibullbot.cfg) to one of these paths:
   * `~/.ansibullbot.cfg`
   * `$CWD/ansibullot.cfg`
   * `/etc/ansibullot/ansibullbot.cfg`
   * or set the `ANSIBULLBOT_CONFIG` environment variable to the location of the configuration file
8. Fill in the credentials (see the sketch after this list)
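For reference, steps 6 through 8 might look like the following shell session when run from a checkout of this repository; the paths chosen here are examples, not requirements:
```bash
# Create a world-writable log file for the bot (step 6)
sudo touch /var/log/ansibullbot.log
sudo chmod 777 /var/log/ansibullbot.log

# Copy the example config to one of the searched locations (step 7) ...
cp examples/ansibullbot.cfg ~/.ansibullbot.cfg
# ... or point the bot at an arbitrary location instead
export ANSIBULLBOT_CONFIG=~/.ansibullbot.cfg

# Fill in your GitHub credentials before running the triager (step 8)
${EDITOR:-vi} ~/.ansibullbot.cfg
```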
## Testing your changes
Run with `verbose`, `debug` and `dry-run` ...
```bash
./triage_ansible.py --debug --verbose --dry-run
```
## Testing changes to BOTMETA.yml
1. Download [`BOTMETA.yml`](https://github.com/ansible/ansible/blob/devel/.github/BOTMETA.yml) to a local directory
2. Edit the file with whatever changes you want to make.
3. Run `triage_ansible.py` with `--botmetafile=<PATHTOFILE>`.
If you have a specific issue to test against, use the `--id` parameter to speed up testing.
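Putting those steps together, a dry run against a locally edited copy of `BOTMETA.yml` might look like the following; the temporary file path and the issue number are placeholders:
```bash
# One way to fetch the current BOTMETA.yml from ansible/ansible
curl -o /tmp/BOTMETA.yml https://raw.githubusercontent.com/ansible/ansible/devel/.github/BOTMETA.yml

# After editing /tmp/BOTMETA.yml, dry-run the triager against a single issue
./triage_ansible.py --debug --verbose --dry-run \
    --botmetafile=/tmp/BOTMETA.yml \
    --id=12345
```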
## Testing changes related to a single label
The `--id` parameter can take a path to a script. The `scripts` directory is full of scripts that will return json'ified lists of issue numbers. One example is the `scripts/list_open_issues_with_needs_info.sh` script which scrapes the github UI for any issues with the needs_info label. Here's how you might use that to test your changes to ansibullbot against all issues with needs_info ...
```
./triage_ansible.py --debug --verbose --dry-run --id=scripts/list_open_issues_with_needs_info.sh
```
## Updating Ansible Playbooks and Roles used by Ansibullbot ##
Ansibullbot is deployed and managed using [Ansible](https://www.ansible.com) and [Ansible Tower](https://www.ansible.com/tower). There are several roles used by Ansibullbot, each of which is a [git submodule](https://git-scm.com/book/en/v2/Git-Tools-Submodules).
When making changes to anything besides the roles, make the changes in this repository and submit a pull request.
When making changes to roles, first submit a pull request to the role repository and ensure it is merged there. Then submit a pull request to this repository that updates the submodule to include the new commit.
To update the role submodule and include it in your pull request:
1. Run `git submodule update --remote [path to role]` to pull in the latest role commits.
2. `git add [path to role]`
3. Commit and push the branch to your fork.
4. Submit the pull request (see the example below).
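Using the ansibullbot role as an example, the submodule bump might look like this; the branch name and commit message are illustrative:
```bash
# Pull in the latest commits for the role submodule
git submodule update --remote playbooks/roles/ansibullbot

# Stage the new submodule pointer and commit it on a feature branch
git checkout -b update-ansibullbot-role
git add playbooks/roles/ansibullbot
git commit -m "Update ansibullbot role submodule"
git push origin update-ansibullbot-role
```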
================================================
FILE: ISSUE_HELP.md
================================================
# Ansibullbot Help
Making progress in resolving issues for modules depends upon your interaction! Please be sure to respond to requests or additional information as needed.
If at any time you think this bot is misbehaving (not for test failures), please leave a comment containing the keyword [`bot_broken`](#cmd-bot_broken) and an Ansible staff member will intervene.
#### Table of contents
* [Overview](#overview)
* [For issue submitters](#for-issue-submitters)
* [For pull request submitters](#for-pull-request-submitters)
* [When will your pull request be merged?](#when-will-your-pull-request-be-merged)
* [New Modules](#new-modules)
* [Existing Modules](#existing-modules)
* [core](#core)
* [certified](#certified)
* [community](#community)
* [network](#network)
* [Non-module changes](#non-module-changes)
* [For community maintainers](#for-community-maintainers)
* [How to disable notifications](#how-to-disable-notifications)
* [For anyone else](#for-anyone-else)
* [Commands](#commands)
* [Labels](#labels)
* [When to use label commands](#when-to-use-label-commands)
* [How to use label commands](#how-to-use-label-commands)
## Overview
The Ansibull Triage Bot serves many functions:
* Responds quickly to issue and pull request submitters to thank them;
* Identifies the maintainers responsible for reviewing pull requests for any files affected;
* Tracks the current status of pull requests;
* Pings responsible parties to remind them of any actions that they may be responsible for;
* Provides maintainers with the ability to move pull requests through our [workflow](#when-will-your-pull-request-be-merged);
* Identifies issues and pull requests abandoned by their authors so that we can close them;
* Identifies modules abandoned by their maintainers so that we can find new maintainers;
* Automatically labels issues and pull requests based on keywords or affected files.
## For issue submitters
Please note that if you have a question about how to use this feature or module with Ansible, that's probably something you should ask on the [ansible-project](https://groups.google.com/forum/#!forum/ansible-project) mailing list, rather than submitting a bug report. For more details, please see [I’ve Got A Question](http://docs.ansible.com/ansible/community.html#i-ve-got-a-question).
If the feature/module maintainer or ansibullbot needs further information, please respond to the request, so that you can help the devs to help you!
The bot requires a minimal subset of information from the issue template:
* issue type
* component name
* ansible version
* summary
If any of those items are missing or empty, ansibullbot will keep the issue in a `needs_info` state until the data is provided in the issue's description. The bot is expecting an issue description styled after the default issue template, so please use that whenever possible.
Expect the bot to do a few things:
1. Add common [labels](#labels) such as `needs_triage`, `bug_report`, `feature_idea`, etc.
These labels are determined by templated data in the description. Please fill out the templates as accurately as possible so that the appropriate labels are used.
2. Notify and assign the maintainer(s) of the relevant file(s) or module(s).
Notifications will happen via a comment with the `@NAME` syntax. If you know of other interested parties, feel free to ping them in a comment or in your issue description.
If you are not sure who the issue is waiting on, please use the [`bot_status`](#cmd-bot_status) command.
## For pull request submitters
Expect the bot to do a few things:
1. All of the items described in the for [issue submitters](#for-issue-submitters) section.
2. Add [labels](#labels) indicating the status of the pull request.
Please prefix your pull request's title with `WIP` if you are not yet finished making changes. This will tell the bot to ignore the [`needs_rebase`](#label-needs_rebase) and [`shipit`](#label-shipit) workflows until you remove it from the title.
If you are finished committing to your pull request or have made changes due to a request, please use the [`ready_for_review`](#cmd-ready_for_review) command.
If you are not sure who the pull request is waiting on, please use the [`bot_status`](#cmd-bot_status) command.
### When will your pull request be merged?
:information_source: The GitHub `Approve` pull request status is ignored; the [`shipit`](#cmd-shipit) command is used by maintainers to approve a pull request. The bot automatically adds the [`shipit`](#label-shipit) label to the pull request when the required number of [`shipit`](#cmd-shipit) commands has been reached.
The bot will label a pull request with [`shipit`](#label-shipit) when at least **two** [`shipit`](#cmd-shipit) commands are issued. The following rules describe how [`shipit`](#cmd-shipit) commands are counted:
* a [`shipit`](#cmd-shipit) issued by a module maintainer, a maintainer of a module in the same namespace, or a core team member is always taken into account
* when the submitter is a module maintainer, a maintainer of a module in the same namespace, or a core team member, their [`shipit`](#cmd-shipit) is automatically counted
* a [`shipit`](#cmd-shipit) issued by anyone else is taken into account when both of the following conditions are met:
  * at least one module maintainer, maintainer of a module in the same namespace, or core team member has approved the pull request with a [`shipit`](#cmd-shipit) command
  * at least three people who are neither maintainers nor core team members have approved the pull request using the [`shipit`](#cmd-shipit) command
#### New Modules
Once the pull request is labeled with [`shipit`](#label-shipit), the module will be merged once a member of the Ansible organization has reviewed it and decided to include it.
:information_source: If you are a maintainer of a module in the same namespace, only one `shipit` is required.
#### Existing Modules
Members of the Ansible Core Team typically do all the maintenance on these modules, so only they can approve changes.
#### Non-module changes
The Ansible core team approves these pull requests, and it may take some time for them to get to your request.
## For community maintainers
:information_source: `Approve` pull request status is ignored, [`shipit`](#cmd-shipit) command must be used in order to approve a pull request.
Thanks in advance for taking a look at issues and pull requests and for your ongoing maintenance. If you are unable to troubleshoot or review this issue/pull request with the information provided, please ping the submitter of the issue in a comment to let them know.
### How to disable notifications
If you wish to stop receiving notifications from Ansibullbot on issues and pull requests, you need to add your GitHub name to the `ignored` key under the plugin you are no longer interested in in the [BOTMETA.yml](https://github.com/ansible/ansible/blob/devel/.github/BOTMETA.yml) file and send a pull request against the [ansible/ansible](https://github.com/ansible/ansible) repository. See the example below:
```yaml
...
$modules/cloud/amazon/:
  ignored: erydo seiffert simplesteph nadirollo tedder joshsouza defionscode
  maintainers: $team_aws
...
```
If the plugin was migrated to a [collection](https://github.com/ansible-collections), you also need to add an ignore entry to the `BOTMETA.yml` in the collection repository.
## For anyone else
Reactions help us determine how many people are interested in a pull request or have run across a similar bug. Please leave a +1 [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments) (:+1:) if that applies to you. Any additional details you can provide, such as your usecase, environment, steps to reproduce, or workarounds you have found, can help out with resolving issues or getting pull requests merged.
## Commands
To streamline the maintenance process, we've added some commands to Ansibullbot that you can use to help direct the work flow. Using the automation is simply a matter of adding one of the following commands in your comments:
Command | Scope | Allowed | Description
--- | --- | --- | ---
**<a name="cmd-bot_broken">bot_broken</a>** | issues pull requests | anyone | Use this command if you think the bot is misbehaving (not for test failures), and an Ansible staff member will investigate.
**<a name="cmd-bot_broken">!bot_broken</a>** | issues pull requests | anyone | Clear `bot_broken` command.
**<a name="cmd-bot_skip">bot_skip</a>** | issues pull requests | staff | Ansible staff members use this to have the bot skip triaging an issue.
**<a name="cmd-bot_skip">!bot_skip</a>** | issues pull requests | staff | Clear `bot_skip` command.
**<a name="cmd-bot_status">bot_status</a>** | pull requests | submitters maintainers | Use this command if you would like the bot to comment with some helpful metadata about the issue.
**<a name="cmd-!needs_collection_redirect">!needs_collection_redirect</a>** | issues pull requests | anyone | Use this command if bot made a mistake in deciding an issue or PR was for a file in a collection. Ansible Core team member will need to re-open the issue/PR. Contact a Core team member to review the issue/PR on IRC: `#ansible-devel` on Libera.chat IRC.
**<a name="cmd-needs_info">needs_info</a>** | issues pull requests | maintainers past committers | Use this command if you need more information from the submitter. We will notify the submitter and apply the [`needs_info`](#label-needs_info) label.
**<a name="cmd-!needs_info">!needs_info</a>** | issues pull requests | maintainers past committers | If you do not need any more information and just need time to work the issue, leave a comment that contains the command `!needs_info` and the [`needs_info`](#label-needs_info) label will be replaced with [`waiting_on_maintainer`](#label-waiting_on_maintainer).
**<a name="cmd-needs_revision">needs_revision</a>** | pull requests | maintainers | Use this command if you would like the submitter to make changes.
**<a name="cmd-!needs_revision">!needs_revision</a>** | pull requests | maintainers | If you want to clear the [`needs_revision`](#label-needs_revision) label, use this command.
**<a name="cmd-needs_rebase">needs_rebase</a>** | pull requests | maintainers | Use this command if the submitters branch is out of date. The bot should automatically apply this label, so you may never need to use it.
**<a name="cmd-!needs_rebase">!needs_rebase</a>** | pull requests | maintainers | Clear the [`needs_rebase`](#label-needs_rebase) label.
**<a name="cmd-notabug">notabug</a>** | issues | maintainers | If you believe this is not a bug, please leave a comment stating `notabug`, along with any additional information as to why it is not, and we will close this issue.
**<a name="cmd-bug_resolved">bug_resolved</a>** | issues | maintainers | If you believe this issue is resolved, please leave a comment stating `bug_resolved`, and we will close this issue.
**<a name="cmd-resolved_by_pr">resolved_by_pr</a>** | issues | maintainers | If you believe this issue has been resolved by a pull request, please leave a comment stating `resolved_by_pr` followed by the pull request number.
**<a name="cmd-wontfix">wontfix</a>** | issues | maintainers | If this is a bug that you can't or won't fix, please leave a comment including the word `wontfix`, along with an explanation for why it won't be fixed.
**<a name="cmd-needs_contributor">needs_contributor</a>** | issues | maintainers | If this bug or feature request is something that you want implemented but do not have the time or expertise to do, comment with `needs_contributor`, and the issue will be put into a [`waiting_on_contributor`](#label-waiting_on_contributor) state.
**<a name="cmd-duplicate_of">duplicate_of</a>** | issues | maintainers | If this bug or feature request is a duplicate of another issue, comment with `duplicate_of` followed by the issue number that it duplicates, and the issue will be closed.
**<a name="cmd-close_me">close_me</a>** | issues | maintainers | If the issue can be closed for a reason you will specify in the comment, use this command.
**<a name="cmd-ready_for_review">ready_for_review</a>** | pull requests | submitters | If you are finished making commits to your pull request or have made changes due to a request, please use this command to trigger a review from the maintainer(s).
**<a name="cmd-shipit">shipit</a>** | pull requests | maintainers | If you approve the code in this pull request, use this command to have it merged. Note that Github `Approve` pull request status is ignored. Nonetheless `shipit` in review summary of commented or approved review is taken in account. In place of `shipit`, `+1` and `LGTM` can be used too. Note that these commands must not be surrounded by any character, spaces excepted.
**<a name="cmd-add-label">+label</a>** | issues pull requests | staff maintainers | Add a [supported label](#labels). See [When to use label commands](#when-to-use-label-commands).
**<a name="cmd-remove-label">-label</a>** | issues pull requests | staff maintainers | Remove a [supported label](#labels). See [When to use label commands](#when-to-use-label-commands).
**<a name="cmd-rebuild_merge">rebuild_merge</a>** | pull requests | staff | Allow core team members to trigger CI, then the pull request is automatically merged if CI results are successful.
**<a name="cmd-rebuild">/rebuild</a>** | pull requests | anyone | Allows anyone to re-trigger CI.
**<a name="cmd-rebuild_failed">/rebuild_failed</a>** | pull requests | anyone | Allows anyone to re-trigger CI only on failed jobs [this is usually much faster than /rebuild].
**<a name="cmd-component">!component</a>** | issues | anyone | Set, append or remove a file from the matched components. To set, use `!component =lib/ansible/foo/bar`. To add, use `!component +lib/ansible/foo/bar`. To remove, use `!component -lib/ansible/foo/bar`.
**<a name="cmd-waffling">!waffling</a>** | all | maintainers | Disable waffling detection on a label. To use `!waffling <labelname>` on a separate line in a comment.
## Labels
The bot adds many labels on issues and pull requests.
Label | Scope | Prevent automerge | Description
--- | --- | --- | ---
**<a name="label-automerge">automerge</a>** | pull requests | no | Identify pull requests automatically merged by the bot.
**<a name="label-backport">backport</a>** | pull requests | yes | Added to pull requests which don't target `devel` branch.
**<a name="label-bot_broken">bot_broken</a>** | pull requests | yes | Allow to identify pull requests for which [`bot_broken`](#cmd-bot_broken) had been used.
**<a name="label-bug">bug</a>** | issues pull requests | no | Added to issues or pull requests reporting/fixing bugs.
**<a name="label-c:_name_">c:_name_</a>** | issues pull requests | no | Categorize issues or pull requests by their relevant source code files.
**<a name="label-ci_verified">ci_verified</a>** | pull requests | yes | Identify pull requests for which CI failed. A pull request must successfully pass CI in order to be merged.
**<a name="label-docs">docs</a>** | issues pull requests | no | Identify issues or pull requests related to documentation.
**<a name="label-docsite_pr">docsite_pr</a>** | pull requests | no | Identify pull requests created through documentation's "Edit on GitHub" link
**<a name="label-easyfix">easyfix</a>** | issue or pull requests | no | Identify easy entrance point for people who are looking to start contributing.
**<a name="label-feature">feature</a>** | issues pull requests | no | Added to issues or pull requests requesting/adding new features.
**<a name="label-filament">filament</a>** | pull requests | no | Identify pull requests related to [Ansible Lightbulb](https://github.com/ansible/lightbulb) project.
**<a name="label-merge_commit">merge_commit</a>** | pull requests | no | Added to pull requests containing at least one merge commit. Pull requests must not contain merge commit.
**<a name="label-module">module</a>** | pull requests | no | Identify pull requests updating existing modules.
**<a name="label-needs_ci">needs_ci</a>** | pull requests | no | Identify pull requests for which CI status is missing. When a pull request is closed and reopened or when new commits are updated, the CI is triggered again.
**<a name="label-needs_collection_redirect">needs_collection_redirect</a>** | issue or pull requests | no | [Collection Migration Docs](https://github.com/ansible/ansibullbot/blob/devel/docs/collection_migration.md)
**<a name="label-needs_info">needs_info</a>** | issues | yes | Identify issues for which reviewer requested further information.
**<a name="label-needs_maintainer">needs_maintainer</a>** | pull requests | no | Ansibullbot is unable to identify authors or maintainers of the related module. Check `author` field format in [`DOCUMENTATION block`](http://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html#documentation-block).
**<a name="label-needs_rebase">needs_rebase</a>** | pull requests | yes | Pull requests which are out of sync with ansible/ansible's `devel` branch. Please review the [rebase guide](http://docs.ansible.com/ansible/devel/dev_guide/developing_rebasing.html) for further information.
**<a name="label-needs_revision">needs_revision</a>** | pull requests | yes | Used for pull request which fail continuous integration tests or if a maintainer has requested a review/revision of the code. This label can be cleared by fixing any failed tests or by commenting [`ready_for_review`](#cmd-ready_for_review).
**<a name="label-needs_template">needs_template</a>** | issues pull requests | no | Label added when description is incomplete. See [issue templates](https://github.com/ansible/ansible/tree/devel/.github/ISSUE_TEMPLATE) or pull request [template](https://raw.githubusercontent.com/ansible/ansible/devel/.github/PULL_REQUEST_TEMPLATE.md).
**<a name="label-needs_triage">needs_triage</a>** | issues pull requests | no | This label will be added if your issue is being labeled for the first time. We (ansible staff and maintainers) use this label to find issues that need a human first touch. We'll remove it once we've given the issue a quick look for any labeling problems or missing data.
**<a name="label-needs_verified">needs_verified</a>** | issues | no | This label implies a maintainer needs to check if the issue can be reproduced in the latest version.
**<a name="label-new_module">new_module</a>** | pull requests | yes | Identify pull requests adding new module.
**<a name="label-owner_pr">owner_pr</a>** | pull requests | no | Identify pull requests made by module maintainers.
**<a name="label-shipit">shipit</a>** | pull requests | no | Identify pull requests for which the required number of `shipit` has been reached. For [community](#community) reviewed pull requests, if `automerge` workflow applies, then pull request should be automatically merged. For all other cases, merge should be performed by a core team members. If your pull request gets no comment and becomes tagged with [`stale_review`](#label-stale_review), you can add it to the [IRC core team meeting agenda](https://github.com/ansible/community/blob/devel/meetings/core-team.yaml) to receive more comments.
**<a name="label-stale_ci">stale_ci</a>** | pull requests | yes | Added when the last CI result is older than one week. When a pull request is closed and reopened, the CI is triggered again. In some case, the bot will automatically trigger the CI when a pull request is labeled with both [`shipit`](#label-shipit) and `stale_ci`.
**<a name="label-stale_review">stale_review</a>** | pull requests | no | Added when submitter made some updates after a reviewer requested some changes, if the submitter updates are older than seven days and the reviewer didn't update their review.
**<a name="label-test">test</a>** | pull requests | no | Identify pull requests related to tests.
**<a name="label-waiting_on_contributor">waiting_on_contributor</a>** | issues pull requests | no | The feature or fix would be accepted, but there are no plans to actively work on it.
**<a name="label-WIP">WIP</a>** | pull requests | yes | Identify pull requests which are not ready (from the submitter point of view) to be merged.
Some labels are used to categorize issues and pull requests:
* Pull requests related to [test](https://github.com/ansible/community/wiki):
* `test`
* Namespace labels:
* `aci`
* `avi`
* `aws`
* `azure`
* `cloud`
* `cloudstack`
* `digital_ocean`
* `docker`
* `f5`
* `gce`
* `infoblox`
* `jboss`
* `meraki`
* `netapp`
* `networking`
* `nxos`
* `openstack`
* `ovirt`
* `ucs`
* `vmware`
* `windows`
* Module labels:
* `m:unarchive`
* `m:xml`
### When to use label commands
The `+label` and `-label` commands are restricted to a subset of available labels and are not meant to replace the other bot commands:
* `affects_X.Y` -- indicates that the issue is relevant to a particular ansible *major.minor* version.
* `c:...` -- these labels categorize issues or pull requests by their relevant source code files.
* `easyfix` -- indicates that the issue is an easy entry point for people who are looking to start contributing.
* `m:...` -- these labels categorize issues or pull requests by their module name.
* `module` -- classifies the issue as a module related issue.
* `needs_triage` -- a human being still needs to validate the issue is properly labeled and has all the information required.
* `test` and namespace labels
### How to use label commands
To use the commands, type each command and label pair on its own line in a comment.
Example:
```
-label needs_triage
+label cloud
+label gce
```
================================================
FILE: LICENSE
================================================
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
================================================
FILE: README.md
================================================
[Build Status](https://dev.azure.com/ansible/ansibullbot/_build/latest?definitionId=12&branchName=devel) [Code Coverage](https://codecov.io/gh/ansible/ansibullbot)
See the [Ansibullbot Project Board](https://github.com/ansible/ansibullbot/projects/1) for what is being worked on.
# User Guide
If you are looking for help, please see the [ISSUE HELP](ISSUE_HELP.md)
# Ansibull Github Issue/Pullrequest Bot
```
$ ./triage_ansible.py --help
usage: triage_ansible.py [-h] [--cachedir CACHEDIR_BASE] [--logfile LOGFILE]
[--daemonize]
[--daemonize_interval DAEMONIZE_INTERVAL] [--debug]
[--dry-run] [--force] [--pause] [--dump_actions]
[--botmetafile BOTMETAFILE]
[--repo {ansible/ansible}] [--skip_no_update]
[--collect_only] [--sort {asc,desc}]
[--skiprepo SKIPREPO] [--only_prs] [--only_issues]
[--only_closed]
[--ignore_state] [--ignore_bot_broken]
[--ignore_module_commits] [--pr PR]
[--start-at START_AT] [--resume] [--last LAST]
[--commit ANSIBLE_COMMIT] [--ignore_galaxy]
[--ci {azp}]
Triage issue and pullrequest queues for Ansible. (NOTE: only useful if you
have commit access to the repo in question.)
optional arguments:
-h, --help show this help message and exit
--cachedir CACHEDIR_BASE
--logfile LOGFILE Send logging to this file
--daemonize run in a continuos loop
--daemonize_interval DAEMONIZE_INTERVAL
seconds to sleep between loop iterations
--debug, -d Debug output
--dry-run, -n Don't make any changes
--force, -f Do not ask questions
--pause, -p Always pause between prs|issues
--dump_actions serialize the actions to disk [/tmp/actions]
--botmetafile BOTMETAFILE
Use this filepath for botmeta instead of from the
repo
--repo {ansible/ansible}, -r {ansible/ansible}
Github repo to triage (defaults to all)
--skip_no_update skip processing if updated_at hasn't changed
--collect_only stop after caching issues
--sort {asc,desc} Direction to sort issues [desc=9-0 asc=0-9]
--skiprepo SKIPREPO Github repo to skip triaging
--only_prs Triage pullrequests only
--only_issues Triage issues only
--only_closed Triage closed issues|prs only
--ignore_state Do not skip processing closed issues
--ignore_bot_broken Do not skip processing bot_broken|bot_skip issues
--ignore_module_commits
Do not enumerate module commit logs
--pr PR, --id PR Triage only the specified pr|issue (separated by
commas)
--start-at START_AT Start triage at the specified pr|issue
--resume pickup right after where the bot last stopped
--last LAST triage the last N issues or PRs
--commit ANSIBLE_COMMIT
Use a specific commit for the indexers
--ignore_galaxy do not index or search for components in galaxy
--ci {azp} Specify a CI provider that repo uses
```
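For local experimentation, the flags above can be combined to triage a single pull request without writing anything back to GitHub; for example (the PR number is only illustrative, and a configured GitHub token is assumed):
```
$ ./triage_ansible.py --debug --dry-run --ci azp --pr 12345
```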
================================================
FILE: Vagrantfile
================================================
# -*- mode: ruby -*-
# vi: set ft=ruby :
$script = <<SCRIPT
# AUTHENTICATION
echo 'root:vagrant' | chpasswd
echo 'vagrant:vagrant' | chpasswd
egrep "^PasswordAuthentication yes" /etc/ssh/sshd_config
RC=$?
if [[ $RC != 0 ]]; then
echo "Enabling ssh passwords"
sed -i.bak 's/PasswordAuthentication\ no/PasswordAuthentication\ yes/' /etc/ssh/sshd_config
service sshd restart
fi
# BASELINE PACKAGES
PACKAGES="epel-release ansible git rsync vim-enhanced bind-utils policycoreutils-python net-tools lsof"
for PKG in $PACKAGES; do
rpm -q $PKG || yum -y install $PKG
done
# VIMRC
rm -f /etc/vimrc
cp /vagrant/playbooks/files/centos7.vimrc /etc/vimrc
# WORKAROUNDS
setenforce 0
fgrep docker /etc/group || groupadd -g 993 docker
fgrep ansibot /etc/group || groupadd -g 1099 ansibot
id ansibot || useradd -u 1099 -g ansibot ansibot
usermod -a -G docker ansibot
rsync -avz --exclude='/vagrant/.vagrant' /vagrant/* /home/ansibot/ansibullbot
# PSEUDO ANSIBLE-LOCAL PROVISIONER
setenforce 0
echo "ansibullbot ansible_host=localhost ansible_connection=local" > /tmp/inv.ini
echo "ansibullbot.eng.ansible.com ansible_host=localhost ansible_connection=local" >> /tmp/inv.ini
cd /vagrant/playbooks
#PLAYBOOKS="setup-ansibullbot.yml"
PLAYBOOKS="vagrant.yml"
for PLAYBOOK in $PLAYBOOKS; do
ansible-playbook \
-v \
-i /tmp/inv.ini \
--skip-tags=ssh \
$PLAYBOOK
done
# --tags=packages,ansibullbot,caddy \
# --skip-tags=botinstance,dns,ssh,ansibullbot_service,ansibullbot_logs \
#--skip-tags=botinstance,dns,ssh,ansibullbot_service,ansibullbot_logs \
# -e "ansibullbot_action=install" \
# HACK IN FIREWALL EXCEPTIONS
firewall-cmd --zone=public --add-port=80/tcp --permanent
firewall-cmd --reload
SCRIPT
Vagrant.configure("2") do |config|
config.vm.box = "centos/7"
config.vm.hostname = "ansibullbot.eng.ansible.com"
config.hostmanager.enabled = true
config.hostmanager.manage_host = true
config.hostmanager.manage_guest = true
config.hostmanager.ignore_private_ip = false
config.hostmanager.include_offline = true
config.vm.network "private_network", ip: "10.0.0.210"
config.vm.synced_folder ".", "/vagrant", type: "nfs", nfs_udp: false
config.vm.provider :libvirt do |libvirt|
libvirt.cpus = 2
libvirt.memory = 2048
end
config.vm.provision "shell", inline: $script
end
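# Illustrative usage (not part of the original file): the hostmanager settings
# and the libvirt provider block above assume the corresponding Vagrant
# plugins are installed, e.g.:
#   vagrant plugin install vagrant-hostmanager vagrant-libvirt
#   vagrant up --provider=libvirt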
================================================
FILE: ansible.cfg
================================================
# config file for ansible -- http://ansible.com/
# ==============================================
# nearly all parameters can be overridden in ansible-playbook
# or with command line flags. ansible will read ANSIBLE_CONFIG,
# ansible.cfg in the current working directory, .ansible.cfg in
# the home directory or /etc/ansible/ansible.cfg, whichever it
# finds first
[defaults]
# some basic default values...
inventory = playbooks/hosts.yml
#library = /usr/share/my_modules/
#remote_tmp = $HOME/.ansible/tmp
#local_tmp = $HOME/.ansible/tmp
forks = 50
#poll_interval = 15
#sudo_user = root
#ask_sudo_pass = True
#ask_pass = True
transport = ssh
#remote_port = 22
#module_lang = C
#module_set_locale = False
#vault_password_file = ~/bin/getvaultkey.sh
# plays will gather facts by default, which contain information about
# the remote system.
#
# smart - gather by default, but don't regather if already gathered
# implicit - gather by default, turn off with gather_facts: False
# explicit - do not gather by default, must say gather_facts: True
gathering = smart
# by default retrieve all facts subsets
# all - gather all subsets
# network - gather min and network facts
# hardware - gather hardware facts (longest facts to retrieve)
# virtual - gather min and virtual facts
# facter - import facts from facter
# ohai - import facts from ohai
# You can combine them using comma (ex: network,virtual)
# You can negate them using ! (ex: !hardware,!facter,!ohai)
# A minimal set of facts is always gathered.
#gather_subset = all
# additional paths to search for roles in, colon separated
#roles_path = /etc/ansible/roles
# uncomment this to disable SSH key host checking
host_key_checking = False
# change the default callback
stdout_callback = debug
# enable additional callbacks
#callback_whitelist = profile_tasks
# Determine whether includes in tasks and handlers are "static" by
# default. As of 2.0, includes are dynamic by default. Setting these
# values to True will make includes behave more like they did in the
# 1.x versions.
#task_includes_static = True
#handler_includes_static = True
# change this for alternative sudo implementations
#sudo_exe = sudo
# What flags to pass to sudo
# WARNING: leaving out the defaults might create unexpected behaviours
#sudo_flags = -H -S -n
# SSH timeout
#timeout = 10
# default user to use for playbooks if user is not specified
# (/usr/bin/ansible will use current user as default)
#remote_user = root
# logging is off by default unless this path is defined
# if so defined, consider logrotate
#log_path = /var/log/ansible.log
# default module name for /usr/bin/ansible
#module_name = command
# use this shell for commands executed under sudo
# you may need to change this to bin/bash in rare instances
# if sudo is constrained
#executable = /bin/sh
# if inventory variables overlap, does the higher precedence one win
# or are hash values merged together? The default is 'replace' but
# this can also be set to 'merge'.
#hash_behaviour = replace
# by default, variables from roles will be visible in the global variable
# scope. To prevent this, the following option can be enabled, and only
# tasks and handlers within the role will see the variables there
#private_role_vars = yes
# list any Jinja2 extensions to enable here:
#jinja2_extensions = jinja2.ext.do,jinja2.ext.i18n
# if set, always use this private key file for authentication, same as
# if passing --private-key to ansible or ansible-playbook
#private_key_file = /path/to/file
# If set, configures the path to the Vault password file as an alternative to
# specifying --vault-password-file on the command line.
#vault_password_file = /path/to/vault_password_file
# format of the string {{ ansible_managed }} available within Jinja2
# templates; it indicates to users editing templated files that their edits
# will be replaced. {file}, {host} and {uid} and strftime codes are replaced
# with the proper values.
#ansible_managed = Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}
ansible_managed = Ansible managed. Any local changes will be overwritten.
# This short version is better used in templates as it won't flag the file as changed every run.
#ansible_managed = Ansible managed: {file} on {host}
# by default, ansible-playbook will display "Skipping [host]" if it determines a task
# should not be run on a host. Set this to "False" if you don't want to see these "Skipping"
# messages. NOTE: the task header will still be shown regardless of whether or not the
# task is skipped.
#display_skipped_hosts = True
# by default, if a task in a playbook does not include a name: field then
# ansible-playbook will construct a header that includes the task's action but
# not the task's args. This is a security feature because ansible cannot know
# if the *module* considers an argument to be no_log at the time that the
# header is printed. If your environment doesn't have a problem securing
# stdout from ansible-playbook (or you have manually specified no_log in your
# playbook on all of the tasks where you have secret information) then you can
# safely set this to True to get more informative messages.
#display_args_to_stdout = False
# by default (as of 1.3), Ansible will raise errors when attempting to dereference
# Jinja2 variables that are not set in templates or action lines. Uncomment this line
# to revert the behavior to pre-1.3.
#error_on_undefined_vars = False
# by default (as of 1.6), Ansible may display warnings based on the configuration of the
# system running ansible itself. This may include warnings about 3rd party packages or
# other conditions that should be resolved if possible.
# to disable these warnings, set the following value to False:
#system_warnings = True
# by default (as of 1.4), Ansible may display deprecation warnings for language
# features that should no longer be used and will be removed in future versions.
# to disable these warnings, set the following value to False:
#deprecation_warnings = True
# (as of 1.8), Ansible can optionally warn when usage of the shell and
# command module appear to be simplified by using a default Ansible module
# instead. These warnings can be silenced by adjusting the following
# setting or adding warn=yes or warn=no to the end of the command line
# parameter string. This will for example suggest using the git module
# instead of shelling out to the git command.
# command_warnings = False
# set plugin path directories here, separate with colons
#action_plugins = /usr/share/ansible/plugins/action
#cache_plugins = /usr/share/ansible/plugins/cache
#callback_plugins = /usr/share/ansible/plugins/callback
#connection_plugins = /usr/share/ansible/plugins/connection
#lookup_plugins = /usr/share/ansible/plugins/lookup
#inventory_plugins = /usr/share/ansible/plugins/inventory
#vars_plugins = /usr/share/ansible/plugins/vars
#filter_plugins = /usr/share/ansible/plugins/filter
#test_plugins = /usr/share/ansible/plugins/test
#strategy_plugins = /usr/share/ansible/plugins/strategy
# by default callbacks are not loaded for /bin/ansible, enable this if you
# want, for example, a notification or logging callback to also apply to
# /bin/ansible runs
#bin_ansible_callbacks = False
# don't like cows? that's unfortunate.
# set to 1 if you don't want cowsay support or export ANSIBLE_NOCOWS=1
#nocows = 1
# set which cowsay stencil you'd like to use by default. When set to 'random',
# a random stencil will be selected for each task. The selection will be filtered
# against the `cow_whitelist` option below.
#cow_selection = default
#cow_selection = random
# when using the 'random' option for cowsay, stencils will be restricted to this list.
# it should be formatted as a comma-separated list with no spaces between names.
# NOTE: line continuations here are for formatting purposes only, as the INI parser
# in python does not support them.
#cow_whitelist=bud-frogs,bunny,cheese,daemon,default,dragon,elephant-in-snake,elephant,eyes,\
# hellokitty,kitty,luke-koala,meow,milk,moofasa,moose,ren,sheep,small,stegosaurus,\
# stimpy,supermilker,three-eyes,turkey,turtle,tux,udder,vader-koala,vader,www
# don't like colors either?
# set to 1 if you don't want colors, or export ANSIBLE_NOCOLOR=1
#nocolor = 1
# if set to a persistent type (not 'memory', for example 'redis') fact values
# from previous runs in Ansible will be stored. This may be useful when
# wanting to use, for example, IP information from one group of servers
# without having to talk to them in the same playbook run to get their
# current IP information.
fact_caching = jsonfile
fact_caching_connection = ~/.cache/facts
fact_caching_timeout = 3600
# retry files
# When a playbook fails by default a .retry file will be created in ~/
# You can disable this feature by setting retry_files_enabled to False
# and you can change the location of the files by setting retry_files_save_path
retry_files_enabled = False
#retry_files_save_path = ~/.ansible-retry
# squash actions
# Ansible can optimise actions that call modules with list parameters
# when looping. Instead of calling the module once per with_ item, the
# module is called once with all items at once. Currently this only works
# under limited circumstances, and only with parameters named 'name'.
#squash_actions = apk,apt,dnf,package,pacman,pkgng,yum,zypper
# prevents logging of task data, off by default
#no_log = False
# prevents logging of tasks, but only on the targets, data is still logged on the master/controller
#no_target_syslog = False
# controls whether Ansible will raise an error or warning if a task has no
# choice but to create world readable temporary files to execute a module on
# the remote machine. This option is False by default for security. Users may
# turn this on to have behaviour more like Ansible prior to 2.1.x. See
# https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user
# for more secure ways to fix this than enabling this option.
#allow_world_readable_tmpfiles = False
# controls the compression level of variables sent to
# worker processes. At the default of 0, no compression
# is used. This value must be an integer from 0 to 9.
#var_compression_level = 9
# controls what compression method is used for new-style ansible modules when
# they are sent to the remote system. The compression types depend on having
# support compiled into both the controller's python and the client's python.
# The names should match with the python Zipfile compression types:
# * ZIP_STORED (no compression. available everywhere)
# * ZIP_DEFLATED (uses zlib, the default)
# These values may be set per host via the ansible_module_compression inventory
# variable
#module_compression = 'ZIP_DEFLATED'
# This controls the cutoff point (in bytes) on --diff for files
# set to 0 for unlimited (RAM may suffer!).
#max_diff_size = 1048576
[privilege_escalation]
become=True
become_method=sudo
become_user=root
become_ask_pass=False
[paramiko_connection]
# uncomment this line to cause the paramiko connection plugin to not record new host
# keys encountered. Increases performance on new host additions. Setting works independently of the
# host key checking setting above.
#record_host_keys=False
# by default, Ansible requests a pseudo-terminal for commands executed under sudo. Uncomment this
# line to disable this behaviour.
#pty=False
[ssh_connection]
# ssh arguments to use
# Leaving off ControlPersist will result in poor performance, so use
# paramiko on older platforms rather than removing it, -C controls compression use
#ssh_args = -C -o ControlMaster=auto -o ControlPersist=60s
# The path to use for the ControlPath sockets. This defaults to
# "%(directory)s/ansible-ssh-%%h-%%p-%%r", however on some systems with
# very long hostnames or very long path names (caused by long user names or
# deeply nested home directories) this can exceed the character limit on
# file socket names (108 characters for most platforms). In that case, you
# may wish to shorten the string below.
#
# Example:
# control_path = %(directory)s/%%h-%%r
#control_path = %(directory)s/ansible-ssh-%%h-%%p-%%r
# Enabling pipelining reduces the number of SSH operations required to
# execute a module on the remote server. This can result in a significant
# performance improvement when enabled, however when using "sudo:" you must
# first disable 'requiretty' in /etc/sudoers
#
# By default, this option is disabled to preserve compatibility with
# sudoers configurations that have requiretty (the default on many distros).
#
pipelining = True
# if True, make ansible use scp if the connection type is ssh
# (default is sftp)
#scp_if_ssh = True
# if False, sftp will not use batch mode to transfer files. This may cause some
# types of file transfer failures impossible to catch however, and should
# only be disabled if your sftp version has problems with batch mode
#sftp_batch_mode = False
[accelerate]
#accelerate_port = 5099
#accelerate_timeout = 30
#accelerate_connect_timeout = 5.0
# The daemon timeout is measured in minutes. This time is measured
# from the last activity to the accelerate daemon.
#accelerate_daemon_timeout = 30
# If set to yes, accelerate_multi_key will allow multiple
# private keys to be uploaded to it, though each user must
# have access to the system via SSH to add a new key. The default
# is "no".
#accelerate_multi_key = yes
[selinux]
# file systems that require special treatment when dealing with security context
# the default behaviour that copies the existing context or uses the user default
# needs to be changed to use the file system dependent context.
#special_context_filesystems=nfs,vboxsf,fuse,ramfs
# Set this to yes to allow libvirt_lxc connections to work without SELinux.
#libvirt_lxc_noseclabel = yes
[colors]
#highlight = white
#verbose = blue
#warn = bright purple
#error = red
#debug = dark gray
#deprecate = purple
#skip = cyan
#unreachable = red
#ok = green
#changed = yellow
#diff_add = green
#diff_remove = red
#diff_lines = cyan
================================================
FILE: ansibullbot/__init__.py
================================================
================================================
FILE: ansibullbot/_text_compat.py
================================================
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Toshio Kuratomi <a.badger@gmail.com>, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
.. warning:: This module_util is currently an internal implementation.
We want to evaluate this code for stability and API suitability before
making backwards compatibility guarantees. The API may change between
releases. Do not use this unless you are willing to port your module code.
"""
import codecs
try:
codecs.lookup_error('surrogateescape')
HAS_SURROGATEESCAPE = True
except LookupError:
HAS_SURROGATEESCAPE = False
_COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_replace',
'surrogate_or_strict',
'surrogate_then_replace'))
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a byte string
:arg obj: An object to make sure is a byte string. In most cases this
will be either a text string or a byte string. However, with
``nonstring='simplerepr'``, this can be used as a traceback-free
version of ``str(obj)``.
:kwarg encoding: The encoding to use to transform from a text string to
a byte string. Defaults to using 'utf-8'.
:kwarg errors: The error handler to use if the text string is not
encodable using the specified encoding. Any valid `codecs error
handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
may be specified. There are three additional error strategies
specifically aimed at helping people to port code. The first two are:
:surrogate_or_strict: Will use ``surrogateescape`` if it is a valid
handler, otherwise it will use ``strict``
:surrogate_or_replace: Will use ``surrogateescape`` if it is a valid
handler, otherwise it will use ``replace``.
Because ``surrogateescape`` was added in Python3 this usually means that
Python3 will use ``surrogateescape`` and Python2 will use the fallback
error handler. Note that the code checks for ``surrogateescape`` when the
module is imported. If you have a backport of ``surrogateescape`` for
Python2, be sure to register the error handler prior to importing this
module.
The last error handler is:
:surrogate_then_replace: Will use ``surrogateescape`` if it is a valid
handler. If encoding with ``surrogateescape`` would traceback,
surrogates are first replaced with a replacement character
and then the string is encoded using ``replace`` (which replaces
the rest of the nonencodable bytes). If ``surrogateescape`` is
not present it will simply use ``replace``. (Added in Ansible 2.3)
This strategy is designed to never traceback when it attempts
to encode a string.
The default until Ansible-2.2 was ``surrogate_or_replace``
From Ansible-2.3 onwards, the default is ``surrogate_then_replace``.
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
:simplerepr: The default. This takes the ``str`` of the object and
then returns the bytes version of that string.
:empty: Return an empty byte string
:passthru: Return the object passed in
:strict: Raise a :exc:`TypeError`
:returns: Typically this returns a byte string. If a nonstring object is
passed in this may be a different type depending on the strategy
specified by nonstring. This will never return a text string.
.. note:: If passed a byte string, this function does not check that the
string is valid in the specified encoding. If it's important that the
byte string is in the specified encoding do::
encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8')
.. version_changed:: 2.3
Added the ``surrogate_then_replace`` error handler and made it the default error handler.
"""
if isinstance(obj, bytes):
return obj
# We're given a text string
# If it has surrogates, we know because it will decode
original_errors = errors
if errors in _COMPOSED_ERROR_HANDLERS:
if HAS_SURROGATEESCAPE:
errors = 'surrogateescape'
elif errors == 'surrogate_or_strict':
errors = 'strict'
else:
errors = 'replace'
if isinstance(obj, str):
try:
# Try this first as it's the fastest
return obj.encode(encoding, errors)
except UnicodeEncodeError:
if original_errors in (None, 'surrogate_then_replace'):
# We should only reach this if encoding was non-utf8, original_errors was
# surrogate_then_replace, and errors was surrogateescape
# Slow but works
return_string = obj.encode('utf-8', 'surrogateescape')
return_string = return_string.decode('utf-8', 'replace')
return return_string.encode(encoding, 'replace')
raise
# Note: We do these last even though we have to call to_bytes again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
try:
value = str(obj)
except UnicodeError:
try:
value = repr(obj)
except UnicodeError:
# Giving up
return to_bytes('')
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
# python2.4 doesn't have b''
return to_bytes('')
elif nonstring == 'strict':
raise TypeError('obj must be a string type')
else:
raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring)
return to_bytes(value, encoding, errors)
def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a text string
:arg obj: An object to make sure is a text string. In most cases this
will be either a text string or a byte string. However, with
``nonstring='simplerepr'``, this can be used as a traceback-free
version of ``str(obj)``.
:kwarg encoding: The encoding to use to transform from a byte string to
a text string. Defaults to using 'utf-8'.
:kwarg errors: The error handler to use if the byte string is not
decodable using the specified encoding. Any valid `codecs error
handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
may be specified. We support three additional error strategies
specifically aimed at helping people to port code:
:surrogate_or_strict: Will use surrogateescape if it is a valid
handler, otherwise it will use strict
:surrogate_or_replace: Will use surrogateescape if it is a valid
handler, otherwise it will use replace.
:surrogate_then_replace: Does the same as surrogate_or_replace but
was added for symmetry with the error handlers in
:func:`ansible.module_utils._text.to_bytes` (Added in Ansible 2.3)
Because surrogateescape was added in Python3 this usually means that
Python3 will use `surrogateescape` and Python2 will use the fallback
error handler. Note that the code checks for surrogateescape when the
module is imported. If you have a backport of `surrogateescape` for
python2, be sure to register the error handler prior to importing this
module.
The default until Ansible-2.2 was `surrogate_or_replace`
In Ansible-2.3 this defaults to `surrogate_then_replace` for symmetry
with :func:`ansible.module_utils._text.to_bytes` .
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
:simplerepr: The default. This takes the ``str`` of the object and
then returns the text version of that string.
:empty: Return an empty text string
:passthru: Return the object passed in
:strict: Raise a :exc:`TypeError`
:returns: Typically this returns a text string. If a nonstring object is
passed in this may be a different type depending on the strategy
specified by nonstring. This will never return a byte string.
From Ansible-2.3 onwards, the default is `surrogate_then_replace`.
.. version_changed:: 2.3
Added the surrogate_then_replace error handler and made it the default error handler.
"""
if isinstance(obj, str):
return obj
if errors in _COMPOSED_ERROR_HANDLERS:
if HAS_SURROGATEESCAPE:
errors = 'surrogateescape'
elif errors == 'surrogate_or_strict':
errors = 'strict'
else:
errors = 'replace'
if isinstance(obj, bytes):
# Note: We don't need special handling for surrogate_then_replace
# because all bytes will either be made into surrogates or are valid
# to decode.
return obj.decode(encoding, errors)
# Note: We do these last even though we have to call to_text again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
try:
value = str(obj)
except UnicodeError:
try:
value = repr(obj)
except UnicodeError:
# Giving up
return ''
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
return ''
elif nonstring == 'strict':
raise TypeError('obj must be a string type')
else:
raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring)
return to_text(value, encoding, errors)
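# Illustrative example (not part of the original module): with the composed
# error handlers described above, bytes that are not valid UTF-8 survive a
# to_text()/to_bytes() round trip via surrogateescape, e.g.:
#   to_text(b'caf\xe9')            -> 'caf\udce9'
#   to_bytes(to_text(b'caf\xe9'))  -> b'caf\xe9'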
================================================
FILE: ansibullbot/ansibletriager.py
================================================
# Key features:
# * daemonize mode that can continuously loop and process w/out scripts
# * maintainers can be assigned to more than just the files in
# ansibullbot.ansible/modules
# * false positives on module issue detection can be corrected by a wide range
# of people
# * more people (not just maintainers) should have access to a subset of bot
# commands
# * a generic label add|remove command will allow the community to fill in
# where the bot can't
# * different workflows should be a matter of enabling different plugins
import datetime
import json
import logging
import os
from copy import deepcopy
from pprint import pprint
import ansibullbot.constants as C
from ansibullbot._text_compat import to_bytes, to_text
from ansibullbot.exceptions import LabelWafflingError
from ansibullbot.utils.botmetadata import BotMetadataParser
from ansibullbot.defaulttriager import DefaultActions, DefaultTriager, render_boilerplate
from ansibullbot.utils.component_tools import AnsibleComponentMatcher
from ansibullbot.utils.extractors import extract_pr_number_from_comment
from ansibullbot.utils.moduletools import ModuleIndexer
from ansibullbot.utils.receiver_client import post_to_receiver
from ansibullbot.utils.timetools import strip_time_safely
from ansibullbot.utils.version_tools import AnsibleVersionIndexer, get_version_major_minor
from ansibullbot.issuewrapper import IssueWrapper
from ansibullbot.plugins.backports import get_backport_facts
from ansibullbot.plugins.botstatus import get_bot_status_facts
from ansibullbot.plugins.ci_rebuild import get_ci_facts
from ansibullbot.plugins.ci_rebuild import get_rebuild_facts
from ansibullbot.plugins.ci_rebuild import get_rebuild_command_facts
from ansibullbot.plugins.ci_rebuild import get_rebuild_merge_facts
from ansibullbot.plugins.community_workgroups import get_community_workgroup_facts
from ansibullbot.plugins.component_matching import get_component_match_facts
from ansibullbot.plugins.collection_facts import get_collection_facts
from ansibullbot.plugins.cross_references import get_cross_reference_facts
from ansibullbot.plugins.filament import get_filament_facts
from ansibullbot.plugins.label_commands import get_label_command_facts
from ansibullbot.plugins.label_commands import get_waffling_overrides
from ansibullbot.plugins.needs_contributor import get_needs_contributor_facts
from ansibullbot.plugins.needs_info import is_needsinfo
from ansibullbot.plugins.needs_info import needs_info_template_facts
from ansibullbot.plugins.needs_info import needs_info_timeout_facts
from ansibullbot.plugins.needs_revision import get_needs_revision_facts
from ansibullbot.plugins.needs_revision import get_ci_run_facts
from ansibullbot.plugins.contributors import get_contributor_facts
from ansibullbot.plugins.notifications import get_notification_facts
from ansibullbot.plugins.shipit import get_automerge_facts
from ansibullbot.plugins.shipit import get_shipit_facts
from ansibullbot.plugins.small_patch import get_small_patch_facts
from ansibullbot.plugins.spam import get_spam_facts
from ansibullbot.plugins.test_support_plugins import get_test_support_plugins_facts
from ansibullbot.plugins.traceback import get_traceback_facts
from ansibullbot.plugins.deprecation import get_deprecation_facts
from ansibullbot.plugins.docs_info import get_docs_facts
VALID_CI_PROVIDERS = frozenset(('azp',))
class AnsibleActions(DefaultActions):
def __init__(self):
super().__init__()
self.rebuild = False
self.rebuild_failed = False
self.cancel_ci = False
self.cancel_ci_branch = False
class AnsibleTriager(DefaultTriager):
CLOSING_LABELS = ['bot_closed']
ISSUE_TYPES = {
'bug report': 'bug',
'bugfix pull request': 'bug',
'feature idea': 'feature',
'feature pull request': 'feature',
'documentation report': 'docs',
'docs pull request': 'docs',
'new module pull request': 'new_plugin'
}
# modules having files starting with the key will get the value label
MODULE_NAMESPACE_LABELS = {
'windows': "windows",
'network': "networking"
}
VALID_COMMANDS = [
'needs_info',
'!needs_info',
'notabug',
'bot_status',
'bot_broken',
'!bot_broken',
'bot_skip',
'!bot_skip',
'wontfix',
'bug_resolved',
'resolved_by_pr',
'needs_contributor',
'!needs_contributor',
'needs_rebase',
'!needs_rebase',
'needs_revision',
'!needs_revision',
'shipit',
'!shipit',
'duplicate_of',
'close_me',
'waiting_on_contributor',
'!waiting_on_contributor',
]
def __init__(self, args=None):
super().__init__(args)
if self.args.ci == 'azp':
from ansibullbot.ci.azp import AzurePipelinesCI as ci_class
else:
raise ValueError(
'Unknown CI provider specified in the config file: %s. Valid CI providers: %s' %
(C.DEFAULT_CI_PROVIDER, ', '.join(VALID_CI_PROVIDERS))
)
self.ci = None
self.ci_class = ci_class
def load_botmeta(self, gitrepo):
if self.args.botmetafile is not None:
with open(self.args.botmetafile, 'rb') as f:
rdata = f.read()
else:
rdata = gitrepo.get_file_content('.github/BOTMETA.yml')
logging.info('ansible triager [re]loading botmeta')
return BotMetadataParser.parse_yaml(rdata)
def _should_skip_issue(self, summary):
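# An issue/PR is skipped only when it is not flagged as stale, cached meta
# exists for it, the cached updated_at matches the current summary, and (for
# pull requests) no rebuild or admin merge is still pending.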
reponame = summary['repository']['nameWithOwner']
if summary['number'] in self.repos[reponame]['stale']:
return False
if not (lmeta := self.load_meta(reponame, str(summary['number']))):
return False
if strip_time_safely(lmeta['updated_at']) != strip_time_safely(summary['updated_at']):
return False
if summary['type'] == 'pullRequest' and (lmeta.get('needs_rebuild') or lmeta.get('admin_merge')):
return False
return True
def run(self):
'''Primary execution method'''
ts1 = datetime.datetime.now()
self.collect_repos()
if self.args.collect_only:
return
icount = 0
for repopath, repodata in self.repos.items():
repo = repodata['repo']
cachedir = os.path.join(self.cachedir_base, repopath)
logging.info('loading botmeta')
self.botmeta = self.load_botmeta(repodata['gitrepo'])
logging.info('creating version indexer')
self.version_indexer = AnsibleVersionIndexer(checkoutdir=repodata['gitrepo'].checkoutdir)
logging.info('creating module indexer')
self.module_indexer = ModuleIndexer(
botmeta=self.botmeta,
gh_client=self.gqlc,
cachedir=self.cachedir_base,
gitrepo=repodata['gitrepo'],
commits=not self.args.ignore_module_commits
)
logging.info('creating component matcher')
self.component_matcher = AnsibleComponentMatcher(
cachedir=self.cachedir_base,
gitrepo=repodata['gitrepo'],
botmeta=self.botmeta,
email_cache=self.module_indexer.emails_cache,
usecache=True,
use_galaxy=not self.args.ignore_galaxy
)
for issue in (
repodata['issuecache'].get(n, repodata['repo'].get_issue(n)) for n in repodata['numbers']
):
icount += 1
self.meta = {}
self.processed_meta = {}
self.set_resume(repopath, issue.number)
# keep track of how many times this issue has been re-done
loopcount = 0
its1 = datetime.datetime.now()
redo = True
while redo:
redo = False
# use the loopcount to check new data
loopcount += 1
if loopcount <= 1:
logging.info('starting triage for %s' % issue.html_url)
else:
# if >1 get latest data
logging.info('restarting triage for %s' % issue.number)
issue = repo.get_issue(issue.number)
if self.args.skip_no_update and self._should_skip_issue(repodata['summaries'][str(issue.number)]):
logging.info('skipping: no changes since last run')
continue
# create the wrapper on each loop iteration
iw = IssueWrapper(
github=self.ghw,
repo=repo,
issue=issue,
cachedir=cachedir,
gitrepo=repodata['gitrepo'],
)
iw.updated_at = strip_time_safely(repodata['summaries'][str(issue.number)]['updated_at'])
if iw.is_pullrequest():
logging.info('creating CI wrapper')
self.ci = self.ci_class(self.cachedir_base, iw)
else:
self.ci = None
# force an update on the PR data
iw.update_pullrequest()
self.process(iw, repodata['labels'])
# build up actions from the meta
actions = AnsibleActions()
self.create_actions(iw, actions, repodata['labels'])
self.save_meta(iw, self.meta, actions)
# DEBUG!
logging.info('url: %s' % iw.html_url)
logging.info('title: %s' % iw.title)
if iw.is_pullrequest():
for fn in iw.files:
logging.info('component[f]: %s' % fn)
else:
for line in iw.template_data.get('component_raw', '').split('\n'):
logging.info('component[t]: %s' % line)
for fn in self.meta['component_filenames']:
logging.info('component[m]: %s' % fn)
if self.meta['template_missing_sections']:
logging.info(
'missing sections: ' +
', '.join(self.meta['template_missing_sections'])
)
if self.meta['is_needs_revision']:
logging.info('needs_revision')
for msg in self.meta['is_needs_revision_msgs']:
logging.info('needs_revision_msg: %s' % msg)
if self.meta['is_needs_rebase']:
logging.info('needs_rebase')
for msg in self.meta['is_needs_rebase_msgs']:
logging.info('needs_rebase_msg: %s' % msg)
pprint(vars(actions))
action_meta = self.apply_actions(iw, actions)
if action_meta['REDO']:
redo = True
its2 = datetime.datetime.now()
td = (its2 - its1).total_seconds()
logging.info('finished triage for %s in %ss' % (str(issue.number), td))
ts2 = datetime.datetime.now()
td = (ts2 - ts1).total_seconds()
logging.info('triaged %s issues in %s seconds' % (icount, td))
def save_meta(self, issuewrapper, meta, actions):
# save the meta+actions
dmeta = meta.copy()
dmeta['submitter'] = issuewrapper.submitter
dmeta['number'] = issuewrapper.number
dmeta['title'] = issuewrapper.title
dmeta['body'] = issuewrapper.body
dmeta['filenames'] = issuewrapper.files
dmeta['renamed_filenames'] = issuewrapper.renamed_files
dmeta['html_url'] = issuewrapper.html_url
dmeta['created_at'] = to_text(issuewrapper.created_at.isoformat())
dmeta['updated_at'] = to_text(issuewrapper.updated_at.isoformat())
dmeta['template_data'] = issuewrapper.template_data
if isinstance(actions, dict):
dmeta['actions'] = actions.copy()
else:
if actions:
dmeta['actions'] = vars(actions)
else:
dmeta['actions'] = {}
dmeta['labels'] = issuewrapper.labels
dmeta['assignees'] = issuewrapper.assignees
if issuewrapper.history:
dmeta['history'] = deepcopy(issuewrapper.history.history)
for idx, x in enumerate(dmeta['history']):
dmeta['history'][idx]['created_at'] = \
to_text(x['created_at'].isoformat())
else:
dmeta['history'] = []
if issuewrapper.is_pullrequest():
dmeta['pullrequest_reviews'] = issuewrapper.reviews
else:
dmeta['pullrequest_reviews'] = []
self.dump_meta(issuewrapper, dmeta)
namespace, reponame = issuewrapper.repo_full_name.split('/', 1)
# https://github.com/ansible/ansibullbot/issues/1355
dmeta_copy = dmeta.copy()
# These fields may contain dictionaries whose keys are considered
# invalid in mongodb (like containing '.'), which would crash the receiver
# and result in memory leaks.
# FIXME figure out a way to store these without invalid keys
dmeta_copy['collection_filemap'] = None
dmeta_copy['collection_file_matches'] = None
dmeta_copy['renamed_filenames'] = None
dmeta_copy['test_support_plugins'] = None
post_to_receiver(
'metadata',
{'user': namespace, 'repo': reponame, 'number': issuewrapper.number},
dmeta_copy
)
self.processed_meta = dmeta_copy.copy()
def dump_meta(self, issuewrapper, meta):
mfile = os.path.join(
issuewrapper.full_cachedir,
'meta.json'
)
meta['time'] = to_text(datetime.datetime.now().isoformat())
logging.info('dump meta to %s' % mfile)
with open(mfile, 'w', encoding='utf-8') as f:
json.dump(meta, f)
def create_actions(self, iw, actions, valid_labels):
'''Parse facts and make actions from them'''
# bot_broken + bot_skip bypass all actions
if not self.args.ignore_bot_broken:
bot_broken_commands = iw.history.get_commands(
None,
['bot_broken', '!bot_broken'],
timestamps=True
)
bot_broken_label = iw.history.label_last_applied('bot_broken')
if bot_broken_label:
bot_broken_commands.append((bot_broken_label, 'bot_broken'))
bot_broken_unlabel = iw.history.label_last_removed('bot_broken')
if bot_broken_unlabel:
bot_broken_commands.append((bot_broken_unlabel, '!bot_broken'))
last_bot_broken = sorted(bot_broken_commands, key=lambda x: x[0])[-1:]
if last_bot_broken and last_bot_broken[0][-1] == 'bot_broken':
logging.warning('bot broken!')
if 'bot_broken' not in iw.labels:
actions.newlabel.append('bot_broken')
return
else:
if 'bot_broken' in iw.labels:
actions.unlabel.append('bot_broken')
if 'bot_skip' in self.meta['maintainer_commands'] or \
'bot_skip' in self.meta['submitter_commands'] or \
'!bot_skip' in self.meta['maintainer_commands'] or \
'!bot_skip' in self.meta['submitter_commands']:
bot_skip_users = [x.login for x in iw.repo.assignees]
bot_skip_users.append(iw.submitter)
bot_skip_commands = iw.history.get_commands(
bot_skip_users,
['bot_skip', '!bot_skip'],
timestamps=True
)
last_bot_skip = sorted(bot_skip_commands, key=lambda x: x[0])[-1:]
if last_bot_skip and last_bot_skip[0][-1] == 'bot_skip':
logging.warning('bot skip!')
return
if iw.is_pullrequest():
if not iw.incoming_repo_exists and C.features.is_enabled('close_missing_ref_prs'):
type_to_branch_prefix = {
'bugfix pull request': 'bugfix',
'feature pull request': 'feature',
'documentation pull request': 'docs',
'test pull request': 'testing',
None: 'misc',
}
pr_number = iw.number
pr_topic = iw.title.strip().replace(' ', '-').lower()
pr_type = type_to_branch_prefix[
iw.template_data.get('issue type')
]
pr_recovered_branch = (
'recovered-{pr_type}/{pr_number:d}-{pr_topic}'.
format(
pr_type=pr_type,
pr_number=pr_number,
pr_topic=pr_topic,
)
)
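# e.g. (illustrative values only): a PR titled "Fix network timeout" with
# issue type 'bugfix pull request' and number 123 becomes
# 'recovered-bugfix/123-fix-network-timeout'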
tvars = {
'pr_number': pr_number,
'pr_recovered_branch': pr_recovered_branch,
'pr_topic': pr_topic,
'pr_title_urlencoded': iw.title.replace(' ', '%20'),
'pr_type': pr_type,
'pr_submitter': iw.submitter,
}
comment = render_boilerplate(
tvars, boilerplate='incoming_ref_missing',
)
actions.comments.append(comment)
if C.features.is_enabled('close_missing_ref_prs'):
actions.close = True
actions.cancel_ci = True
actions.cancel_ci_branch = True
return
if not iw.from_fork:
tvars = {'submitter': iw.submitter}
comment = render_boilerplate(tvars, boilerplate='fork')
actions.comments.append(comment)
actions.close = True
actions.cancel_ci = True
actions.cancel_ci_branch = True
return
# indicate what components were matched
if not self.meta['is_bad_pr']:
if iw.is_issue() and self.meta.get('needs_component_message'):
tvars = {
'meta': self.meta
}
comment = render_boilerplate(
tvars, boilerplate='components_banner'
)
if comment not in actions.comments:
actions.comments.append(comment)
# UNKNOWN!!! ... sigh.
if iw.is_pullrequest():
if self.meta['mergeable_state'] == 'unknown' and iw.state != 'closed':
msg = 'skipping %s because it has a' % iw.number
msg += ' mergeable_state of unknown'
logging.warning(msg)
return
# TRIAGE!!!
if 'needs_triage' not in iw.labels and not iw.history.was_unlabeled('needs_triage') and not iw.history.was_unlabeled('triage'):
actions.newlabel.append('needs_triage')
# owner PRs
if iw.is_pullrequest():
if self.meta['owner_pr']:
if 'owner_pr' not in iw.labels:
actions.newlabel.append('owner_pr')
else:
if 'owner_pr' in iw.labels:
actions.unlabel.append('owner_pr')
# WIPs
if iw.is_pullrequest():
if iw.wip:
if 'WIP' not in iw.labels:
actions.newlabel.append('WIP')
if 'shipit' in iw.labels:
actions.unlabel.append('shipit')
else:
if 'WIP' in iw.labels:
actions.unlabel.append('WIP')
# MERGE COMMITS
if iw.is_pullrequest():
if self.meta['merge_commits']:
if not self.meta['has_merge_commit_notification']:
comment = render_boilerplate(
self.meta,
boilerplate='merge_commit_notify'
)
actions.comments.append(comment)
if 'merge_commit' not in iw.labels:
actions.newlabel.append('merge_commit')
if self.meta.get('has_ci'):
actions.cancel_ci = True
else:
if 'merge_commit' in iw.labels:
actions.unlabel.append('merge_commit')
# @YOU IN COMMIT MSGS
if iw.is_pullrequest():
if self.meta['has_commit_mention']:
if not self.meta['has_commit_mention_notification']:
comment = render_boilerplate(
self.meta,
boilerplate='commit_msg_mentions'
)
actions.comments.append(comment)
# SHIPIT+AUTOMERGE
if iw.is_pullrequest() and not self.meta['is_bad_pr']:
if self.meta['shipit']:
if 'shipit' not in iw.labels:
actions.newlabel.append('shipit')
if self.meta['automerge']:
logging.info(self.meta['automerge_status'])
if 'automerge' not in iw.labels:
actions.newlabel.append('automerge')
if self.botmeta.get('automerge') in ['Yes', 'yes', 'y', True, 1]:
actions.merge = True
else:
logging.debug(self.meta['automerge_status'])
if 'automerge' in iw.labels:
actions.unlabel.append('automerge')
else:
# not shipit and not automerge ...
if 'shipit' in iw.labels:
actions.unlabel.append('shipit')
if 'automerge' in iw.labels:
actions.unlabel.append('automerge')
if iw.is_pullrequest() and self.meta['is_bad_pr']:
if self.meta['is_bad_pr_reason']:
last_comment_date = iw.history.last_date_for_boilerplate('bad_pr')
if not last_comment_date:
comment = render_boilerplate(
tvars={'submitter': iw.submitter, 'is_bad_pr_reason': self.meta['is_bad_pr_reason']},
boilerplate='bad_pr'
)
if comment and comment not in actions.comments:
actions.comments.append(comment)
# NEEDS REVISION
if iw.is_pullrequest():
if not iw.wip:
if self.meta['is_needs_revision'] or self.meta['is_bad_pr']:
if 'needs_revision' not in iw.labels:
actions.newlabel.append('needs_revision')
else:
if 'needs_revision' in iw.labels:
actions.unlabel.append('needs_revision')
# NEEDS REBASE
if iw.is_pullrequest():
if self.meta['is_needs_rebase'] or self.meta['is_bad_pr']:
if 'needs_rebase' not in iw.labels:
actions.newlabel.append('needs_rebase')
else:
if 'needs_rebase' in iw.labels:
actions.unlabel.append('needs_rebase')
# comments with CI failures
if iw.is_pullrequest() and not self.meta['is_bad_pr']:
if self.meta['ci_state'] == 'failure' and \
self.meta['needs_testresult_notification']:
tvars = {
'submitter': iw.submitter,
'data': self.meta['ci_test_results']
}
try:
comment = render_boilerplate(
tvars,
boilerplate='shippable_test_result'
)
except Exception as e:
logging.debug(e)
raise
# https://github.com/ansible/ansibullbot/issues/423
if len(comment) < 65536:
if comment not in actions.comments:
actions.comments.append(comment)
# https://github.com/ansible/ansibullbot/issues/293
if iw.is_pullrequest():
label = 'needs_ci'
if not self.meta['has_ci']:
if 'pre_azp' not in iw.labels:
if label not in iw.labels:
actions.newlabel.append(label)
else:
if label in iw.labels:
actions.unlabel.append(label)
if 'pre_azp' in iw.labels:
actions.unlabel.append('pre_azp')
# MODULE CATEGORY LABELS
if not self.meta['is_bad_pr']:
if self.meta['is_new_module'] or self.meta['is_module']:
# add topic labels
for t in ['topic', 'subtopic']:
mmatches = self.meta['module_match']
if not isinstance(mmatches, list):
mmatches = [mmatches]
for mmatch in mmatches:
label = mmatch.get(t)
if label in self.MODULE_NAMESPACE_LABELS:
label = self.MODULE_NAMESPACE_LABELS[label]
if label and label in valid_labels and \
label not in iw.labels and \
not iw.history.was_unlabeled(label):
actions.newlabel.append(label)
# add namespace labels
namespace = mmatch.get('namespace')
if namespace in self.MODULE_NAMESPACE_LABELS:
label = self.MODULE_NAMESPACE_LABELS[namespace]
if label not in iw.labels and \
not iw.history.was_unlabeled(label):
actions.newlabel.append(label)
# NEW MODULE
if not self.meta['is_bad_pr']:
if self.meta['is_new_module']:
if 'new_module' not in iw.labels:
actions.newlabel.append('new_module')
else:
if 'new_module' in iw.labels:
actions.unlabel.append('new_module')
if self.meta['is_module']:
if 'module' not in iw.labels:
# don't add manually removed label
if not iw.history.was_unlabeled(
'module',
bots=C.DEFAULT_BOT_NAMES,
):
actions.newlabel.append('module')
else:
if 'module' in iw.labels:
# don't remove manually added label
if not iw.history.was_labeled(
'module',
bots=C.DEFAULT_BOT_NAMES,
):
actions.unlabel.append('module')
# NEW PLUGIN
if not self.meta['is_bad_pr']:
label = 'new_plugin'
if self.meta['is_new_plugin']:
if label not in iw.labels and not iw.history.was_unlabeled(label):
actions.newlabel.append(label)
else:
if label in iw.labels and not iw.history.was_labeled(label):
actions.unlabel.append(label)
# component labels
if not self.meta['is_bad_pr']:
if self.meta.get('component_labels') and not self.meta.get('merge_commits'):
# only add these labels to pullrequest or un-triaged issues
if iw.is_pullrequest() or \
(iw.is_issue() and
(not iw.labels or
'needs_triage' in iw.labels)):
# only add these if no c: labels have ever been changed by human
clabels = iw.history.get_changed_labels(
prefix='c:',
bots=C.DEFAULT_BOT_NAMES,
)
if not clabels:
for cl in self.meta['component_labels']:
ul = iw.history.was_unlabeled(
cl,
bots=C.DEFAULT_BOT_NAMES,
)
if not ul and \
cl not in iw.labels and \
cl not in actions.newlabel:
actions.newlabel.append(cl)
if self.meta['is_pullrequest'] and self.meta['is_backport']:
version = self.version_indexer.strip_ansible_version(self.meta['base_ref'])
if version:
for label in valid_labels:
if label.startswith('affects_'):
if label.endswith(version):
if label not in iw.labels:
actions.newlabel.append(label)
elif label in iw.labels:
actions.unlabel.append(label)
elif self.meta['ansible_label_version']:
vlabels = [x for x in iw.labels if x.startswith('affects_')]
if not vlabels:
label = 'affects_%s' % self.meta['ansible_label_version']
if label not in iw.labels:
# do not re-add version labels
if not iw.history.was_unlabeled(label):
actions.newlabel.append(label)
if self.meta['issue_type']:
label = self.ISSUE_TYPES.get(self.meta['issue_type'])
if label and label not in iw.labels:
# do not re-add issue type labels
if not iw.history.was_unlabeled(label):
actions.newlabel.append(label)
# use the filemap to add labels
if not self.meta['is_bad_pr']:
if iw.is_pullrequest() and not self.meta.get('merge_commits'):
fmap_labels = self.component_matcher.get_labels_for_files(iw.files)
for label in fmap_labels:
if label in valid_labels and label not in iw.labels:
# do not re-add these labels
if not iw.history.was_unlabeled(label):
actions.newlabel.append(label)
# needs info?
if self.meta['is_needs_info']:
if 'needs_info' not in iw.labels:
actions.newlabel.append('needs_info')
# template data warning
if self.meta['template_warning_required']:
tvars = {
'submitter': iw.submitter,
'itype': iw.github_type,
'missing_sections': self.meta['template_missing_sections']
}
comment = render_boilerplate(
tvars,
boilerplate='issue_missing_data'
)
actions.comments.append(comment)
if self.meta['template_missing_sections'] and iw.submitter not in self.maintainer_team:
if 'needs_template' not in iw.labels:
actions.newlabel.append('needs_template')
elif 'needs_info' in iw.labels:
actions.unlabel.append('needs_info')
# clear the needs_template label
if not self.meta['is_needs_info'] or \
not self.meta['template_missing_sections']:
if 'needs_template' in iw.labels:
actions.unlabel.append('needs_template')
# needs_info warn/close?
if self.meta['is_needs_info'] and self.meta['needs_info_action']:
# FIXME the condition should be probably moved
if not (self.meta['component_match_strategy'] == ['component_command'] and self.meta['template_missing_sections'] == ['component name']):
if self.meta['needs_info_action'] == 'close':
actions.close = True
tvars = {
'submitter': iw.submitter,
'action': self.meta['needs_info_action'],
'itype': iw.github_type
}
tvars.update(self.meta)
comment = render_boilerplate(
tvars,
boilerplate='needs_info_base'
)
actions.comments.append(comment)
# notify?
if not self.meta['is_bad_pr']:
if self.meta['to_notify']:
tvars = {
'notify': self.meta['to_notify'],
}
comment = render_boilerplate(tvars, boilerplate='notify')
if comment not in actions.comments:
actions.comments.append(comment)
# needs_contributor
if self.meta['is_needs_contributor']:
if 'waiting_on_contributor' not in iw.labels:
actions.newlabel.append('waiting_on_contributor')
elif 'waiting_on_contributor' in iw.labels:
actions.unlabel.append('waiting_on_contributor')
# wontfix / notabug / bug_resolved / resolved_by_pr / duplicate_of
if 'wontfix' in self.meta['maintainer_commands']:
actions.close = True
if 'notabug' in self.meta['maintainer_commands']:
actions.close = True
if 'bug_resolved' in self.meta['maintainer_commands']:
actions.close = True
if 'duplicate_of' in self.meta['maintainer_commands']:
actions.close = True
if 'close_me' in self.meta['maintainer_commands']:
actions.close = True
if 'resolved_by_pr' in self.meta['maintainer_commands']:
# 'resolved_by_pr': {'merged': True, 'number': 19141},
if self.meta['resolved_by_pr']['merged']:
actions.close = True
# bot_status
if self.meta['needs_bot_status']:
comment = render_boilerplate(
self.meta,
boilerplate='bot_status'
)
if comment not in actions.comments:
actions.comments.append(comment)
# traceback
if self.meta['has_traceback']:
if 'traceback' not in iw.labels:
actions.newlabel.append('traceback')
# label commands
if self.meta['label_cmds']:
if self.meta['label_cmds']['add']:
for label in self.meta['label_cmds']['add']:
if label not in iw.labels:
actions.newlabel.append(label)
if label in actions.unlabel:
actions.unlabel.remove(label)
if self.meta['label_cmds']['del']:
for label in self.meta['label_cmds']['del']:
if label in iw.labels:
actions.unlabel.append(label)
if label in actions.newlabel:
actions.newlabel.remove(label)
# small patch?
if iw.is_pullrequest():
label_name = 'small_patch'
if self.meta['is_small_patch']:
if label_name not in iw.labels:
actions.newlabel.append(label_name)
else:
if label_name in iw.labels:
actions.unlabel.append(label_name)
# docs_only?
# https://github.com/ansible/ansibullbot/issues/1047
if iw.is_pullrequest():
label_name = 'docs_only'
if self.meta['is_docs_only']:
if label_name not in iw.labels:
actions.newlabel.append(label_name)
if not iw.history.last_date_for_boilerplate('docs_team_info'):
comment = render_boilerplate(
self.meta,
boilerplate='docs_team_info'
)
actions.comments.append(comment)
else:
if label_name in iw.labels:
actions.unlabel.append(label_name)
if iw.is_pullrequest():
# https://github.com/ansible/ansibullbot/issues/312
# https://github.com/ansible/ansibullbot/issues/418
if self.meta['ci_verified']:
if 'ci_verified' not in iw.labels:
actions.newlabel.append('ci_verified')
else:
if 'ci_verified' in iw.labels:
actions.unlabel.append('ci_verified')
# https://github.com/ansible/ansibullbot/issues/367
if self.meta['is_backport']:
if 'backport' not in iw.labels:
actions.newlabel.append('backport')
else:
if 'backport' in iw.labels:
actions.unlabel.append('backport')
# https://github.com/ansible/ansibullbot/issues/29
if self.meta['deprecated']:
if 'deprecated' not in iw.labels:
actions.newlabel.append('deprecated')
else:
if 'deprecated' in iw.labels:
actions.unlabel.append('deprecated')
# label PRs with missing repos
if iw.is_pullrequest():
if not self.meta['has_remote_repo']:
if 'needs_repo' not in iw.labels:
actions.newlabel.append('needs_repo')
else:
if 'needs_repo' in iw.labels:
actions.unlabel.append('needs_repo')
# https://github.com/ansible/ansibullbot/issues/458
if not self.meta['is_bad_pr']:
if iw.is_pullrequest():
if self.meta['ci_stale']:
if 'stale_ci' not in iw.labels:
actions.newlabel.append('stale_ci')
else:
if 'stale_ci' in iw.labels:
actions.unlabel.append('stale_ci')
# https://github.com/ansible/ansibullbot/issues/589
if not self.meta['is_bad_pr']:
if self.meta['module_match'] and not self.meta['is_new_module']:
mmatches = self.meta['module_match']
if not isinstance(mmatches, list):
mmatches = [mmatches]
needs_maintainer = False
for mmatch in mmatches:
needs_maintainer = False
if not mmatch['maintainers'] and mmatch['support'] != 'core':
needs_maintainer = True
break
if needs_maintainer:
if 'needs_maintainer' not in iw.labels:
actions.newlabel.append('needs_maintainer')
else:
if 'needs_maintainer' in iw.labels:
actions.unlabel.append('needs_maintainer')
if not self.meta['stale_reviews']:
if 'stale_review' in iw.labels:
actions.unlabel.append('stale_review')
else:
if 'stale_review' not in iw.labels:
actions.newlabel.append('stale_review')
# https://github.com/ansible/ansibullbot/issues/302
if not self.meta['is_bad_pr']:
if iw.is_pullrequest():
if self.meta['needs_multiple_new_modules_notification']:
tvars = {
'submitter': iw.submitter
}
comment = render_boilerplate(
tvars, boilerplate='multiple_module_notify'
)
if comment not in actions.comments:
actions.comments.append(comment)
# https://github.com/ansible/ansible/pull/26921
if self.meta['is_filament']:
# no notifications on these
if actions.comments:
remove = []
for comment in actions.comments:
if '@' in comment:
remove.append(comment)
if remove:
for comment in remove:
actions.comments.remove(comment)
if 'filament' not in iw.labels:
actions.newlabel.append('filament')
if iw.age.days >= 5:
actions.close = True
# https://github.com/ansible/ansible/pull/68449
if self.meta['test_support_plugins']:
# should be fine to post just once, hopefully nobody will continue
# with the PR after this comment...
if not iw.history.last_date_for_boilerplate('test_support_plugins'):
comment = render_boilerplate(
self.meta,
boilerplate='test_support_plugins'
)
if comment not in actions.comments:
actions.comments.append(comment)
# https://github.com/ansible/ansibullbot/pull/664
if self.meta['needs_rebuild_all']:
actions.rebuild = True
if 'stale_ci' in actions.newlabel:
actions.newlabel.remove('stale_ci')
if 'stale_ci' in iw.labels:
actions.unlabel.append('stale_ci')
elif self.meta['needs_rebuild_failed']:
actions.rebuild_failed = True
if 'stale_ci' in actions.newlabel:
actions.newlabel.remove('stale_ci')
if 'stale_ci' in iw.labels:
actions.unlabel.append('stale_ci')
# https://github.com/ansible/ansibullbot/issues/640
if not self.meta['is_bad_pr']:
if not self.meta['needs_rebuild'] and self.meta['admin_merge']:
actions.merge = True
# https://github.com/ansible/ansibullbot/issues/785
if iw.is_pullrequest():
if self.meta.get('new_contributor'):
if 'new_contributor' not in iw.labels:
actions.newlabel.append('new_contributor')
else:
if 'new_contributor' in iw.labels:
actions.unlabel.append('new_contributor')
# https://github.com/ansible/ansibullbot/issues/535
if not self.meta['is_bad_pr']:
for cm in self.meta['component_matches']:
if cm.get('labels'):
for label in cm['labels']:
exists = label in iw.labels
unlabeled = iw.history.was_unlabeled(label)
valid = label in iw.repo.labels
# add it if a human did not remove it and it is valid
if not exists and not unlabeled and valid:
actions.newlabel.append(label)
# https://github.com/ansible/ansibullbot/issues/534
if iw.is_pullrequest() and self.meta['is_empty_pr'] and not iw.wip:
actions = AnsibleActions()
actions.close = True
# https://github.com/ansible/ansibullbot/issues/820
if self.meta.get('wg', {}).get('needs_notification'):
comment = render_boilerplate(
self.meta,
boilerplate='community_workgroups'
)
if comment not in actions.comments:
actions.comments.append(comment)
# https://github.com/ansible/ansibullbot/issues/924
for key in ['has_pr', 'has_issue']:
if self.meta['needs_' + key]:
if key not in iw.labels:
actions.newlabel.append(key)
# collections!!!
if self.meta.get('is_collection'):
clabels = ['collection']
for fqcn in self.meta['collection_fqcns']:
clabel = 'collection:%s' % fqcn
clabels.append(clabel)
for clabel in clabels:
exists = clabel in iw.labels
unlabeled = iw.history.was_unlabeled(clabel)
# add it if a human did not remove it
if not exists and not unlabeled:
actions.newlabel.append(clabel)
# collections!!!
if self.meta.get('needs_collection_redirect') is not True:
if 'needs_collection_redirect' in iw.labels:
actions.unlabel.append('needs_collection_redirect')
else:
if 'needs_collection_redirect' not in iw.labels:
actions.newlabel.append('needs_collection_redirect')
if self.botmeta['collection_redirect'] is True:
actions.close = True
actions.newlabel.append('bot_closed')
if self.meta.get('needs_collection_redirect'):
comment = render_boilerplate(
self.meta,
boilerplate='collection_migration'
)
actions.comments.append(comment)
# collections!!!
if self.meta.get('collection_fqcn_label_remove'):
for fqcn in self.meta['collection_fqcn_label_remove']:
actions.unlabel.append('collection:%s' % fqcn)
# spam!!!
if self.meta.get('spam_comment_ids'):
for commentid in self.meta['spam_comment_ids']:
actions.uncomment.append(commentid)
# auto close waiting_on_contributor issues/PRs
# that have been labeled for WAITING_ON_CONTRIBUTOR_EXPIRE days
if 'waiting_on_contributor' in iw.labels:
label_applied_at = iw.history.label_last_applied('waiting_on_contributor')
now = datetime.datetime.now(datetime.timezone.utc)
if (now - label_applied_at).days > C.DEFAULT_WAITING_ON_CONTRIBUTOR_EXPIRE:
# Waiting for Godot!
actions.close = True
actions.newlabel.append('bot_closed')
actions.unlabel.append('waiting_on_contributor')
actions.comments.append(
render_boilerplate(
self.meta,
boilerplate='waiting_on_contributor_close'
)
)
actions.newlabel = sorted({to_text(to_bytes(x, 'ascii'), 'ascii') for x in actions.newlabel})
actions.unlabel = sorted({to_text(to_bytes(x, 'ascii'), 'ascii') for x in actions.unlabel})
# check for waffling
labels = sorted(set(actions.newlabel + actions.unlabel))
for label in labels:
if label in self.meta['label_waffling_overrides']:
continue
if iw.history.label_is_waffling(label):
if label in actions.newlabel or label in actions.unlabel:
msg = f'"{label}" label is waffling on {iw.html_url}'
logging.error(msg)
raise LabelWafflingError(msg)
elif label in actions.newlabel and label in actions.unlabel:
msg = f'"{label}" label is waffling on {iw.html_url}'
logging.error(msg)
raise LabelWafflingError(msg)
def post_actions_to_receiver(self, iw, actions, processed_meta):
namespace, reponame = iw.repo_full_name.split('/', 1)
processed_actions = {name: value for (name, value) in vars(actions).items() if value}
data = processed_actions
data['meta'] = processed_meta
post_to_receiver(
'actions',
{'user': namespace, 'repo': reponame, 'number': iw.number},
data,
)
def process(self, iw, valid_labels):
'''Do initial processing of the issue'''
# clear the actions+meta
self.meta = {}
self.meta['state'] = iw.state
self.meta['submitter'] = iw.submitter
# set the issue type
issue_type = iw.template_data.get('issue type')
if issue_type in self.ISSUE_TYPES:
self.meta['issue_type'] = issue_type
else:
# look for best match?
for key in self.ISSUE_TYPES.keys():
if iw.body and key in iw.body.lower():
self.meta['issue_type'] = key
break
else:
self.meta['issue_type'] = None
# needed for bot status
self.meta['is_issue'] = iw.is_issue()
self.meta['is_pullrequest'] = iw.is_pullrequest()
# get ansible version
if iw.is_issue():
try:
self.meta['ansible_version'] = self.version_indexer.version_by_issue(iw)
except ValueError:
self.meta['ansible_version'] = self.version_indexer.version_by_date(iw.created_at)
else:
self.meta['ansible_version'] = self.version_indexer.version_by_commit(iw.pullrequest.base.sha)
self.meta['ansible_label_version'] = get_version_major_minor(self.meta['ansible_version'])
logging.info('ansible version: %s' % self.meta['ansible_version'])
# what component(s) is this about?
self.meta.update(
get_component_match_facts(
iw,
self.component_matcher,
valid_labels
)
)
# collections?
self.meta.update(
get_collection_facts(
iw,
self.component_matcher,
self.meta,
)
)
# backports
self.meta.update(get_backport_facts(iw))
# traceback
self.meta.update(get_traceback_facts(iw))
# small_patch
self.meta.update(get_small_patch_facts(iw))
# docs_only
self.meta.update(get_docs_facts(iw))
# shipit?
self.meta.update(
get_needs_revision_facts(
iw,
self.meta,
self.ci,
self.maintainer_team,
C.DEFAULT_BOT_NAMES,
)
)
# needs_contributor?
self.meta.update(get_needs_contributor_facts(iw.history.history, C.DEFAULT_BOT_NAMES))
# who needs to be notified or assigned?
self.meta.update(get_notification_facts(iw, self.meta, botmeta=self.botmeta))
# ci_verified and test results
self.meta.update(
get_ci_run_facts(iw, self.meta, self.ci)
)
# needsinfo?
self.meta['is_needs_info'] = is_needsinfo(iw, C.DEFAULT_BOT_NAMES)
self.meta.update(self.process_comment_commands(iw, self.meta))
self.meta.update(needs_info_template_facts(iw, self.meta))
self.meta.update(needs_info_timeout_facts(iw.history, self.meta))
# shipit?
self.meta.update(
get_shipit_facts(
iw, self.meta, self.botmeta['files'],
maintainer_team=self.maintainer_team, botnames=C.DEFAULT_BOT_NAMES,
)
)
# bot_status needed?
self.meta.update(get_bot_status_facts(iw, self.module_indexer.all_maintainers, maintainer_team=self.maintainer_team, bot_names=C.DEFAULT_BOT_NAMES))
# who is this waiting on?
wo = 'maintainer'
if self.meta['is_needs_info']:
wo = iw.submitter
if iw.is_issue():
if self.meta['is_needs_contributor']:
wo = 'contributor'
else:
if self.meta['is_needs_revision'] or self.meta['is_needs_rebase']:
wo = iw.submitter
else:
wo = 'ansible'
self.meta.update({'waiting_on': wo})
# community label manipulation
self.meta.update(
get_label_command_facts(
iw,
self.module_indexer.all_maintainers,
maintainer_team=self.maintainer_team,
valid_labels=valid_labels
)
)
# waffling overrides [label_waffling_overrides]
self.meta.update(
get_waffling_overrides(
iw,
self.module_indexer.all_maintainers,
maintainer_team=self.maintainer_team,
)
)
# filament
self.meta.update(get_filament_facts(iw, self.meta))
# test_support_plugins
self.meta.update(
get_test_support_plugins_facts(iw, self.component_matcher)
)
# ci
self.meta.update(get_ci_facts(iw, self.ci))
# ci rebuilds
self.meta.update(get_rebuild_facts(iw, self.meta))
# ci rebuild + merge
self.meta.update(
get_rebuild_merge_facts(
iw,
self.meta,
self.maintainer_team,
self.ci,
)
)
# ci rebuild requested?
self.meta.update(
get_rebuild_command_facts(
iw,
self.meta,
self.ci,
)
)
# first time contributor?
self.meta.update(get_contributor_facts(iw))
# is it deprecated?
self.meta.update(get_deprecation_facts(self.meta))
# does it have a pr or does it have an issue?
self.meta.update(get_cross_reference_facts(iw))
# need these keys to always exist
if 'merge_commits' not in self.meta:
self.meta['merge_commits'] = []
if 'is_bad_pr' not in self.meta:
self.meta['is_bad_pr'] = False
# spam!
self.meta.update(get_spam_facts(iw))
# automerge
self.meta.update(get_automerge_facts(iw, self.meta))
# community working groups
self.meta.update(get_community_workgroup_facts(iw, self.meta))
def process_comment_commands(self, issuewrapper, meta):
vcommands = [x for x in self.VALID_COMMANDS]
# these are handled by other fact gathering functions
vcommands.remove('bot_status')
vcommands.remove('needs_info')
vcommands.remove('!needs_info')
vcommands.remove('shipit')
vcommands.remove('needs_rebase')
vcommands.remove('!needs_rebase')
vcommands.remove('needs_revision')
vcommands.remove('!needs_revision')
vcommands.remove('needs_contributor')
vcommands.remove('!needs_contributor')
vcommands.remove('waiting_on_contributor')
vcommands.remove('!waiting_on_contributor')
iw = issuewrapper
maintainers = []
maintainers += meta.get('component_authors', [])
maintainers += meta.get('component_maintainers', [])
maintainers += meta.get('component_notifiers', [])
maintainers += [x.login for x in iw.repo.assignees]
maintainers = sorted(set(maintainers))
meta['maintainer_commands'] = iw.history.get_commands(
maintainers,
vcommands,
uselabels=False,
)
meta['submitter_commands'] = iw.history.get_commands(
iw.submitter,
vcommands,
uselabels=False,
)
# JIMI_SKIP!!!
if issuewrapper.submitter in ['jimi-c']:
if 'bot_skip' not in meta['maintainer_commands']:
meta['maintainer_commands'].append('bot_skip')
if '!bot_skip' in meta['maintainer_commands']:
meta['maintainer_commands'].remove('!bot_skip')
if '!bot_skip' in meta['submitter_commands']:
meta['submitter_commands'].remove('!bot_skip')
negative_commands = \
[x for x in self.VALID_COMMANDS if x.startswith('!')]
negative_commands = [x.replace('!', '') for x in negative_commands]
for x in negative_commands:
meta['maintainer_commands'] = self.negate_command(
x,
meta['maintainer_commands']
)
meta['submitter_commands'] = self.negate_command(
x,
meta['submitter_commands']
)
# resolved_by_pr is special
if 'resolved_by_pr' in meta['maintainer_commands']:
mc = iw.history.get_user_comments(maintainers)
mc = [x for x in mc if 'resolved_by_pr' in x]
pr_number = extract_pr_number_from_comment(mc[-1])
if pr_number is None:
logging.warning("Invalid resolved_by_pr command in '%s'", mc[-1])
else:
merged = iw.repo.is_pr_merged(pr_number)
meta['resolved_by_pr'] = {
'number': pr_number,
'merged': merged
}
return meta
def negate_command(self, command, commands):
# negate bot_broken ... bot_broken vs. !bot_broken
positive = command
negative = '!' + command
bb = [x for x in commands if positive in x]
if bb:
for x in bb:
if x == negative:
if positive in commands:
commands.remove(positive)
if negative in commands:
commands.remove(negative)
return commands
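# Illustrative behaviour sketch (hypothetical inputs, shown as a comment only):
#   self.negate_command('bot_broken', ['bot_broken', '!bot_broken']) -> []
#   self.negate_command('bot_broken', ['!bot_broken'])               -> []
#   self.negate_command('bot_broken', ['bot_broken'])                -> ['bot_broken']
# i.e. a '!command' cancels the matching positive command and is dropped itself.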
def execute_actions(self, iw, actions):
"""Turns the actions into API calls"""
self.post_actions_to_receiver(iw, actions, self.processed_meta)
super().execute_actions(iw, actions)
if actions.rebuild:
runid = self.meta.get('ci_run_number')
if runid:
logging.info('Rebuilding CI %s for #%s' % (runid, iw.number))
self.ci.rebuild(runid)
else:
logging.error(
f'rebuild: no CI runid for {iw.number}'
)
elif actions.rebuild_failed:
runid = self.meta.get('ci_run_number')
if runid:
logging.info('Rebuilding CI %s for #%s' % (runid, iw.number))
self.ci.rebuild_failed(runid)
else:
logging.error(
f'rebuild: no CI runid for {iw.number}'
)
if actions.cancel_ci:
runid = self.meta.get('ci_run_number')
if runid:
logging.info('Cancelling CI %s for #%s' % (runid, iw.number))
self.ci.cancel(runid)
else:
logging.error(
f'cancel: no CI runid for {iw.number}'
)
if actions.cancel_ci_branch:
branch = iw.pullrequest.head.repo
self.ci.cancel_on_branch(branch)
@classmethod
def create_parser(cls):
parser = DefaultTriager.create_parser()
parser.description = "Triage issue and pullrequest queues for Ansible.\n" \
" (NOTE: only useful if you have commit access to" \
" the repo in question.)"
parser.add_argument("--skip_no_update", action="store_true",
help="skip processing if updated_at hasn't changed")
parser.add_argument("--collect_only", action="store_true",
help="stop after caching issues")
parser.add_argument("--ignore_bot_broken", action="store_true",
help="Do not skip processing bot_broken|bot_skip issues")
parser.add_argument("--ignore_module_commits", action="store_true",
help="Do not enumerate module commit logs")
parser.add_argument('--commit', dest='ansible_commit',
help="Use a specific commit for the indexers")
parser.add_argument('--ignore_galaxy', action='store_true',
help='do not index or search for components in galaxy')
parser.add_argument("--ci", type=str, choices=VALID_CI_PROVIDERS,
default=C.DEFAULT_CI_PROVIDER,
help="Specify a CI provider that repo uses")
return parser
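# Rough usage sketch (hypothetical invocation, not part of this module): the
# parser above extends DefaultTriager.create_parser(), so the shared flags and
# the Ansible-specific ones parse together, e.g.
#   parser = <triager class>.create_parser()
#   args = parser.parse_args(['--dry-run', '--skip_no_update', '--ci', 'azp'])
#   # args.dry_run -> True, args.skip_no_update -> True, args.ci -> 'azp'
# (assuming 'azp' is listed in VALID_CI_PROVIDERS)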
================================================
FILE: ansibullbot/ci/__init__.py
================================================
================================================
FILE: ansibullbot/ci/azp.py
================================================
import hashlib
import logging
import json
import os.path
import pickle
import re
from datetime import timezone
from io import BytesIO
from zipfile import ZipFile
import ansibullbot.constants as C
from ansibullbot._text_compat import to_bytes
from ansibullbot.ci.base import BaseCI
from ansibullbot.exceptions import NoCIError
from ansibullbot.utils.net_tools import fetch
from ansibullbot.utils.timetools import strip_time_safely
DETAILS_URL_RE = \
re.compile(
r'https://dev\.azure\.com/(?P<organization>[^/]+)/(?P<project>[^/]+)/_build/results\?buildId=(?P<buildId>[0-9]+)'
)
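# Example of the kind of details_url this pattern captures (IDs are
# illustrative only):
#   https://dev.azure.com/ansible/ansible/_build/results?buildId=12345
#   -> organization='ansible', project='ansible', buildId='12345'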
TIMELINE_URL_FMT = \
'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/timeline/?api-version=6.0'
ARTIFACTS_URL_FMT = \
'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/artifacts?api-version=6.0'
STAGE_URL_FMT = \
'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=6.0-preview.1'
NEW_BUILD = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds?api-version=6.0'
TIMEOUT = 5 # seconds
HEADERS = {
'Content-Type': 'application/json',
}
class AzurePipelinesCI(BaseCI):
name = 'azp'
def __init__(self, cachedir, iw):
self._cachedir = os.path.join(cachedir, 'azp.runs')
self._iw = iw
self._build_id = None
self._jobs = None
self._state = None
self._updated_at = None
self._stages = None
self._artifacts = None
self.last_run = None
self.created_at = None
try:
self.created_at = min(
(strip_time_safely(j['startTime']) for j in self.jobs if j['startTime'] is not None)
)
except ValueError:
self.created_at = self.updated_at
if self.state and self.build_id and self.jobs:
self.last_run = {
'state': self.state,
'created_at': self.created_at.replace(tzinfo=timezone.utc),
'updated_at': self.updated_at.replace(tzinfo=timezone.utc),
'run_id': self.build_id,
}
@property
def build_id(self):
if self._build_id is None:
build_ids = set()
for check_run in self._iw.pullrequest_check_runs:
match = re.match(DETAILS_URL_RE, check_run.details_url)
if not match:
continue
org, project, buildid = match.groups()
if org == C.DEFAULT_AZP_ORG and project == C.DEFAULT_AZP_PROJECT:
build_ids.add(int(buildid))
# FIXME more than one Pipeline
logging.debug('Azure Pipelines build IDs found: %s' % build_ids)
try:
self._build_id = max(build_ids)
except ValueError:
self._build_id = None
return self._build_id
@property
def jobs(self):
if not self.build_id:
return []
if self._jobs is None:
self._jobs = []
self._updated_at = strip_time_safely('1970-01-01')
self._stages = []
if not os.path.isdir(self._cachedir):
os.makedirs(self._cachedir)
cache_file = os.path.join(self._cachedir, u'timeline_%s.pickle' % self.build_id)
url = TIMELINE_URL_FMT % self.build_id
resp = fetch(url, timeout=TIMEOUT)
if resp is None:
raise Exception('Unable to GET %s' % url)
if resp.status_code == 404:
data = None
if os.path.isfile(cache_file):
logging.info(u'timeline was probably removed, load it from cache')
with open(cache_file, 'rb') as f:
data = pickle.load(f)
else:
data = resp.json()
data = (strip_time_safely(data['lastChangedOn']), data)
logging.info(u'writing %s' % cache_file)
with open(cache_file, 'wb') as f:
pickle.dump(data, f)
if data is not None:
data = data[1]
self._jobs = [r for r in data['records'] if r['type'] == 'Job']
self._updated_at = strip_time_safely(data['lastChangedOn'])
self._stages = [r for r in data['records'] if r['type'] == 'Stage']
state = list({j['state'] for j in self.jobs}) # pending, completed, inProgress
result = list({j['result'] for j in self.jobs}) # succeeded, failed, None
if 'canceled' in result or 'cancelled' in result:
self._state = 'failure'
elif len(state) == 1 and 'completed' in state:
if len(result) == 1 and 'succeeded' in result:
self._state = 'success'
elif 'failed' in result:
self._state = 'failure'
elif 'pending' in state or 'inProgress' in state:
self._state = 'pending'
else:
raise ValueError(
'Unknown state for buildId: %s, state: %s' % (self.build_id, state)
)
return self._jobs
@property
def state(self):
return self._state
@property
def updated_at(self):
return self._updated_at
@property
def stages(self):
return self._stages
def get_last_full_run_date(self):
# FIXME fix the method name, it makes sense for shippable but not for azp
if (self.state is None and self.build_id is None) or self.created_at is None:
raise NoCIError
return self.created_at
@property
def artifacts(self):
if self._artifacts is None and self._jobs:
if not os.path.isdir(self._cachedir):
os.makedirs(self._cachedir)
data = None
cache_file = os.path.join(self._cachedir, 'artifacts_%s.pickle' % self.build_id)
if os.path.isfile(cache_file):
logging.info('load artifacts cache')
with open(cache_file, 'rb') as f:
data = pickle.load(f)
if data is None or (data and data[0] < self.updated_at) or not data[1]:
if data:
logging.info('fetching artifacts: stale, previous from %s' % data[0])
else:
logging.info('fetching artifacts: stale, no previous data')
url = ARTIFACTS_URL_FMT % self.build_id
resp = fetch(url, timeout=TIMEOUT)
if resp is None:
raise Exception('Unable to GET %s' % url)
if resp.status_code != 404:
data = [a for a in resp.json()['value'] if a['name'].startswith('Bot')]
data = (self.updated_at, data)
logging.info('writing %s' % cache_file)
with open(cache_file, 'wb') as f:
pickle.dump(data, f)
if data:
self._artifacts = data[1]
return self._artifacts
def _get_artifact(self, name, url):
if not os.path.isdir(self._cachedir):
os.makedirs(self._cachedir)
data = None
cache_file = os.path.join(self._cachedir, '%s_%s.pickle' % (name.replace(' ', '-'), self.build_id))
if os.path.isfile(cache_file):
logging.info('loading %s' % cache_file)
with open(cache_file, 'rb') as f:
data = pickle.load(f)
if data is None or (data and data[0] < self.updated_at) or not data[1]:
if data:
logging.info('fetching artifacts: stale, previous from %s' % data[0])
else:
logging.info('fetching artifacts: stale, no previous data')
resp = fetch(url, timeout=TIMEOUT, stream=True)
if resp is None:
raise Exception('Unable to GET %s' % url)
if resp.status_code != 404:
with BytesIO() as data:
for chunk in resp.iter_content(chunk_size=128):
data.write(chunk)
artifact_zip = ZipFile(data)
artifact_data = []
for fn in artifact_zip.namelist():
if 'ansible-test-' not in fn:
continue
with artifact_zip.open(fn) as f:
artifact_data.append(json.load(f))
data = (self.updated_at, artifact_data)
logging.info('writing %s' % cache_file)
with open(cache_file, 'wb') as f:
pickle.dump(data, f)
if data:
return data[1]
def get_test_results(self):
if self.state in ('pending', 'inProgress', None):
return [], False
failed_jobs = [j for j in self.jobs if j['result'] == 'failed']
if not failed_jobs:
return [], False
results = []
ci_verified = True
failed_jobs_with_artifact = 0
for job in failed_jobs:
for artifact in self.artifacts:
if job['id'] != artifact['source']:
continue
failed_jobs_with_artifact += 1
for artifact_json in self._get_artifact(artifact['name'], artifact['resource']['downloadUrl']):
if not artifact_json['verified']:
ci_verified = False
result_data = ''.join(
(result['message'] + result['output'] for result in artifact_json['results'])
)
results.append({
'contents': {
'results': artifact_json['results'],
},
'run_id': self.build_id,
'job_id': hashlib.md5(to_bytes(result_data)).hexdigest(),
'path': None,
})
if ci_verified and len(failed_jobs) != failed_jobs_with_artifact:
ci_verified = False
return results, ci_verified
def rebuild(self, run_id, failed_only=False):
data = {'state': 'retry'}
if failed_only:
api_version = '6.0-preview.1'
stages = [s['identifier'] for s in self.stages if s['result'] != 'succeeded']
else:
api_version = '6.1-preview.1'
data['forceRetryAllJobs'] = True
stages = [s['identifier'] for s in self.stages]
for stage in stages:
url = 'https://dev.azure.com/' + C.DEFAULT_AZP_ORG + '/' + C.DEFAULT_AZP_PROJECT + '/_apis/build/builds/%s/stages/%s?api-version=%s' % (run_id, stage, api_version)
resp = fetch(
url,
verb='patch',
headers=HEADERS,
data=json.dumps(data),
timeout=TIMEOUT,
auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
)
if resp is not None and resp.status_code == 404:
self._rebuild_old()
break
def _rebuild_old(self):
data = json.dumps({
'definition': {
'id': C.DEFAULT_AZP_DEFINITION,
},
'reason': 'pullRequest',
'sourceBranch': 'refs/pull/%s/merge' % self._iw.number,
'repository': {'type': 'github'},
'triggerInfo': {
'pr.sourceBranch': self._iw._pr.head.ref,
'pr.sourceSha': self._iw._pr.head.sha,
'pr.id': self._iw._pr.id,
'pr.title': self._iw._pr.title,
'pr.number': self._iw._pr.number,
'pr.isFork': self._iw.from_fork,
'pr.draft': self._iw._pr.draft,
'pr.sender.name': self._iw._pr.user.login,
'pr.sender.avatarUrl': self._iw._pr.user.avatar_url,
'pr.providerId': 'github',
'pr.autoCancel': 'true',
},
'parameters': json.dumps({
'system.pullRequest.pullRequestId': self._iw._pr.id,
'system.pullRequest.pullRequestNumber': self._iw._pr.number,
'system.pullRequest.mergedAt': '',
'system.pullRequest.sourceBranch': self._iw._pr.head.ref,
'system.pullRequest.targetBranch': self._iw._pr.base.ref,
'system.pullRequest.sourceRepositoryUri': 'https://github.com/' + self._iw.repo.repo.full_name,
'system.pullRequest.sourceCommitId': self._iw._pr.head.sha,
}),
})
resp = fetch(
NEW_BUILD,
verb='post',
headers=HEADERS,
data=data,
timeout=30,
auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
)
if not resp:
raise Exception('Unable to POST %r to %r' % (data, NEW_BUILD))
def rebuild_failed(self, run_id):
self.rebuild(run_id, failed_only=True)
def cancel(self, run_id):
if self.state != 'pending':
logging.info('Could not cancel CI as it is not running')
return
stages_in_progress = (
s['identifier'] for s in self.stages if s['state'] != 'completed'
)
for stage in stages_in_progress:
if stage == 'Summary':
continue
url = STAGE_URL_FMT % (run_id, stage)
data = json.dumps({'state': 'cancel'})
resp = fetch(
url,
verb='patch',
headers=HEADERS,
data=data,
timeout=TIMEOUT,
auth=(C.DEFAULT_AZP_USER, C.DEFAULT_AZP_TOKEN),
)
if not resp:
raise Exception('Unable to PATCH %r to %r' % (data, url))
def cancel_on_branch(self, branch):
# FIXME cancel() should be enough?
pass
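# Minimal usage sketch (assumes an IssueWrapper `iw` for an open PR whose check
# runs point at dev.azure.com; values are illustrative, kept as a comment only):
#   ci = AzurePipelinesCI('/tmp/ansibullbot-cache', iw)
#   if ci.last_run:
#       print(ci.last_run['run_id'], ci.last_run['state'])
#   if ci.state == 'failure':
#       results, ci_verified = ci.get_test_results()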
================================================
FILE: ansibullbot/ci/base.py
================================================
from abc import ABCMeta, abstractmethod
class BaseCI(metaclass=ABCMeta):
"""An interface that each CI provider that bot could use must implement.
"""
name = None
@property
@abstractmethod
def state(self):
"""
:rtype: str or None
"""
raise NotImplementedError
@property
@abstractmethod
def updated_at(self):
"""Timestamp of last job completion for given PR number.
:rtype: datetime.datetime
"""
raise NotImplementedError
@abstractmethod
def get_last_full_run_date(self):
"""Timestamp of last full run. Maps partial re-runs back to their full
run.
:rtype: datetime.datetime
"""
raise NotImplementedError
@abstractmethod
def get_test_results(self):
"""Get test results of given run_id and figure out a ci_verified out
of it.
:rtype: tuple(bool, list)
"""
raise NotImplementedError
@abstractmethod
def rebuild(self, run_id, failed_only=False):
"""Rebuild jobs. All by default, optionally failed jobs only.
:type run_id: str
:type failed_only: bool
"""
raise NotImplementedError
def rebuild_failed(self, run_id):
self.rebuild(run_id, failed_only=True)
@abstractmethod
def cancel(self, run_id):
"""Cancel jobs.
:type run_id: str
"""
raise NotImplementedError
@abstractmethod
def cancel_on_branch(self, branch):
"""Cancel all jobs on a given branch.
:type branch: str
"""
raise NotImplementedError
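# Sketch of what a new provider would need to fill in (hypothetical class,
# kept as a comment so this module stays abstract-only):
#   class ExampleCI(BaseCI):
#       name = 'example'
#       @property
#       def state(self): ...            # 'success' | 'failure' | 'pending' | None
#       @property
#       def updated_at(self): ...       # datetime of the last job completion
#       def get_last_full_run_date(self): ...
#       def get_test_results(self): ...
#       def rebuild(self, run_id, failed_only=False): ...
#       def cancel(self, run_id): ...
#       def cancel_on_branch(self, branch): ...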
================================================
FILE: ansibullbot/constants.py
================================================
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# The following code is a derivative work of the code from the Ansible project,
# which is licensed GPLv3. This code therefore is also licensed under the terms
# of the GNU Public License, version 3.
import configparser
import sys
import tempfile
import os
import subprocess
from ._text_compat import to_text
from .utils.feature_flags import FeatureFlags
PROG_NAME = 'ansibullbot'
BOOL_TRUE = frozenset(["true", "t", "y", "1", "yes", "on"])
def mk_boolean(value):
ret = value
if not isinstance(value, bool):
ret = (to_text(value).lower() in BOOL_TRUE)
return ret
def unquote(value):
return value.replace('"', '').replace("'", '')
def shell_expand(path, expand_relative_paths=False):
'''
shell_expand is needed as os.path.expanduser does not work
when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE
'''
if path:
path = os.path.expanduser(os.path.expandvars(path))
if expand_relative_paths and not path.startswith('/'):
# paths are always 'relative' to the config?
if 'CONFIG_FILE' in globals():
CFGDIR = os.path.dirname(CONFIG_FILE)
path = os.path.join(CFGDIR, path)
path = os.path.abspath(path)
return path
def get_config(p, section, key, env_var, default,
value_type=None, expand_relative_paths=False):
''' return a configuration variable with casting
:arg p: A ConfigParser object to look for the configuration in
:arg section: A section of the ini config to examine.
:arg key: The config key to get this config from
:arg env_var: An Environment variable to check for the config var. If
this is set to None then no environment variable will be used.
:arg default: A default value to assign to the config var.
:kwarg value_type: The type of the value. This can be any of:
:boolean: sets the value to a True or False value
:integer: Sets the value to an integer or raises a ValueType error
:float: Sets the value to a float or raises a ValueType error
:list: Treats the value as a comma separated list. Split the value
and return it as a python list.
:none: Sets the value to None
:path: Expands any environment variables and tilde's in the value.
:tmp_path: Create a unique temporary directory inside of the directory
specified by value and return its path.
:pathlist: Treat the value as a typical PATH string. (On POSIX, this
means colon separated strings.) Split the value and then expand
each part for environment variables and tildes.
:kwarg expand_relative_paths: for pathlist and path types, if this is set
to True then also change any relative paths into absolute paths. The
default is False.
'''
value = _get_config(p, section, key, env_var, default)
if value_type == 'boolean':
value = mk_boolean(value)
elif value:
if value_type == 'integer' or value_type == 'int':
if value != 'None':
value = int(value)
else:
value = None
elif value_type == 'float':
value = float(value)
elif value_type == 'list':
if isinstance(value, str):
value = [x.strip() for x in value.split(',')]
elif value_type == 'none':
if value == "None":
value = None
elif value_type == 'path':
value = shell_expand(
value,
expand_relative_paths=expand_relative_paths
)
elif value_type == 'tmppath':
value = shell_expand(value)
if not os.path.exists(value):
os.makedirs(value)
prefix = '%s-local-%s' % (PROG_NAME, os.getpid())
value = tempfile.mkdtemp(prefix=prefix, dir=value)
elif value_type == 'pathlist':
if isinstance(value, str):
value = [
shell_expand(
x,
expand_relative_paths=expand_relative_paths
) for x in value.split(os.pathsep)]
elif isinstance(value, str):
value = unquote(value)
if value_type in ['integer', 'int', 'float', 'boolean']:
return value
else:
return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
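# Illustrative resolution order (mirrors DEFAULT_STALE_WINDOW below): for
#   get_config(p, 'defaults', 'stale_window', 'ANSIBULLBOT_STALE_WINDOW', 7, value_type='int')
# the default 7 is used unless the config file provides [defaults] stale_window,
# and an ANSIBULLBOT_STALE_WINDOW environment variable overrides both.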
def _get_config(p, section, key, env_var, default):
''' helper function for get_config '''
value = default
if p is not None:
try:
value = p.get(section, key, raw=True)
except:
pass
if env_var is not None:
env_value = os.environ.get(env_var, None)
if env_value is not None:
value = env_value
return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
def load_config_file():
''' Load the config file. Search order (first found is used):
ENV, CWD, HOME, /etc/ansibullbot '''
p = configparser.ConfigParser()
path0 = os.getenv("%s_CONFIG" % PROG_NAME.upper(), None)
if path0 is not None:
path0 = os.path.expanduser(path0)
if os.path.isdir(path0):
path0 += "/%s.cfg" % PROG_NAME
try:
path1 = os.getcwd() + "/%s.cfg" % PROG_NAME
except OSError:
path1 = None
path2 = os.path.expanduser("~/.%s.cfg" % PROG_NAME)
path3 = "/etc/%s/%s.cfg" % (PROG_NAME, PROG_NAME)
for path in [path0, path1, path2, path3]:
if path is not None and os.path.exists(path):
try:
p.read(path)
except configparser.Error as e:
print(f"Error reading config file: \n{e}")
sys.exit(1)
return p, path
return None, ''
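# With PROG_NAME = 'ansibullbot' the search order above works out to
# (first existing path wins):
#   1. $ANSIBULLBOT_CONFIG        (a file, or a directory + '/ansibullbot.cfg')
#   2. ./ansibullbot.cfg
#   3. ~/.ansibullbot.cfg
#   4. /etc/ansibullbot/ansibullbot.cfg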
p, CONFIG_FILE = load_config_file()
# sections in config file
DEFAULTS = 'defaults'
AZP = 'azp'
# who dat?
DEFAULT_BOT_NAMES = get_config(
p,
DEFAULTS,
'bot_names',
'%s_DEBUG' % PROG_NAME.upper(),
['ansibot', 'ansibotdev', 'ansibullbot'],
value_type='list'
)
# the sqlite database unc
DEFAULT_DATABASE_UNC = get_config(
p,
DEFAULTS,
'database_unc',
'%s_DEBUG' % PROG_NAME.upper(),
'sqlite:///~/.ansibullbot/ansibullbot.db',
value_type='string'
)
# Use or don't use the ratelimiting decorator
DEFAULT_RATELIMIT = get_config(
p,
DEFAULTS,
'ratelimit',
'%s_RATELIMIT' % PROG_NAME.upper(),
True,
value_type='boolean'
)
DEFAULT_GITHUB_URL = get_config(
p,
DEFAULTS,
'github_url',
'%s_GITHUB_URL' % PROG_NAME.upper(),
'https://api.github.com',
value_type='string'
)
DEFAULT_GITHUB_USERNAME = get_config(
p,
DEFAULTS,
'github_username',
'%s_GITHUB_USERNAME' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_GITHUB_PASSWORD = get_config(
p,
DEFAULTS,
'github_password',
'%s_GITHUB_PASSWORD' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_GITHUB_TOKEN = get_config(
p,
DEFAULTS,
'github_token',
'%s_GITHUB_TOKEN' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_GITHUB_REPOS = get_config(
p,
DEFAULTS,
'github_repos',
'%s_GITHUB_REPOS' % PROG_NAME.upper(),
['ansible/ansible'],
value_type='list'
)
# The maintainer teams including the organization where the team is located
DEFAULT_GITHUB_MAINTAINERS = get_config(
p,
DEFAULTS,
'github_maintainers',
'%s_GITHUB_MAINTAINERS' % PROG_NAME.upper(),
['ansible/ansible-commit'],
value_type='list'
)
DEFAULT_CI_PROVIDER = get_config(
p,
DEFAULTS,
'ci_provider',
'%s_CI_PROVIDER' % PROG_NAME.upper(),
'azp',
value_type='string'
)
DEFAULT_WAITING_ON_CONTRIBUTOR_EXPIRE = get_config(
p,
DEFAULTS,
'waiting_on_contributor_expire',
'%s_WAITING_ON_CONTRIBUTOR_EXPIRE' % PROG_NAME.upper(),
365,
value_type='int'
)
DEFAULT_NEEDS_INFO_WARN = get_config(
p,
'needs_info',
'warn',
'%s_NEEDS_INFO_WARN' % PROG_NAME.upper(),
30,
value_type='int'
)
DEFAULT_NEEDS_INFO_EXPIRE = get_config(
p,
'needs_info',
'expire',
'%s_NEEDS_INFO_EXPIRE' % PROG_NAME.upper(),
60,
value_type='int'
)
# How many days till a re-triage is forced
DEFAULT_STALE_WINDOW = get_config(
p,
DEFAULTS,
'stale_window',
'%s_STALE_WINDOW' % PROG_NAME.upper(),
7,
value_type='int'
)
# Pickle the issue objects?
DEFAULT_PICKLE_ISSUES = get_config(
p,
DEFAULTS,
'requests_cache',
'%s_PICKLE_ISSUES' % PROG_NAME.upper(),
True,
value_type='boolean'
)
###########################################
# AZURE PIPELINES
###########################################
DEFAULT_AZP_ORG = get_config(
p,
AZP,
'org',
'%s_AZP_ORG' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_AZP_PROJECT = get_config(
p,
AZP,
'project',
'%s_AZP_PROJECT' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_AZP_USER = get_config(
p,
AZP,
'user',
'%s_AZP_USER' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_AZP_TOKEN = get_config(
p,
AZP,
'token',
'%s_AZP_TOKEN' % PROG_NAME.upper(),
'',
value_type='string'
)
DEFAULT_AZP_DEFINITION = get_config(
p,
AZP,
'definition',
'%s_AZP_DEFINITION' % PROG_NAME.upper(),
None,
value_type='int'
)
###########################################
# METADATA RECEIVER
###########################################
DEFAULT_RECEIVER_HOST = get_config(
p,
'receiver',
'host',
'%s_RECEIVER_HOST' % PROG_NAME.upper(),
None,
value_type='str'
)
DEFAULT_RECEIVER_PORT = get_config(
p,
'receiver',
'port',
'%s_RECEIVER_PORT' % PROG_NAME.upper(),
None,
value_type='int'
)
###########################################
# SENTRY ERROR REPORTING
###########################################
# Ref:
# https://docs.sentry.io/error-reporting/configuration/?platform=python
###########################################
SENTRY_SECTION = 'sentry'
SENTRY_ENV_VAR_TMPL = 'SENTRY_{var_name}'
DEFAULT_SENTRY_DSN = get_config(
p,
SENTRY_SECTION,
'dsn',
SENTRY_ENV_VAR_TMPL.format(var_name='DSN'),
None,
value_type='string'
)
DEFAULT_SENTRY_ENV = get_config(
p,
SENTRY_SECTION,
'env',
SENTRY_ENV_VAR_TMPL.format(var_name='ENV'),
'prod',
value_type='string'
)
DEFAULT_SENTRY_TRACE = get_config(
p,
SENTRY_SECTION,
'trace',
SENTRY_ENV_VAR_TMPL.format(var_name='TRACE'),
False,
value_type='boolean'
)
DEFAULT_SENTRY_SERVER_NAME = get_config(
p,
SENTRY_SECTION,
'server_name',
SENTRY_ENV_VAR_TMPL.format(var_name='SERVER_NAME'),
'ansibullbot',
value_type='string'
)
def get_ansibullbot_version():
"""Return currently checked out Git revision."""
try:
return to_text(subprocess.check_output(('git', 'rev-parse', 'HEAD')).strip())
except subprocess.CalledProcessError:
return 'unknown'
ANSIBULLBOT_VERSION = get_ansibullbot_version()
features = FeatureFlags.from_config('features.yaml')
================================================
FILE: ansibullbot/defaulttriager.py
================================================
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import abc
import argparse
import datetime
import json
import logging
import os
import sys
import time
import typing as t
import requests
from jinja2 import Environment, FileSystemLoader
from ansibullbot import constants as C
from ansibullbot.utils.github import RateLimited
from ansibullbot.utils.gh_gql_client import GithubGraphQLClient
from ansibullbot.utils.git_tools import GitRepoWrapper
from ansibullbot.utils.logs import set_logger
from ansibullbot.utils.systemtools import run_command
from ansibullbot.utils.timetools import strip_time_safely
from ansibullbot.ghapiwrapper import GithubWrapper, RepoWrapper
basepath = os.path.dirname(__file__).split('/')
libindex = basepath[::-1].index('ansibullbot')
libindex = (len(basepath) - 1) - libindex
basepath = '/'.join(basepath[0:libindex])
_environment = Environment(
loader=FileSystemLoader(os.path.join(basepath, 'templates')),
trim_blocks=True
)
class DefaultActions:
def __init__(self):
self.newlabel = []
self.unlabel = []
self.comments = []
self.uncomment = []
self.close = False
self.open = False
self.merge = False
def count(self):
""" Return the number of actions that are to be performed """
count = 0
for value in vars(self).values():
if value:
if isinstance(value, bool):
count += 1
else:
count += len(value)
return count
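# For example (illustrative values): an instance with
#   newlabel=['needs_triage', 'bug'], comments=['hi'], close=True
# and everything else empty/False reports count() == 4
# (two labels + one comment + one boolean flag).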
def render_boilerplate(tvars: t.Dict[str, t.Any], boilerplate: str) -> str:
return _environment.get_template(f'{boilerplate}.j2').render(**tvars)
class DefaultTriager:
"""
How to use:
1. Create a new class which inherits from DefaultTriager
2. Implement 'Triager.run(self)' method:
- iterate over issues/pull requests
- for each issue
1. create 'actions = DefaultActions()'
2. define which action(s) should be done updating 'actions' instance
3. call the parent 'apply_actions' method: 'DefaultTriager.apply_actions(iw, actions)'
3. Run:
def main():
Triager().start()
"""
CLOSING_LABELS = []
def __init__(self, args=None):
parser = self.create_parser()
self.args = parser.parse_args(args)
set_logger(debug=self.args.debug, logfile=self.args.logfile)
logging.info('starting bot')
self.cachedir_base = os.path.expanduser(self.args.cachedir_base)
self.repos = {}
# resume is just an overload for the start-at argument
resume = self.get_resume()
if resume:
if self.args.sort == 'desc':
self.args.start_at = resume['number'] - 1
else:
self.args.start_at = resume['number'] + 1
logging.info('creating api wrapper')
self.ghw = GithubWrapper(
url=C.DEFAULT_GITHUB_URL,
user=C.DEFAULT_GITHUB_USERNAME,
passw=C.DEFAULT_GITHUB_PASSWORD,
token=C.DEFAULT_GITHUB_TOKEN,
cachedir=self.cachedir_base
)
logging.info('creating graphql client')
self.gqlc = GithubGraphQLClient(
C.DEFAULT_GITHUB_TOKEN,
server=C.DEFAULT_GITHUB_URL
)
self._maintainer_team = None
@property
def maintainer_team(self):
# Note: this assumes that the token used by the bot has access to check
# team privileges across potentially more than one organization
if self._maintainer_team is None:
self._maintainer_team = []
teams = C.DEFAULT_GITHUB_MAINTAINERS
for team in teams:
_org, _team = team.split('/')
self._maintainer_team.extend(self.gqlc.get_members(_org, _team))
return sorted(set(self._maintainer_team).difference(C.DEFAULT_BOT_NAMES))
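# Illustrative note (editor addition): entries in C.DEFAULT_GITHUB_MAINTAINERS
# are expected to be 'org/team' strings; the example value below is hypothetical.
# Each entry is split on '/', the team members are fetched via the GraphQL
# client, and bot accounts are filtered out of the final sorted set.
#
#     # C.DEFAULT_GITHUB_MAINTAINERS = ['ansible/triagers']      # hypothetical value
#     _org, _team = 'ansible/triagers'.split('/')                # -> ('ansible', 'triagers')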
@classmethod
def create_parser(cls):
parser = argparse.ArgumentParser()
parser.add_argument("--botmetafile", type=str, default=None, help="Use this filepath for botmeta instead of from the repo")
parser.add_argument("--cachedir", type=str, dest='cachedir_base', default='~/.ansibullbot/cache')
parser.add_argument("--daemonize", action="store_true", help="run in a continuos loop")
parser.add_argument("--daemonize_interval", type=int, default=(30 * 60), help="seconds to sleep between loop iterations")
parser.add_argument("--debug", "-d", action="store_true", help="Debug output")
parser.add_argument("--dry-run", "-n", action="store_true", help="Don't make any changes")
parser.add_argument("--dump_actions", action="store_true", help="serialize the actions to disk [/tmp/actions]")
parser.add_argument("--force", "-f", action="store_true", help="Do not ask questions")
parser.add_argument("--logfile", type=str, help="Send logging to this file")
parser.add_argument("--ignore_state", action="store_true", help="Do not skip processing closed issues")
parser.add_argument("--last", type=int, help="triage the last N issues or PRs")
parser.add_argument("--only_closed", action="store_true", help="Triage closed issues|prs only")
parser.add_argument("--only_issues", action="store_true", help="Triage issues only")
parser.add_argument("--only_prs", action="store_true", help="Triage pullrequests only")
parser.add_argument("--pause", "-p", action="store_true", dest="always_pause", help="Always pause between prs|issues")
parser.add_argument("--pr", "--id", type=str, help="Triage only the specified pr|issue (separated by commas)")
parser.add_argument("--resume", action="store_true", dest="resume_enabled", help="pickup right after where the bot last stopped")
parser.add_argument("--repo", "-r", type=str, help="Github repo to triage (defaults to all)")
parser.add_argument("--skiprepo", action='append', help="Github repo to skip triaging")
parser.add_argument("--start-at", type=int, help="Start triage at the specified pr|issue")
parser.add_argument("--sort", default='desc', choices=['asc', 'desc'], help="Direction to sort issues [desc=9-0 asc=0-9]")
return parser
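# Example invocations (editor addition, hedged): assuming an entry-point script
# such as triage_ansible.py that instantiates a Triager subclass using this
# parser (subclasses may extend it), typical runs might look like:
#
#     python triage_ansible.py --debug --dry-run --repo ansible/ansible --pr 12345
#     python triage_ansible.py --daemonize --daemonize_interval 1800 --resume
#
# The flag names come from the parser above; the script name and repository are
# only illustrative.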
def start(self):
if self.args.daemonize:
logging.info('starting daemonize loop')
while True:
self.run()
interval = self.args.daemonize_interval
logging.info('sleep %ss (%sm)' % (interval, interval / 60))
time.sleep(interval)
else:
logging.info('starting single run')
self.run()
logging.info('stopping bot')
@abc.abstractmethod
def run(self):
pass
def apply_actions(self, iw, actions):
action_meta = {'REDO': False}
if actions.count() > 0:
if self.args.dump_actions:
self.dump_action_dict(iw, actions.__dict__)
if self.args.dry_run:
print("Dry-run specified, skipping execution of actions")
else:
if self.args.force:
print("Running actions non-interactive as you forced.")
self.execute_actions(iw, actions)
return action_meta
cont = input("Take recommended actions (y/N/a/R/DEBUG)? ")
if cont in ('a', 'A'):
sys.exit(0)
if cont in ('Y', 'y'):
self.execute_actions(iw, actions)
if cont in ('r', 'R'):
action_meta['REDO'] = True
if cont == 'DEBUG':
# put the user into a breakpoint to do live debug
action_meta['REDO'] = True
import epdb; epdb.st()
elif self.args.always_pause:
print("Skipping, but pause.")
cont = input("Continue (Y/n/a/R/DEBUG)? ")
if cont in ('a', 'A', 'n', 'N'):
sys.exit(0)
elif cont in ('r', 'R'):
action_meta['REDO'] = True
elif cont == 'DEBUG':
# put the user into a breakpoint to do live debug
import epdb; epdb.st()
action_meta['REDO'] = True
else:
print("Skipping.")
# let the upper level code redo this issue
return action_meta
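# Illustrative sketch (editor addition): how a caller can honor the REDO flag
# returned above, re-processing the same issue until no redo is requested.
#
#     while True:
#         actions = DefaultActions()
#         ...  # populate actions for 'iw'
#         action_meta = self.apply_actions(iw, actions)
#         if not action_meta.get('REDO'):
#             break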
def execute_actions(self, iw, actions):
"""Turns the actions into API calls"""
for commentid in actions.uncomment:
iw.remove_comment_by_id(commentid)
for comment in actions.comments:
logging.i
SYMBOL INDEX (706 symbols across 83 files)
FILE: ansibullbot/_text_compat.py
function to_bytes (line 50) | def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
function to_text (line 167) | def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
FILE: ansibullbot/ansibletriager.py
class AnsibleActions (line 69) | class AnsibleActions(DefaultActions):
method __init__ (line 70) | def __init__(self):
class AnsibleTriager (line 78) | class AnsibleTriager(DefaultTriager):
method __init__ (line 123) | def __init__(self, args=None):
method load_botmeta (line 137) | def load_botmeta(self, gitrepo):
method _should_skip_issue (line 146) | def _should_skip_issue(self, summary):
method run (line 163) | def run(self):
method save_meta (line 298) | def save_meta(self, issuewrapper, meta, actions):
method dump_meta (line 353) | def dump_meta(self, issuewrapper, meta):
method create_actions (line 364) | def create_actions(self, iw, actions, valid_labels):
method post_actions_to_receiver (line 1146) | def post_actions_to_receiver(self, iw, actions, processed_meta):
method process (line 1157) | def process(self, iw, valid_labels):
method process_comment_commands (line 1354) | def process_comment_commands(self, issuewrapper, meta):
method negate_command (line 1432) | def negate_command(self, command, commands):
method execute_actions (line 1448) | def execute_actions(self, iw, actions):
method create_parser (line 1489) | def create_parser(cls):
FILE: ansibullbot/ci/azp.py
class AzurePipelinesCI (line 37) | class AzurePipelinesCI(BaseCI):
method __init__ (line 41) | def __init__(self, cachedir, iw):
method build_id (line 70) | def build_id(self):
method jobs (line 90) | def jobs(self):
method state (line 145) | def state(self):
method updated_at (line 149) | def updated_at(self):
method stages (line 153) | def stages(self):
method get_last_full_run_date (line 156) | def get_last_full_run_date(self):
method artifacts (line 163) | def artifacts(self):
method _get_artifact (line 198) | def _get_artifact(self, name, url):
method get_test_results (line 239) | def get_test_results(self):
method rebuild (line 277) | def rebuild(self, run_id, failed_only=False):
method _rebuild_old (line 302) | def _rebuild_old(self):
method rebuild_failed (line 345) | def rebuild_failed(self, run_id):
method cancel (line 348) | def cancel(self, run_id):
method cancel_on_branch (line 374) | def cancel_on_branch(self, branch):
FILE: ansibullbot/ci/base.py
class BaseCI (line 4) | class BaseCI(metaclass=ABCMeta):
method state (line 12) | def state(self):
method updated_at (line 21) | def updated_at(self):
method get_last_full_run_date (line 29) | def get_last_full_run_date(self):
method get_test_results (line 38) | def get_test_results(self):
method rebuild (line 47) | def rebuild(self, run_id, failed_only=False):
method rebuild_failed (line 55) | def rebuild_failed(self, run_id):
method cancel (line 59) | def cancel(self, run_id):
method cancel_on_branch (line 67) | def cancel_on_branch(self, branch):
FILE: ansibullbot/constants.py
function mk_boolean (line 36) | def mk_boolean(value):
function unquote (line 43) | def unquote(value):
function shell_expand (line 47) | def shell_expand(path, expand_relative_paths=False):
function get_config (line 63) | def get_config(p, section, key, env_var, default,
function _get_config (line 142) | def _get_config(p, section, key, env_var, default):
function load_config_file (line 160) | def load_config_file():
function get_ansibullbot_version (line 456) | def get_ansibullbot_version():
FILE: ansibullbot/defaulttriager.py
class DefaultActions (line 51) | class DefaultActions:
method __init__ (line 52) | def __init__(self):
method count (line 61) | def count(self):
function render_boilerplate (line 74) | def render_boilerplate(tvars: t.Dict[str, t.Any], boilerplate: str) -> str:
class DefaultTriager (line 78) | class DefaultTriager:
method __init__ (line 94) | def __init__(self, args=None):
method maintainer_team (line 130) | def maintainer_team(self):
method create_parser (line 142) | def create_parser(cls):
method start (line 167) | def start(self):
method run (line 181) | def run(self):
method apply_actions (line 184) | def apply_actions(self, iw, actions):
method execute_actions (line 226) | def execute_actions(self, iw, actions):
method dump_action_dict (line 259) | def dump_action_dict(self, issue, actions):
method get_resume (line 270) | def get_resume(self):
method set_resume (line 285) | def set_resume(self, repo, number):
method eval_pr_param (line 297) | def eval_pr_param(self, pr):
method load_meta (line 339) | def load_meta(self, reponame: str, number: str) -> t.Dict[str, t.Any]:
method get_stale_numbers (line 358) | def get_stale_numbers(self, reponame: str, issue_summaries: t.Dict[str...
method _collect_repo (line 382) | def _collect_repo(self, repo, issuenums=None):
method collect_repos (line 529) | def collect_repos(self):
FILE: ansibullbot/exceptions.py
class LabelWafflingError (line 1) | class LabelWafflingError(Exception):
class RateLimitError (line 5) | class RateLimitError(Exception):
class NoCIError (line 9) | class NoCIError(Exception):
FILE: ansibullbot/ghapiwrapper.py
class GithubWrapper (line 26) | class GithubWrapper:
method __init__ (line 27) | def __init__(self, url=None, user=None, passw=None, token=None, cached...
method _connect (line 34) | def _connect(self, url, user, passw, token):
method get_request (line 46) | def get_request(self, url):
class RepoWrapper (line 73) | class RepoWrapper:
method __init__ (line 74) | def __init__(self, gh, repo_path, cachedir='~/.ansibullbot/cache'):
method has_in_assignees (line 82) | def has_in_assignees(self, login):
method get_repo (line 87) | def get_repo(self, repo_path):
method get_issue (line 93) | def get_issue(self, number):
method get_pullrequest (line 112) | def get_pullrequest(self, number):
method is_pr_merged (line 116) | def is_pr_merged(self, number):
method labels (line 124) | def labels(self):
method assignees (line 130) | def assignees(self):
method get_issues (line 135) | def get_issues(self, since=None):
method load_issue (line 141) | def load_issue(self, number):
method save_issue (line 161) | def save_issue(self, issue):
method load_update_fetch (line 179) | def load_update_fetch(self, property_name):
method get_file_contents (line 228) | def get_file_contents(self, filepath):
FILE: ansibullbot/historywrapper.py
class HistoryWrapper (line 12) | class HistoryWrapper:
method __init__ (line 22) | def __init__(self, events, labels, last_updated, usecache=True, cached...
method validate_cache (line 45) | def validate_cache(self, cache):
method _load_cache (line 80) | def _load_cache(self):
method _dump_cache (line 96) | def _dump_cache(self):
method merge_commits (line 117) | def merge_commits(self, commits):
method merge_reviews (line 131) | def merge_reviews(self, reviews):
method _find_events_by_actor (line 167) | def _find_events_by_actor(self, eventname, actor=None, maxcount=1):
method get_user_comments (line 183) | def get_user_comments(self, username):
method search_user_comments (line 193) | def search_user_comments(self, username, searchterm):
method get_commands (line 203) | def get_commands(self, username, command_keys, timestamps=False, usela...
method get_component_commands (line 252) | def get_component_commands(self, command_key='!component'):
method was_assigned (line 271) | def was_assigned(self, username):
method was_subscribed (line 276) | def was_subscribed(self, username):
method last_notified (line 281) | def last_notified(self, username):
method last_comment (line 300) | def last_comment(self, username):
method label_last_applied (line 313) | def label_last_applied(self, label):
method label_last_removed (line 323) | def label_last_removed(self, label):
method was_labeled (line 333) | def was_labeled(self, label, bots=None):
method was_unlabeled (line 349) | def was_unlabeled(self, label, bots=None):
method get_boilerplate_comments (line 365) | def get_boilerplate_comments(self, dates=False, content=True):
method get_boilerplate_comments_content (line 390) | def get_boilerplate_comments_content(self):
method last_date_for_boilerplate (line 395) | def last_date_for_boilerplate(self, boiler):
method last_commit_date (line 404) | def last_commit_date(self):
method get_changed_labels (line 411) | def get_changed_labels(self, prefix=None, bots=None):
method label_is_waffling (line 427) | def label_is_waffling(self, label, limit=20):
method command_status (line 442) | def command_status(self, command):
FILE: ansibullbot/issuewrapper.py
class UnsetValue (line 34) | class UnsetValue:
method __str__ (line 35) | def __str__(self):
class IssueWrapper (line 39) | class IssueWrapper:
method __init__ (line 40) | def __init__(self, github=None, repo=None, issue=None, cachedir=None, ...
method url (line 67) | def url(self):
method comments (line 71) | def comments(self):
method events (line 75) | def events(self):
method _parse_events (line 81) | def _parse_events(self, events):
method _get_timeline (line 148) | def _get_timeline(self):
method load_update_fetch_files (line 200) | def load_update_fetch_files(self):
method get_labels (line 246) | def get_labels(self):
method template_data (line 254) | def template_data(self):
method add_label (line 260) | def add_label(self, label=None):
method remove_label (line 265) | def remove_label(self, label=None):
method add_comment (line 270) | def add_comment(self, comment=None):
method remove_comment_by_id (line 275) | def remove_comment_by_id(self, commentid):
method assignees (line 302) | def assignees(self):
method is_pullrequest (line 307) | def is_pullrequest(self):
method is_issue (line 310) | def is_issue(self):
method age (line 314) | def age(self):
method title (line 321) | def title(self):
method repo_full_name (line 325) | def repo_full_name(self):
method html_url (line 342) | def html_url(self):
method created_at (line 346) | def created_at(self):
method updated_at (line 350) | def updated_at(self):
method updated_at (line 363) | def updated_at(self, value):
method closed_at (line 367) | def closed_at(self):
method merged_at (line 371) | def merged_at(self):
method state (line 375) | def state(self):
method github_type (line 379) | def github_type(self):
method number (line 386) | def number(self):
method submitter (line 390) | def submitter(self):
method pullrequest (line 401) | def pullrequest(self):
method update_pullrequest (line 407) | def update_pullrequest(self):
method pullrequest_check_runs (line 417) | def pullrequest_check_runs(self):
method pullrequest_raw_data (line 425) | def pullrequest_raw_data(self):
method pr_files (line 432) | def pr_files(self):
method files (line 438) | def files(self):
method new_files (line 444) | def new_files(self):
method new_modules (line 450) | def new_modules(self):
method body (line 467) | def body(self):
method labels (line 471) | def labels(self):
method reviews (line 477) | def reviews(self):
method history (line 490) | def history(self):
method commits (line 501) | def commits(self):
method mergeable (line 509) | def mergeable(self):
method mergeable_state (line 513) | def mergeable_state(self):
method wip (line 538) | def wip(self):
method incoming_repo_exists (line 546) | def incoming_repo_exists(self):
method incoming_repo_slug (line 550) | def incoming_repo_slug(self):
method from_fork (line 557) | def from_fork(self):
method get_commit_login (line 563) | def get_commit_login(self, commit):
method merge_commits (line 571) | def merge_commits(self):
method committer_emails (line 580) | def committer_emails(self):
method committer_logins (line 588) | def committer_logins(self):
method merge (line 595) | def merge(self):
method renamed_files (line 623) | def renamed_files(self):
FILE: ansibullbot/plugins/backports.py
function get_backport_facts (line 1) | def get_backport_facts(issuewrapper):
FILE: ansibullbot/plugins/botstatus.py
function get_bot_status_facts (line 1) | def get_bot_status_facts(issuewrapper, all_maintainers, maintainer_team=...
FILE: ansibullbot/plugins/ci_rebuild.py
function get_ci_facts (line 4) | def get_ci_facts(iw, ci):
function get_rebuild_facts (line 18) | def get_rebuild_facts(iw, meta, force=False):
function _get_last_command (line 52) | def _get_last_command(iw, command, username):
function get_rebuild_merge_facts (line 67) | def get_rebuild_merge_facts(iw, meta, maintainer_team, ci):
function get_rebuild_command_facts (line 115) | def get_rebuild_command_facts(iw, meta, ci):
FILE: ansibullbot/plugins/collection_facts.py
function get_collection_facts (line 4) | def get_collection_facts(iw, component_matcher, meta):
FILE: ansibullbot/plugins/community_workgroups.py
function get_community_workgroup_facts (line 1) | def get_community_workgroup_facts(issuewrapper, meta):
FILE: ansibullbot/plugins/component_matching.py
function get_component_match_facts (line 5) | def get_component_match_facts(iw, component_matcher, valid_labels):
function reconcile_component_commands (line 189) | def reconcile_component_commands(iw, component_matcher, CM_MATCHES):
function get_pr_quality_facts (line 241) | def get_pr_quality_facts(issuewrapper):
FILE: ansibullbot/plugins/contributors.py
function get_contributor_facts (line 4) | def get_contributor_facts(issuewrapper):
FILE: ansibullbot/plugins/cross_references.py
function get_cross_reference_facts (line 1) | def get_cross_reference_facts(issuewrapper):
FILE: ansibullbot/plugins/deprecation.py
function get_deprecation_facts (line 4) | def get_deprecation_facts(meta):
FILE: ansibullbot/plugins/docs_info.py
class ParsedFunc (line 18) | class ParsedFunc:
class ParsedClass (line 27) | class ParsedClass:
method find_function (line 36) | def find_function(self, lineno):
class ParsedModule (line 43) | class ParsedModule:
method find_class (line 52) | def find_class(self, lineno):
class CommitFile (line 58) | class CommitFile:
method __init__ (line 59) | def __init__(self, raw_data):
method filename (line 63) | def filename(self):
method status (line 67) | def status(self):
method patch (line 71) | def patch(self):
method raw_url (line 75) | def raw_url(self):
method file_content (line 79) | def file_content(self):
function _is_docs_path (line 85) | def _is_docs_path(filename):
function _get_diff_info (line 92) | def _get_diff_info(diff_text):
function _get_ast_info (line 140) | def _get_ast_info(content):
function _is_diff_docs_only (line 214) | def _is_diff_docs_only(file_content, diff):
function _is_docs_only (line 260) | def _is_docs_only(changed_file):
function get_docs_facts (line 287) | def get_docs_facts(iw):
FILE: ansibullbot/plugins/filament.py
function get_filament_facts (line 1) | def get_filament_facts(issuewrapper, meta):
FILE: ansibullbot/plugins/label_commands.py
function get_label_command_facts (line 1) | def get_label_command_facts(iw, all_maintainers, maintainer_team=None, v...
function get_waffling_overrides (line 66) | def get_waffling_overrides(iw, all_maintainers, maintainer_team=None):
FILE: ansibullbot/plugins/needs_contributor.py
function get_needs_contributor_facts (line 1) | def get_needs_contributor_facts(events, botnames=None):
FILE: ansibullbot/plugins/needs_info.py
function is_needsinfo (line 7) | def is_needsinfo(iw, botnames=None):
function needs_info_template_facts (line 43) | def needs_info_template_facts(iw, meta):
function needs_info_timeout_facts (line 89) | def needs_info_timeout_facts(history, meta):
FILE: ansibullbot/plugins/needs_revision.py
function get_needs_revision_facts (line 12) | def get_needs_revision_facts(iw, meta, ci, maintainer_team=None, botname...
function _changes_requested_by (line 344) | def _changes_requested_by(user_reviews, shipits, last_commit, ready_for_...
function _get_review_state (line 373) | def _get_review_state(reviews, submitter):
function get_ci_run_facts (line 421) | def get_ci_run_facts(iw, meta, ci):
FILE: ansibullbot/plugins/notifications.py
function get_notification_facts (line 4) | def get_notification_facts(issuewrapper, meta, botmeta=None):
FILE: ansibullbot/plugins/shipit.py
function replace_ansible (line 6) | def replace_ansible(maintainers, ansible_members, bots=None):
function is_approval (line 21) | def is_approval(body):
function is_rebuild_merge (line 28) | def is_rebuild_merge(body):
function get_automerge_facts (line 35) | def get_automerge_facts(issuewrapper, meta):
function needs_community_review (line 136) | def needs_community_review(meta):
function get_shipit_facts (line 174) | def get_shipit_facts(issuewrapper, inmeta, botmeta_files, maintainer_tea...
function get_submitter_facts (line 435) | def get_submitter_facts(issuewrapper, meta, emails_cache, component_matc...
FILE: ansibullbot/plugins/small_patch.py
class CommitFile (line 10) | class CommitFile:
method __init__ (line 11) | def __init__(self, raw_data):
method filename (line 15) | def filename(self):
method changes (line 19) | def changes(self):
function get_small_patch_facts (line 23) | def get_small_patch_facts(iw):
FILE: ansibullbot/plugins/spam.py
function get_spam_facts (line 4) | def get_spam_facts(issuewrapper):
FILE: ansibullbot/plugins/test_support_plugins.py
function get_test_support_plugins_facts (line 11) | def get_test_support_plugins_facts(iw, component_matcher):
FILE: ansibullbot/plugins/traceback.py
function get_traceback_facts (line 7) | def get_traceback_facts(iw):
FILE: ansibullbot/utils/botmetadata.py
class NoAliasDumper (line 13) | class NoAliasDumper(yaml.Dumper):
method ignore_aliases (line 14) | def ignore_aliases(self, data):
function compute_file_children (line 18) | def compute_file_children(filenames):
class BotMetadataParser (line 42) | class BotMetadataParser:
method parse_yaml (line 45) | def parse_yaml(data):
FILE: ansibullbot/utils/component_tools.py
function make_prefixes (line 88) | def make_prefixes(filename):
class AnsibleComponentMatcher (line 98) | class AnsibleComponentMatcher:
method __init__ (line 187) | def __init__(self, gitrepo=None, botmeta=None, usecache=False, cachedi...
method update (line 205) | def update(self, email_cache=None, botmeta=None):
method get_module_meta (line 213) | def get_module_meta(self, checkoutdir, filename):
method index_files (line 280) | def index_files(self):
method cache_keywords (line 369) | def cache_keywords(self):
method clean_body (line 377) | def clean_body(self, body, internal=False):
method match (line 393) | def match(self, issuewrapper):
method match_components (line 403) | def match_components(self, title, body, component, files=None):
method search_ecosystem (line 481) | def search_ecosystem(self, component):
method _match_component (line 502) | def _match_component(self, title, component):
method search_by_module_name (line 593) | def search_by_module_name(self, component):
method search_by_keywords (line 613) | def search_by_keywords(self, component, exact=True):
method search_by_regex_urls (line 640) | def search_by_regex_urls(self, body):
method search_by_regex_modules (line 683) | def search_by_regex_modules(self, body):
method search_by_regex_module_globs (line 791) | def search_by_regex_module_globs(self, body):
method search_by_regex_generic (line 867) | def search_by_regex_generic(self, body):
method search_by_filepath (line 936) | def search_by_filepath(self, body, partial=False, context=None):
method reduce_filepaths (line 1071) | def reduce_filepaths(self, matches):
method include_modules_from_test_targets (line 1098) | def include_modules_from_test_targets(self, matches):
method _filenames_to_keys (line 1117) | def _filenames_to_keys(self, filenames):
method get_labels_for_files (line 1128) | def get_labels_for_files(self, files):
method get_meta_for_file (line 1136) | def get_meta_for_file(self, filename):
method find_module_match (line 1417) | def find_module_match(self, pattern, exact=False):
method _find_module_match (line 1477) | def _find_module_match(self, pattern, exact=False):
FILE: ansibullbot/utils/extractors.py
function extract_template_sections (line 21) | def extract_template_sections(body, header=TEMPLATE_HEADER):
function fuzzy_find_sections (line 43) | def fuzzy_find_sections(body, sections):
function find_sections (line 136) | def find_sections(body):
function extract_template_data (line 166) | def extract_template_data(body, issue_class='issue', sections=None):
function clean_bad_characters (line 323) | def clean_bad_characters(raw_text, exclude=None):
function remove_markdown_comments (line 350) | def remove_markdown_comments(rawtext):
function extract_pr_number_from_comment (line 370) | def extract_pr_number_from_comment(rawtext):
class ModuleExtractor (line 384) | class ModuleExtractor:
method __init__ (line 391) | def __init__(self, filepath, filedata=None, email_cache=None):
method filedata (line 397) | def filedata(self):
method authors (line 407) | def authors(self):
method docs (line 413) | def docs(self):
method get_module_authors (line 445) | def get_module_authors(self):
method extract_github_id (line 465) | def extract_github_id(self, author):
function get_template_data (line 502) | def get_template_data(iw):
FILE: ansibullbot/utils/feature_flags.py
class FeatureFlags (line 7) | class FeatureFlags:
method __init__ (line 8) | def __init__(self, config_obj):
method is_enabled (line 11) | def is_enabled(self, feature):
method is_disabled (line 14) | def is_disabled(self, feature):
method flags (line 18) | def flags(self):
method from_config (line 22) | def from_config(cls, config_path):
FILE: ansibullbot/utils/galaxy.py
class GalaxyQueryTool (line 64) | class GalaxyQueryTool:
method __init__ (line 66) | def __init__(self, cachedir=None):
method _get_cached_url (line 77) | def _get_cached_url(self, url, days=0):
method search_galaxy (line 103) | def search_galaxy(self, component):
method fuzzy_search_galaxy (line 176) | def fuzzy_search_galaxy(self, component):
FILE: ansibullbot/utils/gh_gql_client.py
class GithubGraphQLClient (line 136) | class GithubGraphQLClient:
method __init__ (line 139) | def __init__(self, token, server=None):
method get_members (line 149) | def get_members(self, org, team):
method get_issue_summaries (line 160) | def get_issue_summaries(self, repo_url):
method get_all_summaries (line 185) | def get_all_summaries(self, owner, repo):
method get_summaries (line 224) | def get_summaries(self, owner, repo, otype='issues', last=None, first=...
method get_summary (line 285) | def get_summary(self, repo_url, otype, number):
method update_node (line 322) | def update_node(self, node, node_type, owner, repo):
method get_usernames_from_filename_blame (line 344) | def get_usernames_from_filename_blame(self, owner, repo, branch, filep...
method post_request (line 393) | def post_request(self, payload):
FILE: ansibullbot/utils/git_tools.py
class GitRepoWrapper (line 13) | class GitRepoWrapper:
method __init__ (line 14) | def __init__(self, cachedir, repo, commit=None, rebase=True, context=N...
method exists (line 40) | def exists(self, filename):
method isgit (line 44) | def isgit(self):
method isdir (line 47) | def isdir(self, filename):
method files (line 55) | def files(self):
method module_files (line 65) | def module_files(self):
method create_checkout (line 68) | def create_checkout(self):
method update (line 97) | def update(self, force=False):
method update_checkout (line 105) | def update_checkout(self):
method manage_checkout (line 146) | def manage_checkout(self):
method get_files (line 158) | def get_files(self, force=False):
method get_files_by_commit (line 183) | def get_files_by_commit(self, commit):
method get_commits_by_email (line 195) | def get_commits_by_email(self, email):
method get_last_rev_for_file (line 239) | def get_last_rev_for_file(self, filepath):
method existed (line 253) | def existed(self, filepath):
method get_file_content (line 264) | def get_file_content(self, filepath, follow=False):
method find (line 293) | def find(self, pattern):
method list_files_by_branch (line 304) | def list_files_by_branch(self, branch):
FILE: ansibullbot/utils/github.py
function get_rate_limit (line 26) | def get_rate_limit():
function get_reset_time (line 72) | def get_reset_time():
function RateLimited (line 96) | def RateLimited(fn):
FILE: ansibullbot/utils/logs.py
function set_logger (line 5) | def set_logger(debug=False, logfile=None):
FILE: ansibullbot/utils/moduletools.py
class Blame (line 22) | class Blame(Base):
class Email (line 31) | class Email(Base):
class ModuleIndexer (line 38) | class ModuleIndexer:
method __init__ (line 62) | def __init__(self, commits=True, blames=True, botmeta=None, gh_client=...
method update (line 93) | def update(self, botmeta=None):
method get_ansible_modules (line 98) | def get_ansible_modules(self):
method populate_modules (line 164) | def populate_modules(self, matches):
method get_module_commits (line 215) | def get_module_commits(self):
method last_commit_for_file (line 283) | def last_commit_for_file(self, filepath):
method get_module_blames (line 294) | def get_module_blames(self):
method set_maintainers (line 378) | def set_maintainers(self):
method split_topics_from_path (line 453) | def split_topics_from_path(self, module_file):
method set_module_imports (line 474) | def set_module_imports(self):
method get_module_imports (line 481) | def get_module_imports(self, module_file):
method all_maintainers (line 505) | def all_maintainers(self):
method get_maintainers_for_namespace (line 511) | def get_maintainers_for_namespace(self, namespace):
FILE: ansibullbot/utils/net_tools.py
function fetch (line 18) | def fetch(url, verb='get', **kwargs):
FILE: ansibullbot/utils/receiver_client.py
function post_to_receiver (line 8) | def post_to_receiver(path, params, data):
function get_receiver_summaries (line 38) | def get_receiver_summaries(username, reponame, state=None, number=None):
function get_receiver_metadata (line 81) | def get_receiver_metadata(username, reponame, number=None, keys=None):
FILE: ansibullbot/utils/sentry.py
function initialize_sentry (line 5) | def initialize_sentry():
FILE: ansibullbot/utils/sqlite_utils.py
class Blame (line 18) | class Blame(Base):
class Email (line 27) | class Email(Base):
class RateLimit (line 34) | class RateLimit(Base):
class GithubApiRequest (line 45) | class GithubApiRequest(Base):
class AnsibullbotDatabase (line 57) | class AnsibullbotDatabase:
method __init__ (line 65) | def __init__(self, cachedir='/tmp'):
method delete_db_file (line 87) | def delete_db_file(self):
method create_tables (line 90) | def create_tables(self):
method get_github_api_request_meta (line 105) | def get_github_api_request_meta(self, url, token=None):
method set_github_api_request_meta (line 129) | def set_github_api_request_meta(self, url, headers, datafile, token=No...
method set_rate_limit (line 157) | def set_rate_limit(self, username=None, token=None, rawjson=None):
method get_rate_limit_remaining (line 180) | def get_rate_limit_remaining(self, username=None, token=None):
method get_rate_limit_rawjson (line 203) | def get_rate_limit_rawjson(self, username=None, token=None):
method get_rate_limit_query_counter (line 233) | def get_rate_limit_query_counter(self, username=None, token=None):
method reset_rate_limit_query_counter (line 241) | def reset_rate_limit_query_counter(self, username=None, token=None):
FILE: ansibullbot/utils/systemtools.py
function run_command (line 6) | def run_command(cmd, cwd=None, env=None):
FILE: ansibullbot/utils/timetools.py
function strip_time_safely (line 7) | def strip_time_safely(tstring):
FILE: ansibullbot/utils/version_tools.py
function _is_valid_version (line 15) | def _is_valid_version(vstring):
function get_version_major_minor (line 22) | def get_version_major_minor(vstring: str) -> str:
class AnsibleVersionIndexer (line 29) | class AnsibleVersionIndexer:
method __init__ (line 30) | def __init__(self, checkoutdir):
method commits_by_date (line 37) | def commits_by_date(self) -> None:
method _get_devel_version (line 45) | def _get_devel_version(self) -> str:
method _get_valid_versions (line 55) | def _get_valid_versions(self) -> t.Dict[str, str]:
method is_valid_version (line 75) | def is_valid_version(self, version: str) -> bool:
method strip_ansible_version (line 85) | def strip_ansible_version(self, rawtext):
method version_by_commit (line 233) | def version_by_commit(self, commithash: str) -> str:
method version_by_date (line 267) | def version_by_date(self, dateobj) -> str:
method version_by_issue (line 290) | def version_by_issue(self, iw) -> str:
FILE: scripts/ansibot_actions.py
function main (line 66) | def main():
FILE: scripts/ansibot_receiver.py
function get_summary_numbers_for_repo (line 25) | def get_summary_numbers_for_repo(org, repo, collection_name=None):
function get_summary_numbers_with_state_for_repo (line 42) | def get_summary_numbers_with_state_for_repo(org, repo, collection_name=N...
function store_action (line 59) | def store_action():
function list_actions (line 80) | def list_actions():
function dedupe_summaries (line 108) | def dedupe_summaries():
function metadata (line 137) | def metadata():
function summaries (line 204) | def summaries():
function strip_line_json (line 317) | def strip_line_json(line):
function logs (line 363) | def logs(issue=None):
FILE: scripts/generate_issues_by_file.py
function main (line 13) | def main():
FILE: scripts/slack-notice.py
function get_config (line 14) | def get_config(args):
function parse_args (line 29) | def parse_args():
function main (line 41) | def main():
FILE: tests/component/module_matching.py
class TestModuleMatching (line 8) | class TestModuleMatching(unittest.TestCase):
method test_module_matching (line 11) | def test_module_matching(self):
FILE: tests/component/test_idempotence.py
class TestIdempotence (line 13) | class TestIdempotence:
method test_no_actions_on_second_run (line 16) | def test_no_actions_on_second_run(self, *args, **kwargs):
FILE: tests/component/test_supershipit.py
class TestSuperShipit (line 14) | class TestSuperShipit:
method test_presupershipit (line 16) | def test_presupershipit(self):
method test_supershipit (line 21) | def test_supershipit(self, *args, **kwargs):
FILE: tests/fixtures/docs_info/files/lib_ansible_foo.py
class Bar (line 13) | class Bar:
method baz (line 16) | def baz(self):
FILE: tests/manual/group_errors.py
function get_matches (line 9) | def get_matches(errors, pattern):
FILE: tests/manual/matching_test.py
class IssueWrapperMock (line 31) | class IssueWrapperMock:
method __init__ (line 32) | def __init__(self, meta):
method is_issue (line 35) | def is_issue(self):
method is_pullrequest (line 38) | def is_pullrequest(self):
method html_url (line 42) | def html_url(self):
method title (line 46) | def title(self):
method body (line 50) | def body(self):
method template_data (line 55) | def template_data(self):
function extract_metafiles (line 59) | def extract_metafiles():
function clean_metafiles (line 72) | def clean_metafiles(filenames):
function load_expected (line 77) | def load_expected():
function save_expected (line 84) | def save_expected(data):
function load_match_map (line 90) | def load_match_map():
function save_match_map (line 96) | def save_match_map(data):
function load_skip (line 101) | def load_skip():
function save_skip (line 108) | def save_skip(data):
function set_logger (line 114) | def set_logger():
function main (line 126) | def main():
FILE: tests/unit/decorators/test_github.py
class RequestsResponseMock (line 6) | class RequestsResponseMock:
method __init__ (line 9) | def __init__(self, url):
method json (line 12) | def json(self):
function SleepMock (line 17) | def SleepMock(duration):
function test_get_rate_limit (line 27) | def test_get_rate_limit(mock_requests_get):
FILE: tests/unit/parsers/test_bot_metadata_parser.py
class TestBotMetaIndexerBase (line 72) | class TestBotMetaIndexerBase(unittest.TestCase):
method setUp (line 73) | def setUp(self):
class TestBotMetadataParserProperties (line 80) | class TestBotMetadataParserProperties(TestBotMetaIndexerBase):
method runTest (line 81) | def runTest(self):
class TestBotMetadataParserEx1 (line 85) | class TestBotMetadataParserEx1(TestBotMetaIndexerBase):
method runTest (line 86) | def runTest(self):
class TestBotMetadataParserFileExample1 (line 124) | class TestBotMetadataParserFileExample1(TestBotMetaIndexerBase):
method runTest (line 125) | def runTest(self):
class TestBotMetadataPropagation (line 134) | class TestBotMetadataPropagation(TestBotMetaIndexerBase):
method test_keywords (line 139) | def test_keywords(self):
class TestBotMetadataParserAnchors (line 242) | class TestBotMetadataParserAnchors(TestBotMetaIndexerBase):
method runTest (line 243) | def runTest(self):
FILE: tests/unit/triagers/plugins/test_automerge.py
class HistoryWrapperMock (line 6) | class HistoryWrapperMock:
method __init__ (line 8) | def __init__(self):
class IssueWrapperMock (line 12) | class IssueWrapperMock:
method __init__ (line 19) | def __init__(self, org, repo, number):
method is_pullrequest (line 25) | def is_pullrequest(self):
method add_comment (line 28) | def add_comment(self, user, body):
method add_file (line 32) | def add_file(self, filename, content):
method wip (line 37) | def wip(self):
method files (line 41) | def files(self):
method history (line 45) | def history(self):
method submitter (line 49) | def submitter(self):
method html_url (line 53) | def html_url(self):
class MockFile (line 60) | class MockFile:
method __init__ (line 61) | def __init__(self, name, content=''):
class TestAutomergeFacts (line 66) | class TestAutomergeFacts(unittest.TestCase):
method test_automerge_if_shipit (line 68) | def test_automerge_if_shipit(self):
method test_not_automerge_if_not_shipit (line 103) | def test_not_automerge_if_not_shipit(self):
FILE: tests/unit/triagers/plugins/test_docs_info.py
function datafile_id (line 39) | def datafile_id(datafile):
function iw_fixture (line 43) | def iw_fixture(request):
function test_docs_facts (line 50) | def test_docs_facts(iw_fixture):
FILE: tests/unit/triagers/plugins/test_needs_contributor.py
function test_needs_contributor_command (line 4) | def test_needs_contributor_command():
function test_not_needs_contributor_command (line 14) | def test_not_needs_contributor_command():
function test_waiting_on_contributor_label (line 23) | def test_waiting_on_contributor_label():
FILE: tests/unit/triagers/plugins/test_needs_info.py
class TestNeedsInfoTimeoutFacts (line 11) | class TestNeedsInfoTimeoutFacts(unittest.TestCase):
method setUp (line 12) | def setUp(self):
method test_warn (line 25) | def test_warn(self):
method test_close (line 36) | def test_close(self):
method test_no_action (line 55) | def test_no_action(self):
method test_close_1 (line 68) | def test_close_1(self):
method test_too_quick_close (line 87) | def test_too_quick_close(self):
method test_too_quick_close2 (line 106) | def test_too_quick_close2(self):
method test_warn_template (line 143) | def test_warn_template(self):
FILE: tests/unit/triagers/plugins/test_needs_revision.py
class CIMock (line 12) | class CIMock:
method __init__ (line 13) | def __init__(self):
method get_last_full_run_date (line 16) | def get_last_full_run_date(*args, **kwargs):
class TestNeedsRevisionFacts (line 20) | class TestNeedsRevisionFacts(TestCase):
method setUp (line 22) | def setUp(self):
method test_shipit_overrides_changes_requested_github_review (line 32) | def test_shipit_overrides_changes_requested_github_review(self):
method test_shipit_removes_needs_revision (line 59) | def test_shipit_removes_needs_revision(self):
method test_shipit_removes_needs_revision_multiple_users (line 85) | def test_shipit_removes_needs_revision_multiple_users(self):
class TestReviewMethods (line 112) | class TestReviewMethods(TestCase):
method test_reviews (line 113) | def test_reviews(self):
method test_review_older_than_ready_for_review (line 158) | def test_review_older_than_ready_for_review(self):
method test_ready_for_review_older_than_review (line 181) | def test_ready_for_review_older_than_review(self):
method test_review_older_than_ready_for_review_PR_not_updated (line 204) | def test_review_older_than_ready_for_review_PR_not_updated(self):
method make_time (line 229) | def make_time(data):
FILE: tests/unit/triagers/plugins/test_notifications.py
function meta (line 9) | def meta():
function statusfile (line 17) | def statusfile():
function iw (line 22) | def iw(meta, statusfile):
function test_notify_authors (line 31) | def test_notify_authors(iw, meta):
FILE: tests/unit/triagers/plugins/test_rebuild.py
function test_rebuild_command (line 9) | def test_rebuild_command():
function test_rebuild_failed_command (line 28) | def test_rebuild_failed_command():
function test_rebuild_and_rebuild_failed_commands (line 47) | def test_rebuild_and_rebuild_failed_commands():
function test_rebuild_failed_and_rebuild_commands (line 66) | def test_rebuild_failed_and_rebuild_commands():
FILE: tests/unit/triagers/plugins/test_rebuild_merge.py
function test0 (line 8) | def test0():
function test1 (line 27) | def test1():
function test2 (line 46) | def test2():
function test3 (line 64) | def test3():
FILE: tests/unit/triagers/plugins/test_shipit.py
class ComponentMatcherMock (line 19) | class ComponentMatcherMock:
method match (line 24) | def match(self, issuewrapper):
class HistoryWrapperMock (line 28) | class HistoryWrapperMock:
method __init__ (line 30) | def __init__(self):
class IssueWrapperMock (line 34) | class IssueWrapperMock:
method __init__ (line 42) | def __init__(self, org, repo, number):
method is_pullrequest (line 48) | def is_pullrequest(self):
method add_comment (line 51) | def add_comment(self, user, body):
method add_file (line 55) | def add_file(self, filename, content):
method wip (line 60) | def wip(self):
method files (line 64) | def files(self):
method history (line 68) | def history(self):
method submitter (line 72) | def submitter(self):
method html_url (line 76) | def html_url(self):
class GitRepoWrapperMock (line 83) | class GitRepoWrapperMock:
method existed (line 86) | def existed(self, filename):
class MockFile (line 90) | class MockFile:
method __init__ (line 91) | def __init__(self, name, content=''):
class MockRepo (line 99) | class MockRepo:
method __init__ (line 100) | def __init__(self, repo_path):
method get_pullrequest (line 103) | def get_pullrequest(self, issueid):
class GithubWrapperMock (line 107) | class GithubWrapperMock:
method get_request (line 108) | def get_request(self, url):
class TestSuperShipit (line 112) | class TestSuperShipit(unittest.TestCase):
method test_supershipit_shipit_facts (line 114) | def test_supershipit_shipit_facts(self):
method test_supershipit_shipit_on_all_files (line 137) | def test_supershipit_shipit_on_all_files(self):
method test_supershipit_shipit_not_all_files (line 163) | def test_supershipit_shipit_not_all_files(self):
method test_maintainer_is_not_supershipit (line 188) | def test_maintainer_is_not_supershipit(self):
method test_core_is_not_supershipit (line 214) | def test_core_is_not_supershipit(self):
method test_automerge_community_only (line 239) | def test_automerge_community_only(self):
method test_supershipit_changelogs (line 274) | def test_supershipit_changelogs(self):
method test_supershipit_deletion_from_sanity_ignore (line 304) | def test_supershipit_deletion_from_sanity_ignore(self):
method test_supershipit_addition_to_sanity_ignore (line 337) | def test_supershipit_addition_to_sanity_ignore(self):
class TestShipitRebuildMerge (line 369) | class TestShipitRebuildMerge(unittest.TestCase):
method test_shipit_with_core_rebuild_merge (line 371) | def test_shipit_with_core_rebuild_merge(self):
method test_shipit_with_noncore_rebuild_merge (line 401) | def test_shipit_with_noncore_rebuild_merge(self):
class TestShipitFacts (line 432) | class TestShipitFacts(unittest.TestCase):
method setUp (line 434) | def setUp(self):
method test_submitter_is_maintainer (line 446) | def test_submitter_is_maintainer(self):
method test_submitter_is_maintainer_team_and_maintainer (line 468) | def test_submitter_is_maintainer_team_and_maintainer(self):
method needs_rebase_or_revision_prevent_shipit (line 491) | def needs_rebase_or_revision_prevent_shipit(self, meta):
method test_needs_rebase_prevent_shipit (line 505) | def test_needs_rebase_prevent_shipit(self):
method test_needs_revision_prevent_shipit (line 513) | def test_needs_revision_prevent_shipit(self):
class TestIsApproval (line 522) | class TestIsApproval(unittest.TestCase):
method test_is_approval (line 524) | def test_is_approval(self):
class TestOwnerPR (line 544) | class TestOwnerPR(unittest.TestCase):
method setUp (line 546) | def setUp(self):
method test_owner_pr_submitter_is_maintainer_one_module_utils_file_updated (line 552) | def test_owner_pr_submitter_is_maintainer_one_module_utils_file_update...
method test_owner_pr_submitter_is_maintainer_one_modules_file_updated (line 586) | def test_owner_pr_submitter_is_maintainer_one_modules_file_updated(self):
method test_owner_pr_submitter_is_maintainer_new_module (line 619) | def test_owner_pr_submitter_is_maintainer_new_module(self):
method test_owner_pr_submitter_is_not_maintainer_of_all_updated_files (line 650) | def test_owner_pr_submitter_is_not_maintainer_of_all_updated_files(self):
method test_owner_pr_module_utils_and_modules_updated_submitter_maintainer_1 (line 700) | def test_owner_pr_module_utils_and_modules_updated_submitter_maintaine...
method test_owner_pr_module_utils_and_modules_updated_submitter_maintainer_2 (line 750) | def test_owner_pr_module_utils_and_modules_updated_submitter_maintaine...
method test_owner_pr_submitter_is_maintainer_one_module_file_updated_changelog (line 795) | def test_owner_pr_submitter_is_maintainer_one_module_file_updated_chan...
class TestAutomergeFacts (line 833) | class TestAutomergeFacts(unittest.TestCase):
method test_automerge_changelog_fragment (line 835) | def test_automerge_changelog_fragment(self):
method test_automerge_deletion_from_ignore (line 869) | def test_automerge_deletion_from_ignore(self):
method test_automerge_addition_to_ignore (line 902) | def test_automerge_addition_to_ignore(self):
FILE: tests/unit/utils/test_component_tools.py
class TestMakePrefixes (line 13) | class TestMakePrefixes(TestCase):
method test_simple_path_is_split_correctly (line 15) | def test_simple_path_is_split_correctly(self):
class GitShallowRepo (line 24) | class GitShallowRepo(GitRepoWrapper):
method create_checkout (line 27) | def create_checkout(self):
method update_checkout (line 34) | def update_checkout(self):
class TestComponentMatcher (line 38) | class TestComponentMatcher(TestCase):
method setUpClass (line 41) | def setUpClass(cls):
method tearDownClass (line 50) | def tearDownClass(cls):
method test_get_meta_for_file_wildcard (line 55) | def test_get_meta_for_file_wildcard(self):
method test_get_meta_for_file_wildcard_multiple (line 69) | def test_get_meta_for_file_wildcard_multiple(self):
method test_get_meta_for_file_pyfile (line 91) | def test_get_meta_for_file_pyfile(self):
method test_get_meta_support_core_from_module (line 116) | def test_get_meta_support_core_from_module(self):
method test_get_meta_support_core_filter_plugin (line 128) | def test_get_meta_support_core_filter_plugin(self):
method test_get_meta_support_new_filter_plugin (line 144) | def test_get_meta_support_new_filter_plugin(self):
method test_get_meta_for_file_powershell (line 161) | def test_get_meta_for_file_powershell(self):
method test_reduce_filepaths (line 177) | def test_reduce_filepaths(self):
method test_search_by_filepath (line 184) | def test_search_by_filepath(self):
method test_search_by_filepath_with_context (line 303) | def test_search_by_filepath_with_context(self):
method test_search_by_regex_module_globs (line 352) | def test_search_by_regex_module_globs(self):
method test_search_by_keywords (line 383) | def test_search_by_keywords(self):
method test_search_by_regex_modules (line 397) | def test_search_by_regex_modules(self):
class TestComponentMatcherInheritance (line 482) | class TestComponentMatcherInheritance(TestCase):
method setUpClass (line 485) | def setUpClass(cls):
method tearDownClass (line 494) | def tearDownClass(cls):
method test_get_meta_for_known_file (line 498) | def test_get_meta_for_known_file(self):
method test_get_meta_for_unknown_extension (line 539) | def test_get_meta_for_unknown_extension(self):
method test_get_meta_support_inheritance (line 580) | def test_get_meta_support_inheritance(self):
FILE: tests/unit/utils/test_extractors_pr_number.py
function test_extract_pr_number_from_comment (line 22) | def test_extract_pr_number_from_comment(test_input, expected):
FILE: tests/unit/utils/test_githubid_extractor.py
class TestGitHubIdExtractor (line 6) | class TestGitHubIdExtractor(unittest.TestCase):
method test_extract (line 7) | def test_extract(self):
method test_notfound (line 25) | def test_notfound(self):
method test_extract_email (line 35) | def test_extract_email(self):
FILE: tests/unit/utils/test_sqlite_tools.py
function test_db_file_endswith_version (line 9) | def test_db_file_endswith_version():
function test_db_file_corrupted (line 22) | def test_db_file_corrupted():
function test_set_and_get_rate_limit (line 42) | def test_set_and_get_rate_limit():
FILE: tests/unit/utils/test_template_extractor.py
class TestTemplateExtraction (line 5) | class TestTemplateExtraction(unittest.TestCase):
method test_0 (line 6) | def test_0(self):
method test_1 (line 25) | def test_1(self):
method test_2 (line 48) | def test_2(self):
method test_3 (line 72) | def test_3(self):
method test_4 (line 96) | def test_4(self):
method test_5 (line 127) | def test_5(self):
FILE: tests/unit/utils/test_template_extractor_simple.py
class TestTemplateExtractionSimple (line 5) | class TestTemplateExtractionSimple(unittest.TestCase):
method test_generic_template_with_no_input_sections (line 7) | def test_generic_template_with_no_input_sections(self):
FILE: tests/unit/utils/test_time_tools.py
class TestTimeStrip (line 7) | class TestTimeStrip(TestCase):
method test_strip_one (line 9) | def test_strip_one(self):
method test_strip_two (line 16) | def test_strip_two(self):
method test_strip_three (line 23) | def test_strip_three(self):
method test_strip_four (line 30) | def test_strip_four(self):
FILE: tests/unit/wrappers/test_history_wrapper.py
function test_get_component_commands (line 9) | def test_get_component_commands():
function test_get_no_component_commands (line 31) | def test_get_no_component_commands():
function test_ignore_events_without_dates_on_last_methods (line 54) | def test_ignore_events_without_dates_on_last_methods():
FILE: tests/utils/componentmocks.py
function get_timestamp (line 118) | def get_timestamp():
function get_custom_timestamp (line 122) | def get_custom_timestamp(months=-1, days=-1):
function unquote (line 130) | def unquote(string):
class IssueDatabase (line 145) | class IssueDatabase:
method __init__ (line 155) | def __init__(self, cachedir):
method load_cache (line 165) | def load_cache(self):
method save_cache (line 178) | def save_cache(self):
method get_url (line 189) | def get_url(self, url, method=None, headers=None, data=None):
method merge_pull (line 442) | def merge_pull(self, org=None, repo=None, number=None, data=None, logi...
method get_pull_statuses (line 474) | def get_pull_statuses(self, org, repo, sid):
method shippable_response (line 494) | def shippable_response(self, url):
method graphql_response (line 501) | def graphql_response(self, data):
method _get_members (line 606) | def _get_members(self):
method _get_teams (line 651) | def _get_teams(self):
method _get_repo (line 676) | def _get_repo(self, repo):
method _get_new_issue_id (line 697) | def _get_new_issue_id(self):
method _get_new_event_id (line 704) | def _get_new_event_id(self):
method _get_issue_index (line 712) | def _get_issue_index(self, org=None, repo=None, number=None, itype=None):
method get_issue (line 728) | def get_issue(self, org=None, repo=None, number=None, itype=None):
method get_issue_property (line 735) | def get_issue_property(self, property_name, org=None, repo=None, numbe...
method get_raw_data (line 741) | def get_raw_data(self, issue, schema='issue'):
method get_comment (line 838) | def get_comment(self, org=None, repo=None, commentid=None):
method get_commits (line 849) | def get_commits(self, org=None, repo=None, number=None):
method get_commit (line 854) | def get_commit(self, org=None, repo=None, chash=None):
method get_git_commit (line 871) | def get_git_commit(self, org=None, repo=None, chash=None):
method get_files (line 890) | def get_files(self, org=None, repo=None, number=None):
method get_file_conent (line 895) | def get_file_conent(self, org=None, repo=None, filename=None):
method set_issue_body (line 914) | def set_issue_body(self, body, org=None, repo=None, number=None):
method set_issue_title (line 919) | def set_issue_title(self, title, org=None, repo=None, number=None):
method add_reaction (line 924) | def add_reaction(self, reaction, login=None, created_at=None, org=None...
method add_issue_label (line 940) | def add_issue_label(self, label, login=None, created_at=None, org=None...
method add_issue_comment (line 971) | def add_issue_comment(self, comment, login=None, created_at=None, org=...
method remove_issue_label (line 1016) | def remove_issue_label(self, label, login=None, created_at=None, org=N...
method add_cross_reference (line 1046) | def add_cross_reference(self, login=None, created_at=None, org=None, r...
method add_issue_file (line 1075) | def add_issue_file(
method _get_empty_stub (line 1115) | def _get_empty_stub(self):
method add_issue (line 1137) | def add_issue(
class MockRequests (line 1285) | class MockRequests:
method __init__ (line 1287) | def __init__(self, issuedb):
method get (line 1290) | def get(self, url, headers=None, data=None):
method post (line 1293) | def post(self, url, headers=None, data=None):
method Session (line 1296) | def Session(self):
class MockRequestsSession (line 1301) | class MockRequestsSession:
method __init__ (line 1302) | def __init__(self, issuedb):
method get (line 1305) | def get(self, url, allow_redirects=False, data=None, headers=None, tim...
method post (line 1308) | def post(self, url, allow_redirects=False, data=None, headers=None, ti...
method delete (line 1311) | def delete(self, url, allow_redirects=False, data=None, headers=None, ...
method put (line 1314) | def put(self, url, allow_redirects=False, data=None, headers=None, tim...
class MockRequestsResponse (line 1319) | class MockRequestsResponse:
method __init__ (line 1321) | def __init__(self, url, inheaders=None, indata=None, method='GET', iss...
method ok (line 1337) | def ok(self):
method text (line 1341) | def text(self):
method headers (line 1350) | def headers(self):
method status_code (line 1354) | def status_code(self):
method json (line 1357) | def json(self):
class BotMockManager (line 1361) | class BotMockManager:
method __init__ (line 1369) | def __init__(self):
method __enter__ (line 1375) | def __enter__(self):
method __exit__ (line 1422) | def __exit__(self, type, value, traceback):
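Illustrative sketch (not the repository's code): the MockRequests / MockRequestsSession / MockRequestsResponse trio indexed above follows a common pattern in which a requests-compatible fake serves canned payloads from an in-memory issue database, presumably so BotMockManager can swap it in for the real requests module for the duration of a test. The class names and behaviors below are simplified assumptions made only to illustrate that shape.

    import json


    class FakeIssueDatabase:
        """Holds canned payloads keyed by URL (the real mock routes on org/repo/number)."""
        def __init__(self):
            self.urls = {}

        def add(self, url, payload):
            self.urls[url] = payload

        def get_url(self, url, method='GET', data=None):
            return self.urls.get(url, {})


    class FakeResponse:
        """Mimics the small slice of requests.Response the bot relies on."""
        def __init__(self, url, payload, status_code=200):
            self.url = url
            self._payload = payload
            self.status_code = status_code
            self.headers = {'Content-Type': 'application/json'}

        @property
        def ok(self):
            return 200 <= self.status_code < 300

        @property
        def text(self):
            return json.dumps(self._payload)

        def json(self):
            return self._payload


    class FakeSession:
        """Stands in for requests.Session; each verb answers from the issue database."""
        def __init__(self, issuedb):
            self.issuedb = issuedb

        def get(self, url, headers=None, data=None, timeout=None):
            return FakeResponse(url, self.issuedb.get_url(url, method='GET'))

        def post(self, url, headers=None, data=None, timeout=None):
            return FakeResponse(url, self.issuedb.get_url(url, method='POST', data=data))


    if __name__ == '__main__':
        db = FakeIssueDatabase()
        db.add('https://api.github.com/repos/ansible/ansible/issues/1', {'number': 1})
        print(FakeSession(db).get('https://api.github.com/repos/ansible/ansible/issues/1').json())

Because code under test only ever sees the requests-style surface (ok, text, status_code, json()), the fixture data can live entirely in memory and no network access is needed.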
FILE: tests/utils/helpers.py
function get_issue (line 11) | def get_issue(datafile, statusfile):
FILE: tests/utils/issue_mock.py
class ActorMock (line 5) | class ActorMock:
class CommitterMock (line 10) | class CommitterMock:
method __init__ (line 11) | def __init__(self, date=None, login=None):
class CommitBottomMock (line 16) | class CommitBottomMock:
method __init__ (line 17) | def __init__(self, committer_date=None, committer_login=None, message=...
class CommitMock (line 22) | class CommitMock:
method __init__ (line 23) | def __init__(self, **kwargs):
class CommitFileMock (line 30) | class CommitFileMock:
method __init__ (line 31) | def __init__(self, filename="", status="", patch="", src_filepath=""):
method raw_data (line 38) | def raw_data(self):
method file_content (line 42) | def file_content(self):
class LabelMock (line 47) | class LabelMock:
class RequesterMock (line 51) | class RequesterMock:
method requestJson (line 54) | def requestJson(self, method, url, headers=None):
class IssueMock (line 58) | class IssueMock:
method __init__ (line 61) | def __init__(self, datafile):
method commits (line 67) | def commits(self):
method comments (line 91) | def comments(self):
method labels (line 95) | def labels(self):
method load_data (line 113) | def load_data(self, datafile):
method add_to_labels (line 155) | def add_to_labels(self, *labels):
method create_comment (line 158) | def create_comment(self, body):
method edit (line 161) | def edit(self, title=None, body=None, assignee=None, state=None, miles...
method get_events (line 164) | def get_events(self):
method get_labels (line 168) | def get_labels(self):
method get_pullrequest (line 171) | def get_pullrequest(self):
method remove_from_labels (line 174) | def remove_from_labels(self, label):
method set_labels (line 177) | def set_labels(self, *labels):
method is_pullrequest (line 180) | def is_pullrequest(self):
method pr_files (line 184) | def pr_files(self):
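Illustrative sketch (not the repository's class): IssueMock above stands in for a PyGithub issue object whose state is loaded from a YAML fixture file and which records the mutations the bot makes. A minimal version of that idea, with field names chosen here purely for illustration, looks like this:

    import yaml


    class MiniIssueMock:
        def __init__(self, datafile):
            self.events = []
            self.load_data(datafile)

        def load_data(self, datafile):
            with open(datafile) as f:
                data = yaml.safe_load(f) or {}
            self.title = data.get('title', '')
            self.body = data.get('body', '')
            self.number = data.get('number')
            self._labels = list(data.get('labels', []))
            self._comments = list(data.get('comments', []))

        def get_labels(self):
            return list(self._labels)

        def add_to_labels(self, *labels):
            # Record the mutation so a test can assert on what the bot did.
            for label in labels:
                if label not in self._labels:
                    self._labels.append(label)
                    self.events.append(('labeled', label))

        def remove_from_labels(self, label):
            if label in self._labels:
                self._labels.remove(label)
                self.events.append(('unlabeled', label))

        def create_comment(self, body):
            self._comments.append(body)
            self.events.append(('commented', body))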
FILE: tests/utils/repo_mock.py
class SubRepo (line 4) | class SubRepo:
method __init__ (line 5) | def __init__(self, assignees=None):
method has_in_assignees (line 8) | def has_in_assignees(self, user):
class RepoMock (line 12) | class RepoMock:
method __init__ (line 15) | def __init__(self, assignees=None):
method get_issue (line 18) | def get_issue(self, issueid):
method get_pullrequest (line 21) | def get_pullrequest(self, issueid):
method get_issues (line 24) | def get_issues(self):
method has_in_assignees (line 27) | def has_in_assignees(self, login):
FILE: triage_ansible.py
function handle_exception (line 27) | def handle_exception(exc_type, exc_value, exc_traceback):
function main (line 37) | def main():
FILE: triage_ansible_mp.py
function run_triage_worker (line 33) | def run_triage_worker(numbers):
function grouper (line 54) | def grouper(n, iterable, padvalue=None):
function main (line 58) | def main():
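The grouper() helper indexed above has the signature of the classic itertools "grouper" recipe; a common implementation with that signature (a sketch, not necessarily the repository's exact code) chunks an iterable, here presumably the list of issue numbers handed to the worker processes, into fixed-size groups:

    from itertools import zip_longest


    def grouper(n, iterable, padvalue=None):
        # grouper(3, 'abcdefg', 'x') --> ('a','b','c'), ('d','e','f'), ('g','x','x')
        return zip_longest(*[iter(iterable)] * n, fillvalue=padvalue)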
Condensed preview — 199 files, each entry showing path, character count, and a content snippet (full structured content: 8,009K chars).
[
{
"path": ".coveragerc",
"chars": 21,
"preview": "[run]\nomit = tests/*\n"
},
{
"path": ".github/.codecov.yml",
"chars": 697,
"preview": "coverage:\n precision: 2\n round: nearest\n range: \"40..100\"\n status:\n # Only consider coverage of the code snippet "
},
{
"path": ".github/CODE_OF_CONDUCT.md",
"chars": 162,
"preview": "# Community Code of Conduct\n Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansibl"
},
{
"path": ".gitignore",
"chars": 6483,
"preview": "# Created by https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs\n"
},
{
"path": ".gitmodules",
"chars": 1424,
"preview": "[submodule \"playbooks/roles/ansibullbot\"]\n\tpath = playbooks/roles/ansibullbot\n\turl = https://github.com/mkrizek/ansible-"
},
{
"path": "CONTRIBUTING.md",
"chars": 2920,
"preview": "# Ansibullbot Contributor's Guide\n\n## Python compatibility\n\nAnsibullbot is compatible with Python 3.8+.\n\n## Getting star"
},
{
"path": "ISSUE_HELP.md",
"chars": 21736,
"preview": "# Ansibullbot Help\n\nMaking progress in resolving issues for modules depends upon your interaction! Please be sure to res"
},
{
"path": "LICENSE",
"chars": 35149,
"preview": " GNU GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n\n Copyright (C) 2007 Free "
},
{
"path": "README.md",
"chars": 3625,
"preview": "[](htt"
},
{
"path": "Vagrantfile",
"chars": 2358,
"preview": "# -*- mode: ruby -*-\n# vi: set ft=ruby :\n\n$script = <<SCRIPT\n# AUTHENTICATION\necho 'root:vagrant' | chpasswd\necho 'vagra"
},
{
"path": "ansible.cfg",
"chars": 14138,
"preview": "# config file for ansible -- http://ansible.com/\n# ==============================================\n\n# nearly all paramete"
},
{
"path": "ansibullbot/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "ansibullbot/_text_compat.py",
"chars": 11614,
"preview": "# This code is part of Ansible, but is an independent component.\n# This particular file snippet, and this file snippet o"
},
{
"path": "ansibullbot/ansibletriager.py",
"chars": 60317,
"preview": "# Key features:\n# * daemonize mode that can continuously loop and process w/out scripts\n# * maintainers can be assig"
},
{
"path": "ansibullbot/ci/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "ansibullbot/ci/azp.py",
"chars": 14199,
"preview": "import hashlib\nimport logging\nimport json\nimport os.path\nimport pickle\nimport re\n\nfrom datetime import timezone\nfrom io "
},
{
"path": "ansibullbot/ci/base.py",
"chars": 1645,
"preview": "from abc import ABCMeta, abstractmethod\n\n\nclass BaseCI(metaclass=ABCMeta):\n \"\"\"An interface that each CI provider tha"
},
{
"path": "ansibullbot/constants.py",
"chars": 11848,
"preview": "# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms "
},
{
"path": "ansibullbot/defaulttriager.py",
"chars": 20692,
"preview": "# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms "
},
{
"path": "ansibullbot/exceptions.py",
"chars": 238,
"preview": "class LabelWafflingError(Exception):\n \"\"\"Label has been added/removed too many times\"\"\"\n\n\nclass RateLimitError(Except"
},
{
"path": "ansibullbot/ghapiwrapper.py",
"chars": 6883,
"preview": "import logging\nimport os\nimport pickle\nimport shutil\nfrom datetime import datetime\n\nimport requests\nfrom github import G"
},
{
"path": "ansibullbot/historywrapper.py",
"chars": 16221,
"preview": "import datetime\nimport logging\nimport os\nimport pickle\nfrom collections.abc import Sequence\nfrom operator import itemget"
},
{
"path": "ansibullbot/issuewrapper.py",
"chars": 20316,
"preview": "# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms "
},
{
"path": "ansibullbot/plugins/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "ansibullbot/plugins/backports.py",
"chars": 399,
"preview": "def get_backport_facts(issuewrapper):\n # https://github.com/ansible/ansibullbot/issues/367\n iw = issuewrapper\n\n "
},
{
"path": "ansibullbot/plugins/botstatus.py",
"chars": 904,
"preview": "def get_bot_status_facts(issuewrapper, all_maintainers, maintainer_team=None, bot_names=None):\n if bot_names is None:"
},
{
"path": "ansibullbot/plugins/ci_rebuild.py",
"chars": 4681,
"preview": "import datetime\n\n\ndef get_ci_facts(iw, ci):\n cifacts = {\n 'ci_run_number': None\n }\n\n if not iw.is_pullre"
},
{
"path": "ansibullbot/plugins/collection_facts.py",
"chars": 3586,
"preview": "import copy\n\n\ndef get_collection_facts(iw, component_matcher, meta):\n # Skip redirection of backports or <2.10 issues"
},
{
"path": "ansibullbot/plugins/community_workgroups.py",
"chars": 1321,
"preview": "def get_community_workgroup_facts(issuewrapper, meta):\n # https://github.com/ansible/ansibullbot/issues/820\n # htt"
},
{
"path": "ansibullbot/plugins/component_matching.py",
"chars": 9841,
"preview": "import logging\nimport re\n\n\ndef get_component_match_facts(iw, component_matcher, valid_labels):\n '''High level abstrac"
},
{
"path": "ansibullbot/plugins/contributors.py",
"chars": 660,
"preview": "import logging\n\n\ndef get_contributor_facts(issuewrapper):\n\n # https://github.com/blog/2397-making-it-easier-to-grow-c"
},
{
"path": "ansibullbot/plugins/cross_references.py",
"chars": 896,
"preview": "def get_cross_reference_facts(issuewrapper):\n\n iw = issuewrapper\n\n crfacts = {\n 'has_pr': False,\n 'h"
},
{
"path": "ansibullbot/plugins/deprecation.py",
"chars": 709,
"preview": "import os\n\n\ndef get_deprecation_facts(meta):\n # https://github.com/ansible/ansibullbot/issues/29\n\n deprecated = Fa"
},
{
"path": "ansibullbot/plugins/docs_info.py",
"chars": 9852,
"preview": "import ast\nimport dataclasses\nimport logging\nimport re\n\nimport requests\n\nDOCS_PATH_PATTERNS = [\n \"docs/\",\n \"exampl"
},
{
"path": "ansibullbot/plugins/filament.py",
"chars": 420,
"preview": "def get_filament_facts(issuewrapper, meta):\n # https://github.com/ansible/ansible/pull/26921\n\n iw = issuewrapper\n "
},
{
"path": "ansibullbot/plugins/label_commands.py",
"chars": 3002,
"preview": "def get_label_command_facts(iw, all_maintainers, maintainer_team=None, valid_labels=None):\n if valid_labels is None:\n"
},
{
"path": "ansibullbot/plugins/needs_contributor.py",
"chars": 1031,
"preview": "def get_needs_contributor_facts(events, botnames=None):\n if botnames is None:\n botnames = []\n needs_contrib"
},
{
"path": "ansibullbot/plugins/needs_info.py",
"chars": 4886,
"preview": "import datetime\nimport logging\n\nimport ansibullbot.constants as C\n\n\ndef is_needsinfo(iw, botnames=None):\n if botnames"
},
{
"path": "ansibullbot/plugins/needs_revision.py",
"chars": 17735,
"preview": "import datetime\nimport logging\n\nfrom ansibullbot.exceptions import NoCIError\nfrom ansibullbot.plugins.shipit import is_a"
},
{
"path": "ansibullbot/plugins/notifications.py",
"chars": 1904,
"preview": "import logging\n\n\ndef get_notification_facts(issuewrapper, meta, botmeta=None):\n '''Build facts about mentions/pings''"
},
{
"path": "ansibullbot/plugins/shipit.py",
"chars": 16177,
"preview": "import itertools\nimport logging\nfrom fnmatch import fnmatch\n\n\ndef replace_ansible(maintainers, ansible_members, bots=Non"
},
{
"path": "ansibullbot/plugins/small_patch.py",
"chars": 1735,
"preview": "import re\n\n\nFILE_MAX_CHANGED_LINES = 6\nSMALL_CHUNKS_MAX_COUNT = 2\n\nRE_CHUNK = r'@@ -\\d+,\\d+ \\+\\d+,\\d+ @@'\n\n\nclass Commit"
},
{
"path": "ansibullbot/plugins/spam.py",
"chars": 1180,
"preview": "import ansibullbot.constants as C\n\n\ndef get_spam_facts(issuewrapper):\n\n iw = issuewrapper\n\n sfacts = {\n 'sp"
},
{
"path": "ansibullbot/plugins/test_support_plugins.py",
"chars": 1277,
"preview": "import os.path\n\n\n# https://github.com/ansible/ansible/pull/46028\n# https://github.com/ansible/ansible/pull/68449\n# https"
},
{
"path": "ansibullbot/plugins/traceback.py",
"chars": 470,
"preview": "import re\n\n\nRE_FILE_LINE = r'file \"(.*)\", line \\d+, in'\n\n\ndef get_traceback_facts(iw):\n tfacts = {\n 'has_trace"
},
{
"path": "ansibullbot/utils/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "ansibullbot/utils/botmetadata.py",
"chars": 7621,
"preview": "import logging\nimport os\n\nfrom string import Template\n\nimport yaml\n\nfrom ansibullbot._text_compat import to_text\n\n\n\n# ht"
},
{
"path": "ansibullbot/utils/component_tools.py",
"chars": 63411,
"preview": "import copy\nimport difflib\nimport json\nimport logging\nimport os\nimport re\n\nfrom collections import OrderedDict\n\nfrom ans"
},
{
"path": "ansibullbot/utils/extractors.py",
"chars": 22397,
"preview": "import logging\nimport operator\nimport re\nfrom string import Template\n\nimport yaml\n\nimport ansibullbot.constants as C\nfro"
},
{
"path": "ansibullbot/utils/feature_flags.py",
"chars": 522,
"preview": "import yaml\n\n\n__metadata__ = type\n\n\nclass FeatureFlags:\n def __init__(self, config_obj):\n self._flags = config"
},
{
"path": "ansibullbot/utils/galaxy.py",
"chars": 7551,
"preview": "import datetime\nimport json\nimport logging\nimport os\n\nimport requests\n\nfrom ansibullbot.utils.timetools import strip_tim"
},
{
"path": "ansibullbot/utils/gh_gql_client.py",
"chars": 12488,
"preview": "import json\nimport logging\nimport time\n\nfrom collections import defaultdict\nfrom operator import itemgetter\nfrom string "
},
{
"path": "ansibullbot/utils/git_tools.py",
"chars": 10599,
"preview": "import logging\nimport os\nimport shutil\nimport tarfile\nimport tempfile\n\nimport requests\n\nfrom ansibullbot._text_compat im"
},
{
"path": "ansibullbot/utils/github.py",
"chars": 8085,
"preview": "# https://github.com/octokit/octokit.net/issues/638#issuecomment-67795998\n\n# FIXME\n# - [Errno -5] No address associate"
},
{
"path": "ansibullbot/utils/logs.py",
"chars": 638,
"preview": "import logging\nimport logging.handlers\n\n\ndef set_logger(debug=False, logfile=None):\n root_logger = logging.getLogger("
},
{
"path": "ansibullbot/utils/moduletools.py",
"chars": 19601,
"preview": "import copy\nimport logging\nimport os\nimport pickle\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy import Column\nf"
},
{
"path": "ansibullbot/utils/net_tools.py",
"chars": 807,
"preview": "import logging\nimport time\n\nimport requests\n\n\n# FIXME should we only retry 5xx?\n_DONT_RETRY_STATUSES = [\n 200, # OK\n"
},
{
"path": "ansibullbot/utils/receiver_client.py",
"chars": 3264,
"preview": "import logging\n\nimport requests\n\nimport ansibullbot.constants as C\n\n\ndef post_to_receiver(path, params, data):\n if no"
},
{
"path": "ansibullbot/utils/sentry.py",
"chars": 352,
"preview": "from .. import constants\nimport sentry_sdk\n\n\ndef initialize_sentry():\n sentry_sdk.init(\n dsn=constants.DEFAULT"
},
{
"path": "ansibullbot/utils/sqlite_utils.py",
"chars": 7684,
"preview": "import json\nimport logging\nimport os\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy import Column\nfrom sqlalchemy"
},
{
"path": "ansibullbot/utils/systemtools.py",
"chars": 434,
"preview": "import copy\nimport os\nimport subprocess\n\n\ndef run_command(cmd, cwd=None, env=None):\n if env:\n _env = copy.deep"
},
{
"path": "ansibullbot/utils/timetools.py",
"chars": 1039,
"preview": "import datetime\nimport logging\n\nfrom ansibullbot._text_compat import to_text\n\n\ndef strip_time_safely(tstring):\n \"\"\"Tr"
},
{
"path": "ansibullbot/utils/version_tools.py",
"chars": 11472,
"preview": "import os\nimport re\nimport typing as t\n\nfrom ansibullbot._text_compat import to_text\nfrom ansibullbot.utils.systemtools "
},
{
"path": "azure-pipelines.yml",
"chars": 1530,
"preview": "trigger:\n - devel\n\npr:\n - devel\n\npool:\n vmImage: 'ubuntu-20.04'\n\njobs:\n - job: python\n displayName: Python\n va"
},
{
"path": "ci_output/codecoverage/.gitdir",
"chars": 0,
"preview": ""
},
{
"path": "ci_output/testresults/.gitdir",
"chars": 0,
"preview": ""
},
{
"path": "constraints.txt",
"chars": 1443,
"preview": "#\n# This file is autogenerated by pip-compile with python 3.11\n# To update, run:\n#\n# pip-compile --output-file=constr"
},
{
"path": "docs/collection_migration.md",
"chars": 6118,
"preview": "# COLLECTIONS\n\n## What is this all about?\n\nHere are some links to read through and familiarize yourself with ...\n\n* [Blo"
},
{
"path": "docs/contribution_tips.md",
"chars": 2702,
"preview": "# Contribution Tips\n\n## Who is this document for?\n\nAnyone who wants to write a patch for the bot\n\n## Development Setup\n\n"
},
{
"path": "features.yaml",
"chars": 29,
"preview": "close_missing_ref_prs: false\n"
},
{
"path": "playbooks/bot-on-dev.yml",
"chars": 504,
"preview": "- name: Stop prod bot\n hosts: ansibullbot.eng.ansible.com\n become: yes\n\n tasks:\n - name: PROD | Stop ansibullbot s"
},
{
"path": "playbooks/bot-on-prod.yml",
"chars": 505,
"preview": "- name: Stop dev bot\n hosts: ansibullbot-dev\n become: yes\n\n tasks:\n - name: DEV | Stop ansibullbot services\n "
},
{
"path": "playbooks/files/centos7.vimrc",
"chars": 2118,
"preview": "if v:lang =~ \"utf8$\" || v:lang =~ \"UTF-8$\"\n set fileencodings=ucs-bom,utf-8,latin1\nendif\n\nset nocompatible\t\" Use Vim d"
},
{
"path": "playbooks/group_vars/all.yml",
"chars": 1947,
"preview": "yumcron_download_updates: 'yes'\nyumcron_apply_updates:\n hourly: 'no'\n daily: 'yes'\n\n# Settings for EC2 instance\nbotins"
},
{
"path": "playbooks/group_vars/ansibullbot.yml",
"chars": 6002,
"preview": "ansible_user: centos\nansibullbot_receiver_enabled: true\n\nansibullbot_github_password: !vault |\n $ANSIBLE_VAULT;"
},
{
"path": "playbooks/group_vars/tower.yml",
"chars": 1020,
"preview": "firewall_allowed_tcp_ports:\n - 22\n - 443\n - 80\n\nauthorized_keys:\n - user: centos\n comment: mkrizek@x240\n key:\n"
},
{
"path": "playbooks/host_vars/ansibullbot-dev.eng.ansible.com.yml",
"chars": 2915,
"preview": "ansibullbot_fqdn: ansibullbot-dev.eng.ansible.com\nansibullbot_sentry_server_name: ansibullbot-dev\nansibullbot_sentry_env"
},
{
"path": "playbooks/host_vars/ansibullbot.eng.ansible.com.yml",
"chars": 2617,
"preview": "ansibullbot_sentry_server_name: ansibullbot\nansibullbot_fqdn: ansibullbot.eng.ansible.com\nansibullbot_ci_provider: azp\na"
},
{
"path": "playbooks/hosts.yml",
"chars": 168,
"preview": "ansibullbot:\n hosts:\n ansibullbot.eng.ansible.com:\n ansibullbot-dev.eng.ansible.com:\n\ntower:\n hosts:\n tower2."
},
{
"path": "playbooks/requirements.yml",
"chars": 189,
"preview": "collections:\n - name: amazon.aws\n version: 1.5.0\n\n - name: ansible.posix\n version: 1.3.0\n\n - name: community.aw"
},
{
"path": "playbooks/setup-ansibullbot-dev.yml",
"chars": 557,
"preview": "- name: Create dev ansibullbot instance\n hosts: localhost\n connection: local\n gather_facts: no\n become: no\n\n vars:\n"
},
{
"path": "playbooks/setup-ansibullbot.yml",
"chars": 361,
"preview": "- name: Create ansibullbot instance\n hosts: localhost\n connection: local\n gather_facts: no\n become: no\n\n roles:\n "
},
{
"path": "playbooks/teardown-ansibullbot-dev.yml",
"chars": 421,
"preview": "- name: Teardown dev ansibullbot instance\n hosts: localhost\n connection: local\n gather_facts: no\n become: no\n\n vars"
},
{
"path": "playbooks/tower.yml",
"chars": 230,
"preview": "- hosts: tower\n become: yes\n\n tasks:\n - name: Set hostname\n hostname:\n name: \"{{ inventory_hostname }}\""
},
{
"path": "playbooks/update-ansibullbot.yml",
"chars": 129,
"preview": "- name: Update Ansibullbot\n hosts: ansibullbot\n become: yes\n\n roles:\n - role: ansibullbot\n ansibullbot_action"
},
{
"path": "playbooks/vagrant.yml",
"chars": 207,
"preview": "- name: Install ansibullbot\n hosts: ansibullbot.eng.ansible.com\n become: yes\n\n roles:\n - repo_epel\n - yum_cron\n"
},
{
"path": "pytest.ini",
"chars": 2061,
"preview": "[pytest]\naddopts =\n # `pytest-xdist` == -n auto:\n #--numprocesses=auto # FIXME: uncomment once available\n\n # s"
},
{
"path": "requirements.txt",
"chars": 74,
"preview": "-c constraints.txt\n\njinja2\npygithub\nPyYAML\nrequests\nsentry-sdk\nsqlalchemy\n"
},
{
"path": "scripts/ansibot_actions.py",
"chars": 2938,
"preview": "#!/usr/bin/env python\n\nimport datetime\nimport pprint\n\nimport requests\n\nfrom bson.json_util import loads\n\n\nVALID_ACTIONS "
},
{
"path": "scripts/ansibot_receiver.py",
"chars": 13702,
"preview": "#!/usr/bin/env python\n\n# $ curl -v -X POST --header \"Content-Type: application/json\" -d@summaries.json 'http://localhost"
},
{
"path": "scripts/ansibot_status.cgi",
"chars": 4989,
"preview": "#!/usr/bin/env python3\n# Ansible managed. Any local changes will be overwritten.\n\nimport glob\nimport pwd\nimport subproce"
},
{
"path": "scripts/generate_issues_by_file.py",
"chars": 4376,
"preview": "#!/usr/bin/env python\n\nimport json\nimport os\nimport sys\n\nfrom ansibullbot._text_compat import to_bytes\nfrom ansibullbot."
},
{
"path": "scripts/slack-notice.py",
"chars": 1330,
"preview": "#!/usr/bin/env python\n# Post message to Slack\n\nimport argparse\nimport os\nimport requests\n\ntry:\n import configparser\ne"
},
{
"path": "setup.py",
"chars": 478,
"preview": "# -*- coding: utf-8 -*-\n\nfrom setuptools import setup, find_packages\n\n\nwith open('README.md') as f:\n readme = f.read("
},
{
"path": "templates/bad_pr.j2",
"chars": 413,
"preview": "@{{ submitter }} This PR was evaluated as a potentially problematic PR for the following reasons:\n{% for reason in is_ba"
},
{
"path": "templates/bot_status.j2",
"chars": 1989,
"preview": "<details><summary><b>Components</b></summary>\n\n{% for component in component_matches %}\n{{ '[' + component['repo_filenam"
},
{
"path": "templates/collection_migration.j2",
"chars": 521,
"preview": "Thank you very much for your interest in Ansible. Ansible has migrated much of the content into separate repositories to"
},
{
"path": "templates/commit_msg_mentions.j2",
"chars": 363,
"preview": "@{{ submitter }} This PR contains `@` mentions in at least one commit message. Those mentions can cause cascading notifi"
},
{
"path": "templates/community_shipit_notify.j2",
"chars": 669,
"preview": "{{ '@' + (community_usernames | reject(\"equalto\", submitter) | join(' @')) }}\n\n\n{% if is_new_module %}\nAs a maintainer o"
},
{
"path": "templates/community_workgroups.j2",
"chars": 440,
"preview": "@{{ submitter }}, just so you are aware we have a dedicated Working Group for {{ wg['workgroup'] }}.\nYou can find other "
},
{
"path": "templates/components_banner.j2",
"chars": 770,
"preview": "Files identified in the description:\n{% if meta['component_matches'] %}\n{% for x in meta['component_matches'] %}\n{% if x"
},
{
"path": "templates/docs_team_info.j2",
"chars": 687,
"preview": "Thanks for your Ansible docs contribution! We talk about Ansible documentation on matrix at [#docs:ansible.im](https://m"
},
{
"path": "templates/fork.j2",
"chars": 98,
"preview": "@{{ submitter }} Please create a new PR using a branch in your fork.\n<!--- boilerplate: fork --->\n"
},
{
"path": "templates/incoming_ref_missing.j2",
"chars": 1793,
"preview": "Hello,\nwe're closing this pull request because the original PR-bound branch\nwent missing. (This PR's fork repository or "
},
{
"path": "templates/issue_missing_data.j2",
"chars": 798,
"preview": "@{{ submitter }}: Greetings! Thanks for taking the time to open this {{ itype }}. In order for the community to handle y"
},
{
"path": "templates/merge_commit_notify.j2",
"chars": 390,
"preview": "@{{ submitter }} this PR contains the following merge commits:\n\n{% for commit in merge_commits %}\n* {{ commit }}\n{% endf"
},
{
"path": "templates/multiple_module_notify.j2",
"chars": 412,
"preview": "@{{ submitter }} this PR contains more than one new module.\n\nPlease submit only one new module per pull request. For a d"
},
{
"path": "templates/needs_info_base.j2",
"chars": 787,
"preview": "{% if action == 'warn' %}\n{% if template_missing or template_missing_sections %}\n@{{ submitter }} This {{ itype }} is wa"
},
{
"path": "templates/notify.j2",
"chars": 220,
"preview": "{% if notify|length > 1 %}\ncc @{{ notify|join(' @') }}\n{% else %}\ncc @{{ notify[0] }}\n{% endif %}\n[click here for bot he"
},
{
"path": "templates/shippable_test_result.j2",
"chars": 484,
"preview": "{% for x in data %}\n{% for fd in x['contents']['failureDetails'] %}\n{{ fd['message'] }}\n```\n{{ fd['full'] }}\n```\n{% endf"
},
{
"path": "templates/test_support_plugins.j2",
"chars": 993,
"preview": "@{{ submitter }} The following file(s) in this pull request are bundled copies of modules used to support incidental tes"
},
{
"path": "templates/waiting_on_contributor_close.j2",
"chars": 1200,
"preview": "Thank you very much for your submission to Ansible. It means a lot to us that you've taken time to contribute.\n\nUnfortun"
},
{
"path": "test-requirements.txt",
"chars": 52,
"preview": "-r requirements.txt\n\npytest\npytest-cov\npytest-sugar\n"
},
{
"path": "tests/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/bin/ansibot-test",
"chars": 10293,
"preview": "#!/usr/bin/env python\n\n# docker build -t jctanner/githubsim -f github_sim_container/Dockerfile .\n# docker run -v $(pwd):"
},
{
"path": "tests/component/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/component/module_matching.py",
"chars": 1732,
"preview": "import json\nimport unittest\n\nfrom ansibullbot._text_compat import to_text\nfrom ansibullbot.ansibletriager import Ansible"
},
{
"path": "tests/component/test_idempotence.py",
"chars": 3310,
"preview": "import glob\nimport json\nimport logging\nimport os\n\nimport pytest\n\nfrom tests.utils.componentmocks import BotMockManager\n\n"
},
{
"path": "tests/component/test_supershipit.py",
"chars": 3939,
"preview": "import glob\nimport json\nimport os\n\nimport pytest\nimport yaml\n\nfrom tests.utils.componentmocks import BotMockManager\nfrom"
},
{
"path": "tests/fixtures/component_data/component_errors.json",
"chars": 55160,
"preview": "[\n {\n \"component\": \"dellos*_* network modules\", \n \"component_raw\": \"dellos*_* network modules\", \n \"expected\": "
},
{
"path": "tests/fixtures/component_data/component_expected_results.json",
"chars": 327841,
"preview": "{\n \"https://github.com/ansible/ansible/issues/10282\": [\n \"contrib/inventory\", \n \"lib/ansible/inventory\"\n ], \n \""
},
{
"path": "tests/fixtures/component_data/component_match_map.json",
"chars": 1686250,
"preview": "{\n \"\\\"/tmp/ansible_MuXmOF/ansible_module_sysctl.py\": [], \n \"'Realpath' filter.\": [\n \"lib/ansible/plugins/filter\"\n "
},
{
"path": "tests/fixtures/component_data/component_skip.json",
"chars": 5671,
"preview": "[\n \"https://github.com/ansible/ansible/issues/13406\", \n \"https://github.com/ansible/ansible/issues/17950\", \n \"https:/"
},
{
"path": "tests/fixtures/docs_info/0_issue.yml",
"chars": 551,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: sommersoft\ncreated"
},
{
"path": "tests/fixtures/docs_info/1_issue.yml",
"chars": 638,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: sommersoft\ncreated"
},
{
"path": "tests/fixtures/docs_info/2_issue.yml",
"chars": 675,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: sommersoft\ncreated"
},
{
"path": "tests/fixtures/docs_info/3_issue.yml",
"chars": 529,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: sommersoft\ncreated"
},
{
"path": "tests/fixtures/docs_info/4_issue.yml",
"chars": 718,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: sommersoft\ncreated"
},
{
"path": "tests/fixtures/docs_info/5_issue.yml",
"chars": 1449,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: sommersoft\ncreated"
},
{
"path": "tests/fixtures/docs_info/files/docsite_index.rst",
"chars": 21,
"preview": ".. toctree::\n api\n"
},
{
"path": "tests/fixtures/docs_info/files/hacks_fail_me.txt",
"chars": 12,
"preview": "please fail\n"
},
{
"path": "tests/fixtures/docs_info/files/lib_ansible_foo.py",
"chars": 279,
"preview": "DOCUMENTATION = '''\nThis is the documentation.\nEnjoy!\n'''\n\nEXAMPLES = '''\n- example1:\n foo: bar\n- example2:\n foo: "
},
{
"path": "tests/fixtures/issue_template_meta.json",
"chars": 4505307,
"preview": "{\n \"10445\": {\n \"ansible_version\": \"1.8.4\", \n \"body\": \"##### ISSUE TYPE\\r\\n- Bug Report\\r\\n\\r\\n##### COMPONENT NAM"
},
{
"path": "tests/fixtures/needs_contributor/0_issue.yml",
"chars": 518,
"preview": "html_url: https://github.com/ansible/ansible/pull/21313\nnumber: 21313\ngithub_repo: ansible\nsubmitter: mkrizek\ncreated_at"
},
{
"path": "tests/fixtures/needs_revision/0_issue.yml",
"chars": 1037,
"preview": "# https://api.github.com/repos/ansible/ansible/pull/21313/events\nhtml_url: https://github.com/ansible/ansible/pull/21313"
},
{
"path": "tests/fixtures/needs_revision/0_prstatus.json",
"chars": 4303,
"preview": "[\n {\n \"url\": \"https://api.github.com/repos/ansible/ansible/statuses/c19a95a315680a3f68b157054aa54c0ecc31ee28\",\n \""
},
{
"path": "tests/fixtures/needs_revision/0_reviews.json",
"chars": 6956,
"preview": "[\n {\n \"id\": 21412980,\n \"user\": {\n \"login\": \"robinro\",\n \"id\": 107032,\n \"avatar_url\": \"https://avata"
},
{
"path": "tests/fixtures/needs_revision/1_issue.yml",
"chars": 1025,
"preview": "# https://api.github.com/repos/ansible/ansible/pull/21313/events\nhtml_url: https://github.com/ansible/ansible/pull/21313"
},
{
"path": "tests/fixtures/needs_revision/1_reviews.json",
"chars": 4937,
"preview": "[\n {\n \"id\": 21412980,\n \"user\": {\n \"login\": \"robinro\",\n \"id\": 107032,\n \"avatar_url\": \"https://avata"
},
{
"path": "tests/fixtures/needs_revision/2_issue.yml",
"chars": 1163,
"preview": "# https://api.github.com/repos/ansible/ansible/pull/21313/events\nhtml_url: https://github.com/ansible/ansible/pull/21313"
},
{
"path": "tests/fixtures/rebuild/0_issue.yml",
"chars": 546,
"preview": "# https://api.github.com/repos/ansible/ansible/issues/68152/events\nhtml_url: https://github.com/ansible/ansible/pull/000"
},
{
"path": "tests/fixtures/rebuild/0_prstatus.json",
"chars": 4448,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32228/summary\",\n \"description\": \"Run 3"
},
{
"path": "tests/fixtures/rebuild/1_issue.yml",
"chars": 553,
"preview": "# https://api.github.com/repos/ansible/ansible/issues/68152/events\nhtml_url: https://github.com/ansible/ansible/pull/000"
},
{
"path": "tests/fixtures/rebuild/1_prstatus.json",
"chars": 4448,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32228/summary\",\n \"description\": \"Run 3"
},
{
"path": "tests/fixtures/rebuild/2_issue.yml",
"chars": 680,
"preview": "# https://api.github.com/repos/ansible/ansible/issues/68152/events\nhtml_url: https://github.com/ansible/ansible/pull/000"
},
{
"path": "tests/fixtures/rebuild/2_prstatus.json",
"chars": 4448,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32228/summary\",\n \"description\": \"Run 3"
},
{
"path": "tests/fixtures/rebuild/3_issue.yml",
"chars": 680,
"preview": "# https://api.github.com/repos/ansible/ansible/issues/68152/events\nhtml_url: https://github.com/ansible/ansible/pull/000"
},
{
"path": "tests/fixtures/rebuild/3_prstatus.json",
"chars": 4448,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32228/summary\",\n \"description\": \"Run 3"
},
{
"path": "tests/fixtures/rebuild_merge/0_issue.yml",
"chars": 934,
"preview": "# https://api.github.com/repos/ansible/ansible-modules-extras/issues/2562/events\nhtml_url: https://github.com/ansible/an"
},
{
"path": "tests/fixtures/rebuild_merge/0_prstatus.json",
"chars": 4447,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32228/summary\",\n \"description\": \"Run 3"
},
{
"path": "tests/fixtures/rebuild_merge/1_issue.yml",
"chars": 934,
"preview": "# https://api.github.com/repos/ansible/ansible-modules-extras/issues/2562/events\nhtml_url: https://github.com/ansible/an"
},
{
"path": "tests/fixtures/rebuild_merge/1_prstatus.json",
"chars": 1866,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32229\",\n \"description\": \"Run 32229 sta"
},
{
"path": "tests/fixtures/rebuild_merge/2_issue.yml",
"chars": 934,
"preview": "# https://api.github.com/repos/ansible/ansible-modules-extras/issues/2562/events\nhtml_url: https://github.com/ansible/an"
},
{
"path": "tests/fixtures/rebuild_merge/2_prstatus.json",
"chars": 1866,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32229\",\n \"description\": \"Run 32229 sta"
},
{
"path": "tests/fixtures/rebuild_merge/3_issue.yml",
"chars": 1046,
"preview": "# https://api.github.com/repos/ansible/ansible-modules-extras/issues/2562/events\nhtml_url: https://github.com/ansible/an"
},
{
"path": "tests/fixtures/rebuild_merge/3_prstatus.json",
"chars": 1866,
"preview": "[\n {\n \"target_url\": \"https://app.shippable.com/github/ansible/ansible/runs/32229\",\n \"description\": \"Run 32229 sta"
},
{
"path": "tests/fixtures/shipit/0_issue.yml",
"chars": 611,
"preview": "# https://api.github.com/repos/ansible/ansible/issues/21620/events\nhtml_url: https://github.com/ansible/ansible/pull/216"
},
{
"path": "tests/fixtures/shipit/0_prstatus.json",
"chars": 4303,
"preview": "[\n {\n \"url\": \"https://api.github.com/repos/ansible/ansible/statuses/c19a95a315680a3f68b157054aa54c0ecc31ee28\",\n \""
},
{
"path": "tests/fixtures/shipit/1_issue.yml",
"chars": 736,
"preview": "# https://api.github.com/repos/ansible/ansible/issues/21620/events\nhtml_url: https://github.com/ansible/ansible/pull/216"
},
{
"path": "tests/fixtures/shipit/1_prstatus.json",
"chars": 4303,
"preview": "[\n {\n \"url\": \"https://api.github.com/repos/ansible/ansible/statuses/c19a95a315680a3f68b157054aa54c0ecc31ee28\",\n \""
},
{
"path": "tests/fixtures/shipit/2_issue.yml",
"chars": 326,
"preview": "html_url: https://github.com/ansible/ansible/pull/123\nnumber: 123\ngithub_repo: ansible\nsubmitter: ElsA\ncreated_at: 2016-"
},
{
"path": "tests/fixtures/shipit/2_prstatus.json",
"chars": 3,
"preview": "[]\n"
},
{
"path": "tests/manual/group_errors.py",
"chars": 1138,
"preview": "#!/usr/bin/env python\n\n\nimport json\nfrom fuzzywuzzy import fuzz\nfrom pprint import pprint\n\n\ndef get_matches(errors, patt"
},
{
"path": "tests/manual/matching_test.py",
"chars": 17153,
"preview": "#!/usr/bin/env python\n\n\nimport json\nimport glob\nimport logging\nimport os\nimport sys\nimport tempfile\n\nimport ansibullbot."
},
{
"path": "tests/unit/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/decorators/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/decorators/test_github.py",
"chars": 1533,
"preview": "from unittest.mock import patch\n\nfrom ansibullbot.utils.github import get_rate_limit\n\n\nclass RequestsResponseMock:\n u"
},
{
"path": "tests/unit/parsers/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/parsers/metadata_1.yml",
"chars": 48873,
"preview": "files: \n $modules/cloud/amazon/:\n notify:\n - willthames\n $modules/cloud/amazon/aws_api_gateway.py: mikedlr willt"
},
{
"path": "tests/unit/parsers/test_bot_metadata_parser.py",
"chars": 8238,
"preview": "import os\nimport shutil\nimport unittest\n\nfrom ansibullbot.utils.botmetadata import BotMetadataParser\n\nEXAMPLE1 = \"\"\"\n---"
},
{
"path": "tests/unit/triagers/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/triagers/plugins/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/triagers/plugins/test_automerge.py",
"chars": 4009,
"preview": "import unittest\n\nfrom ansibullbot.plugins.shipit import get_automerge_facts\n\n\nclass HistoryWrapperMock:\n history = No"
},
{
"path": "tests/unit/triagers/plugins/test_docs_info.py",
"chars": 1668,
"preview": "import pytest\n\nfrom ansibullbot.plugins.docs_info import get_docs_facts\nfrom tests.utils.issue_mock import IssueMock\n\nda"
},
{
"path": "tests/unit/triagers/plugins/test_needs_contributor.py",
"chars": 985,
"preview": "from ansibullbot.plugins.needs_contributor import get_needs_contributor_facts\n\n\ndef test_needs_contributor_command():\n "
},
{
"path": "tests/unit/triagers/plugins/test_needs_info.py",
"chars": 7086,
"preview": "import datetime\nimport tempfile\nimport unittest\n\nfrom unittest import mock\n\nfrom ansibullbot.historywrapper import Histo"
},
{
"path": "tests/unit/triagers/plugins/test_needs_revision.py",
"chars": 10332,
"preview": "import datetime\nimport json\nfrom unittest import TestCase, mock\n\nimport github\n\nfrom tests.utils.helpers import get_issu"
},
{
"path": "tests/unit/triagers/plugins/test_notifications.py",
"chars": 1052,
"preview": "import pytest\n\nfrom ansibullbot.plugins.notifications import get_notification_facts\nfrom tests.utils.helpers import get_"
},
{
"path": "tests/unit/triagers/plugins/test_rebuild.py",
"chars": 3181,
"preview": "import pytest\n\nfrom ansibullbot.plugins.ci_rebuild import get_rebuild_command_facts\n\nfrom tests.utils.helpers import get"
},
{
"path": "tests/unit/triagers/plugins/test_rebuild_merge.py",
"chars": 3002,
"preview": "import pytest\n\nfrom ansibullbot.plugins.ci_rebuild import get_rebuild_merge_facts\nfrom tests.utils.helpers import get_is"
},
{
"path": "tests/unit/triagers/plugins/test_shipit.py",
"chars": 34837,
"preview": "import copy\nimport shutil\nimport tempfile\nimport unittest\n\nfrom collections import namedtuple\n\nimport pytest\n\nfrom tests"
},
{
"path": "tests/unit/utils/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/utils/test_component_tools.py",
"chars": 25534,
"preview": "import shutil\nimport tempfile\nfrom unittest import TestCase\n\nimport pytest\n\nfrom ansibullbot.utils.component_tools impor"
},
{
"path": "tests/unit/utils/test_extractors_pr_number.py",
"chars": 998,
"preview": "import pytest\n\nfrom ansibullbot.utils.extractors import extract_pr_number_from_comment\n\n\n@pytest.mark.parametrize('test_"
},
{
"path": "tests/unit/utils/test_githubid_extractor.py",
"chars": 2230,
"preview": "import unittest\n\nfrom ansibullbot.utils.extractors import ModuleExtractor\n\n\nclass TestGitHubIdExtractor(unittest.TestCas"
},
{
"path": "tests/unit/utils/test_sqlite_tools.py",
"chars": 2065,
"preview": "import os\nimport tempfile\n\nfrom unittest import mock\n\nfrom ansibullbot.utils.sqlite_utils import AnsibullbotDatabase\n\n\nd"
},
{
"path": "tests/unit/utils/test_template_extractor.py",
"chars": 5679,
"preview": "import unittest\nfrom ansibullbot.utils.extractors import extract_template_data\n\n\nclass TestTemplateExtraction(unittest.T"
},
{
"path": "tests/unit/utils/test_template_extractor_simple.py",
"chars": 1896,
"preview": "import unittest\nfrom ansibullbot.utils.extractors import extract_template_data\n\n\nclass TestTemplateExtractionSimple(unit"
},
{
"path": "tests/unit/utils/test_time_tools.py",
"chars": 866,
"preview": "import pytest\n\nfrom unittest import TestCase\nfrom ansibullbot.utils.timetools import strip_time_safely\n\n\nclass TestTimeS"
},
{
"path": "tests/unit/wrappers/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/unit/wrappers/test_history_wrapper.py",
"chars": 3188,
"preview": "import datetime\nimport tempfile\n\nimport pytest\n\nfrom ansibullbot.historywrapper import HistoryWrapper\n\n\ndef test_get_com"
},
{
"path": "tests/utils/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/utils/componentmocks.py",
"chars": 49328,
"preview": "import datetime\nimport json\nimport os\nimport shutil\nimport subprocess\nimport tempfile\nimport uuid\n\nfrom unittest.mock im"
},
{
"path": "tests/utils/helpers.py",
"chars": 1178,
"preview": "from contextlib import contextmanager\nimport shutil\nimport tempfile\n\nfrom tests.utils.issue_mock import IssueMock\nfrom t"
},
{
"path": "tests/utils/issue_mock.py",
"chars": 5938,
"preview": "from operator import attrgetter\nimport yaml\n\n\nclass ActorMock:\n id = None\n login = None\n\n\nclass CommitterMock:\n "
},
{
"path": "tests/utils/repo_mock.py",
"chars": 648,
"preview": "from collections import namedtuple\n\n\nclass SubRepo:\n def __init__(self, assignees=None):\n self.assignees = ass"
},
{
"path": "tox.ini",
"chars": 446,
"preview": "[tox]\nminversion = 3.6.0\nenvlist = python\nskipdist = True\nskip_missing_interpreters = True\n\n[testenv]\nusedevelop = False"
},
{
"path": "triage_ansible.py",
"chars": 1310,
"preview": "#!/usr/bin/env python\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modi"
},
{
"path": "triage_ansible_mp.py",
"chars": 2530,
"preview": "#!/usr/bin/env python\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modi"
}
]
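The condensed preview above is plain JSON (one object per file with path, chars, and preview keys), so it can be processed directly. A minimal sketch, assuming the array has been saved to a hypothetical file named repo_preview.json, that lists the ten largest files by character count:

    import json

    with open('repo_preview.json') as f:
        entries = json.load(f)

    # Each entry has 'path', 'chars' and 'preview'; sort by size, largest first.
    for entry in sorted(entries, key=lambda e: e['chars'], reverse=True)[:10]:
        print(f"{entry['chars']:>9,}  {entry['path']}")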
About this extraction
The extraction covers 199 files (7.2 MB, roughly 1.9M tokens) of the ansible/ansibullbot GitHub repository, formatted as plain text, together with a symbol index of 706 functions, classes, methods, constants, and types.