main 0cf790ea573a cached
425 files
1.1 MB
333.7k tokens
110 symbols
1 requests
Download .txt
Showing preview only (1,242K chars total). Download the full file or copy to clipboard to get everything.
Repository: aws-samples/aws-auto-inventory
Branch: main
Commit: 0cf790ea573a
Files: 425
Total size: 1.1 MB

Directory structure:
gitextract_nae5be8a/

├── .devcontainer/
│   ├── README.md
│   ├── config.yaml
│   ├── devcontainer.json
│   └── post-create.sh
├── .github/
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.md
│   │   ├── feature_request.md
│   │   └── question.md
│   ├── actions/
│   │   └── .keep
│   ├── pull_request_template.md
│   └── workflows/
│       ├── build.yml
│       ├── codeql-analysis.yml
│       ├── hygiene.yml
│       ├── publish.yml
│       └── release.yml
├── .gitignore
├── .gitmodules
├── .pre-commit-config.yaml
├── .vscode/
│   ├── launch.json
│   └── tasks.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── NOTICE
├── README.md
├── SECURITY.md
├── aws-auto-inventory-unified-architecture.md
├── aws_auto_inventory/
│   ├── __init__.py
│   ├── cli.py
│   ├── config/
│   │   ├── __init__.py
│   │   ├── loader.py
│   │   ├── models.py
│   │   └── validator.py
│   ├── core/
│   │   ├── __init__.py
│   │   ├── aws_client.py
│   │   ├── organization.py
│   │   ├── region.py
│   │   ├── scan_engine.py
│   │   └── service.py
│   └── utils/
│       ├── __init__.py
│       ├── logging.py
│       └── threading.py
├── doc/
│   └── habits.yaml
├── examples/
│   ├── config_example.json
│   ├── config_example.yaml
│   └── config_organization_example.yaml
├── organization_scanner.py
├── requirements-and-versions.txt
├── requirements.txt
├── scan/
│   └── sample/
│       ├── all_services.json
│       ├── list_emr_clusters_id_name.json
│       ├── list_of_bucket_names.json
│       ├── running_ec2.json
│       ├── running_ec2_names.json
│       ├── s3_buckets.json
│       ├── services/
│       │   ├── accessanalyzer.json
│       │   ├── account.json
│       │   ├── acm-pca.json
│       │   ├── acm.json
│       │   ├── alexaforbusiness.json
│       │   ├── amp.json
│       │   ├── amplify.json
│       │   ├── amplifybackend.json
│       │   ├── amplifyuibuilder.json
│       │   ├── apigateway.json
│       │   ├── apigatewaymanagementapi.json
│       │   ├── apigatewayv2.json
│       │   ├── appconfig.json
│       │   ├── appconfigdata.json
│       │   ├── appfabric.json
│       │   ├── appflow.json
│       │   ├── appintegrations.json
│       │   ├── application-autoscaling.json
│       │   ├── application-insights.json
│       │   ├── applicationcostprofiler.json
│       │   ├── appmesh.json
│       │   ├── apprunner.json
│       │   ├── appstream.json
│       │   ├── appsync.json
│       │   ├── arc-zonal-shift.json
│       │   ├── athena.json
│       │   ├── auditmanager.json
│       │   ├── autoscaling-plans.json
│       │   ├── autoscaling.json
│       │   ├── backup-gateway.json
│       │   ├── backup.json
│       │   ├── backupstorage.json
│       │   ├── batch.json
│       │   ├── billingconductor.json
│       │   ├── braket.json
│       │   ├── budgets.json
│       │   ├── ce.json
│       │   ├── chime-sdk-identity.json
│       │   ├── chime-sdk-media-pipelines.json
│       │   ├── chime-sdk-meetings.json
│       │   ├── chime-sdk-messaging.json
│       │   ├── chime-sdk-voice.json
│       │   ├── chime.json
│       │   ├── cleanrooms.json
│       │   ├── cloud9.json
│       │   ├── cloudcontrol.json
│       │   ├── clouddirectory.json
│       │   ├── cloudformation.json
│       │   ├── cloudfront.json
│       │   ├── cloudhsm.json
│       │   ├── cloudhsmv2.json
│       │   ├── cloudsearch.json
│       │   ├── cloudsearchdomain.json
│       │   ├── cloudtrail-data.json
│       │   ├── cloudtrail.json
│       │   ├── cloudwatch.json
│       │   ├── codeartifact.json
│       │   ├── codebuild.json
│       │   ├── codecatalyst.json
│       │   ├── codecommit.json
│       │   ├── codedeploy.json
│       │   ├── codeguru-reviewer.json
│       │   ├── codeguru-security.json
│       │   ├── codeguruprofiler.json
│       │   ├── codepipeline.json
│       │   ├── codestar-connections.json
│       │   ├── codestar-notifications.json
│       │   ├── codestar.json
│       │   ├── cognito-identity.json
│       │   ├── cognito-idp.json
│       │   ├── cognito-sync.json
│       │   ├── comprehend.json
│       │   ├── comprehendmedical.json
│       │   ├── compute-optimizer.json
│       │   ├── config.json
│       │   ├── connect-contact-lens.json
│       │   ├── connect.json
│       │   ├── connectcampaigns.json
│       │   ├── connectcases.json
│       │   ├── connectparticipant.json
│       │   ├── controltower.json
│       │   ├── cur.json
│       │   ├── customer-profiles.json
│       │   ├── databrew.json
│       │   ├── dataexchange.json
│       │   ├── datapipeline.json
│       │   ├── datasync.json
│       │   ├── dax.json
│       │   ├── detective.json
│       │   ├── devicefarm.json
│       │   ├── devops-guru.json
│       │   ├── directconnect.json
│       │   ├── discovery.json
│       │   ├── dlm.json
│       │   ├── dms.json
│       │   ├── docdb-elastic.json
│       │   ├── docdb.json
│       │   ├── drs.json
│       │   ├── ds.json
│       │   ├── dynamodb.json
│       │   ├── dynamodbstreams.json
│       │   ├── ebs.json
│       │   ├── ec2-instance-connect.json
│       │   ├── ec2.json
│       │   ├── ecr-public.json
│       │   ├── ecr.json
│       │   ├── ecs.json
│       │   ├── efs.json
│       │   ├── eks.json
│       │   ├── elastic-inference.json
│       │   ├── elasticache.json
│       │   ├── elasticbeanstalk.json
│       │   ├── elastictranscoder.json
│       │   ├── elb.json
│       │   ├── elbv2.json
│       │   ├── emr-containers.json
│       │   ├── emr-serverless.json
│       │   ├── emr.json
│       │   ├── entityresolution.json
│       │   ├── es.json
│       │   ├── events.json
│       │   ├── evidently.json
│       │   ├── finspace-data.json
│       │   ├── finspace.json
│       │   ├── firehose.json
│       │   ├── fis.json
│       │   ├── fms.json
│       │   ├── forecast.json
│       │   ├── forecastquery.json
│       │   ├── frauddetector.json
│       │   ├── fsx.json
│       │   ├── gamelift.json
│       │   ├── gamesparks.json
│       │   ├── glacier.json
│       │   ├── globalaccelerator.json
│       │   ├── glue.json
│       │   ├── grafana.json
│       │   ├── greengrass.json
│       │   ├── greengrassv2.json
│       │   ├── groundstation.json
│       │   ├── guardduty.json
│       │   ├── health.json
│       │   ├── healthlake.json
│       │   ├── honeycode.json
│       │   ├── iam.json
│       │   ├── identitystore.json
│       │   ├── imagebuilder.json
│       │   ├── importexport.json
│       │   ├── inspector.json
│       │   ├── inspector2.json
│       │   ├── internetmonitor.json
│       │   ├── iot-data.json
│       │   ├── iot-jobs-data.json
│       │   ├── iot-roborunner.json
│       │   ├── iot.json
│       │   ├── iot1click-devices.json
│       │   ├── iot1click-projects.json
│       │   ├── iotanalytics.json
│       │   ├── iotdeviceadvisor.json
│       │   ├── iotevents-data.json
│       │   ├── iotevents.json
│       │   ├── iotfleethub.json
│       │   ├── iotfleetwise.json
│       │   ├── iotsecuretunneling.json
│       │   ├── iotsitewise.json
│       │   ├── iotthingsgraph.json
│       │   ├── iottwinmaker.json
│       │   ├── iotwireless.json
│       │   ├── ivs-realtime.json
│       │   ├── ivs.json
│       │   ├── ivschat.json
│       │   ├── kafka.json
│       │   ├── kafkaconnect.json
│       │   ├── kendra-ranking.json
│       │   ├── kendra.json
│       │   ├── keyspaces.json
│       │   ├── kinesis-video-archived-media.json
│       │   ├── kinesis-video-media.json
│       │   ├── kinesis-video-signaling.json
│       │   ├── kinesis-video-webrtc-storage.json
│       │   ├── kinesis.json
│       │   ├── kinesisanalytics.json
│       │   ├── kinesisanalyticsv2.json
│       │   ├── kinesisvideo.json
│       │   ├── kms.json
│       │   ├── lakeformation.json
│       │   ├── lambda.json
│       │   ├── lex-models.json
│       │   ├── lex-runtime.json
│       │   ├── lexv2-models.json
│       │   ├── lexv2-runtime.json
│       │   ├── license-manager-linux-subscriptions.json
│       │   ├── license-manager-user-subscriptions.json
│       │   ├── license-manager.json
│       │   ├── lightsail.json
│       │   ├── location.json
│       │   ├── logs.json
│       │   ├── lookoutequipment.json
│       │   ├── lookoutmetrics.json
│       │   ├── lookoutvision.json
│       │   ├── m2.json
│       │   ├── machinelearning.json
│       │   ├── macie.json
│       │   ├── macie2.json
│       │   ├── managedblockchain-query.json
│       │   ├── managedblockchain.json
│       │   ├── marketplace-catalog.json
│       │   ├── marketplace-entitlement.json
│       │   ├── marketplacecommerceanalytics.json
│       │   ├── mediaconnect.json
│       │   ├── mediaconvert.json
│       │   ├── medialive.json
│       │   ├── mediapackage-vod.json
│       │   ├── mediapackage.json
│       │   ├── mediapackagev2.json
│       │   ├── mediastore-data.json
│       │   ├── mediastore.json
│       │   ├── mediatailor.json
│       │   ├── medical-imaging.json
│       │   ├── memorydb.json
│       │   ├── meteringmarketplace.json
│       │   ├── mgh.json
│       │   ├── mgn.json
│       │   ├── migration-hub-refactor-spaces.json
│       │   ├── migrationhub-config.json
│       │   ├── migrationhuborchestrator.json
│       │   ├── migrationhubstrategy.json
│       │   ├── mobile.json
│       │   ├── mq.json
│       │   ├── mturk.json
│       │   ├── mwaa.json
│       │   ├── neptune.json
│       │   ├── network-firewall.json
│       │   ├── networkmanager.json
│       │   ├── nimble.json
│       │   ├── oam.json
│       │   ├── omics.json
│       │   ├── opensearch.json
│       │   ├── opensearchserverless.json
│       │   ├── opsworks.json
│       │   ├── opsworkscm.json
│       │   ├── organizations.json
│       │   ├── osis.json
│       │   ├── outposts.json
│       │   ├── panorama.json
│       │   ├── payment-cryptography-data.json
│       │   ├── payment-cryptography.json
│       │   ├── personalize-events.json
│       │   ├── personalize-runtime.json
│       │   ├── personalize.json
│       │   ├── pi.json
│       │   ├── pinpoint-email.json
│       │   ├── pinpoint-sms-voice-v2.json
│       │   ├── pinpoint-sms-voice.json
│       │   ├── pinpoint.json
│       │   ├── pipes.json
│       │   ├── polly.json
│       │   ├── pricing.json
│       │   ├── privatenetworks.json
│       │   ├── proton.json
│       │   ├── qldb-session.json
│       │   ├── qldb.json
│       │   ├── quicksight.json
│       │   ├── ram.json
│       │   ├── rbin.json
│       │   ├── rds-data.json
│       │   ├── rds.json
│       │   ├── redshift-data.json
│       │   ├── redshift-serverless.json
│       │   ├── redshift.json
│       │   ├── rekognition.json
│       │   ├── resiliencehub.json
│       │   ├── resource-explorer-2.json
│       │   ├── resource-groups.json
│       │   ├── resourcegroupstaggingapi.json
│       │   ├── robomaker.json
│       │   ├── rolesanywhere.json
│       │   ├── route53-recovery-cluster.json
│       │   ├── route53-recovery-control-config.json
│       │   ├── route53-recovery-readiness.json
│       │   ├── route53.json
│       │   ├── route53domains.json
│       │   ├── route53resolver.json
│       │   ├── rum.json
│       │   ├── s3.json
│       │   ├── s3control.json
│       │   ├── s3outposts.json
│       │   ├── sagemaker-a2i-runtime.json
│       │   ├── sagemaker-edge.json
│       │   ├── sagemaker-featurestore-runtime.json
│       │   ├── sagemaker-geospatial.json
│       │   ├── sagemaker-metrics.json
│       │   ├── sagemaker-runtime.json
│       │   ├── sagemaker.json
│       │   ├── savingsplans.json
│       │   ├── scheduler.json
│       │   ├── schemas.json
│       │   ├── sdb.json
│       │   ├── secretsmanager.json
│       │   ├── securityhub.json
│       │   ├── securitylake.json
│       │   ├── serverlessrepo.json
│       │   ├── service-quotas.json
│       │   ├── servicecatalog-appregistry.json
│       │   ├── servicecatalog.json
│       │   ├── servicediscovery.json
│       │   ├── ses.json
│       │   ├── sesv2.json
│       │   ├── shield.json
│       │   ├── signer.json
│       │   ├── simspaceweaver.json
│       │   ├── sms-voice.json
│       │   ├── sms.json
│       │   ├── snow-device-management.json
│       │   ├── snowball.json
│       │   ├── sns.json
│       │   ├── sqs.json
│       │   ├── ssm-contacts.json
│       │   ├── ssm-incidents.json
│       │   ├── ssm-sap.json
│       │   ├── ssm.json
│       │   ├── sso-admin.json
│       │   ├── sso-oidc.json
│       │   ├── sso.json
│       │   ├── stepfunctions.json
│       │   ├── storagegateway.json
│       │   ├── sts.json
│       │   ├── support-app.json
│       │   ├── support.json
│       │   ├── swf.json
│       │   ├── synthetics.json
│       │   ├── textract.json
│       │   ├── timestream-query.json
│       │   ├── timestream-write.json
│       │   ├── tnb.json
│       │   ├── transcribe.json
│       │   ├── transfer.json
│       │   ├── translate.json
│       │   ├── verifiedpermissions.json
│       │   ├── voice-id.json
│       │   ├── vpc-lattice.json
│       │   ├── waf-regional.json
│       │   ├── waf.json
│       │   ├── wafv2.json
│       │   ├── wellarchitected.json
│       │   ├── wisdom.json
│       │   ├── workdocs.json
│       │   ├── worklink.json
│       │   ├── workmail.json
│       │   ├── workmailmessageflow.json
│       │   ├── workspaces-web.json
│       │   ├── workspaces.json
│       │   └── xray.json
│       └── tagged_emrs.json
├── scan.py
├── scan_builder.py
├── setup.py
├── test_requirements.txt
└── tests/
    ├── __init__.py
    ├── conftest.py
    ├── test_api_calls.py
    ├── test_config/
    │   ├── __init__.py
    │   └── test_loader.py
    ├── test_core/
    │   └── __init__.py
    ├── test_organization.py
    ├── test_organization_scanner.py
    ├── test_output/
    │   └── __init__.py
    ├── test_role_assumption.py
    └── test_service_scanning.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .devcontainer/README.md
================================================
```json
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/python
{
	"name": "Python 3",
	// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
	"image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
	"features": {
		"ghcr.io/devcontainers/features/aws-cli:1": {}
	},

	// Features to add to the dev container. More info: https://containers.dev/features.
	// "features": {},

	// Use 'forwardPorts' to make a list of ports inside the container available locally.
	// "forwardPorts": [],

	// Use 'postCreateCommand' to run commands after the container is created.
	"postCreateCommand": "pip3 install --user -r requirements.txt",

	// Configure tool-specific properties.
	// "customizations": {},

	// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
	// "remoteUser": "root"
}

```


================================================
FILE: .devcontainer/config.yaml
================================================
inventories:
  - name: learning
    aws:
      profile: your-aws-profile
      region:
        - us-east-1
    excel:
      transpose: true
    sheets:
      - name: EC2 # sheet name on Excel
        service: ec2 # the boto3 client of an AWS service
        function: describe_instances # the client method of the service defined above
        result_key: Reservations # [optional]: The first key of the response dict
      - name: EBS
        service: ec2
        function: describe_volumes
        result_key: Volumes
  - name: your-second-inventory
    aws:
      profile: your-second-aws-profile
      region:
        - us-east-1
    sheets:
      - name: EC2
        service: ec2
        function: describe_instances
        result_key: Reservations
      - name: EBS
        service: ec2
        function: describe_volumes
        result_key: Volumes


================================================
FILE: .devcontainer/devcontainer.json
================================================
{
  "customizations": {
    "vscode": {
      "extensions": [
        "ms-python.python",
        "ms-python.vscode-pylance"
      ]
    }
  },
  "features": {
    "ghcr.io/devcontainers/features/aws-cli:1": {}
  },
  "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
  "name": "AWS Auto Inventory",
  "postCreateCommand": "sh .devcontainer/post-create.sh"
}


================================================
FILE: .devcontainer/post-create.sh
================================================
#!/usr/bin/env bash
# Dev-container post-create hook: installs OS packages, git pre-commit hooks,
# and the project's Python dependencies, then prints container info.

# Fail fast: -e (exit on error), -u (error on unset vars), -o pipefail
# (a pipeline fails if any stage fails).
# BUG FIX: the original line was `set -eu pipefail`, which does NOT enable
# pipefail — it sets "$1" to the literal string "pipefail". The correct
# spelling is `set -euo pipefail`.
set -euo pipefail

# Keep apt from prompting interactively inside the container build.
export DEBIAN_FRONTEND=noninteractive

sudo apt-get update
sudo apt-get -y install --no-install-recommends \
	bash-completion \
	make

# Installs the repo's pre-commit hooks (target defined in the Makefile).
make pre-commit/install

pip3 install --user -r requirements.txt

clear
devcontainer-info


================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: 🐛 Bug Report
about: Create a new ticket for a bug.
labels: bug
---

<!-- Please search existing issues to avoid creating duplicates. -->

- Version:
- Local OS Version:
- Local chip architecture: <x86, arm64, Apple Silicon>
- Reproduces in: <environment, AWS Account ID>

Steps to Reproduce:

1.
2.


================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.md
================================================
---
name: 💡 Feature Request
about: Create a new ticket for a new feature request
labels: enhancement
---

Type your idea here.


================================================
FILE: .github/ISSUE_TEMPLATE/question.md
================================================
---
name: ❔ Question
about: What is your question about?
labels: question
---

Add more context here.


================================================
FILE: .github/actions/.keep
================================================


================================================
FILE: .github/pull_request_template.md
================================================
## 🧠 Pull Request

### Changes

<!-- What changes are being made? Is this change a bug fix or new functionality? Have you added a screenshot for UI tasks? -->

### Type of change

<!--
* Bug fix (non-breaking change which fixes an issue)
* New feature (non-breaking change which adds functionality)
* Breaking change (fix or feature that would cause existing functionality to not work as expected)
* Documentation update
* Examples (adding tests or stories)
-->

## Why

<!-- Why are these changes needed? A link to the Jira issue may be sufficient -->

## How (Optional)

<!-- How were these changes implemented? -->

## Checklist

<!-- Have you done all of these things?  -->
<!-- to check an item, place an "x" in the box like so: "- [x] Automated tests" -->

- [ ] Unit tests (as needed)
- [ ] Integration tests (as needed)
- [ ] Storybook stories (as needed)
- [ ] Run storybook locally
- [ ] Acceptance Criteria met
- [ ] Screenshot added to Summary for UI ticket
- [ ] Check test coverage of new or updated components (Reduce Banlist)

<!-- Also consider:
* A reference to a related issue, if any.
* @mentions of the person or team responsible for reviewing proposed changes.
* Label the pull request accordingly as `enhancement`, `bug`, etc.
-->


================================================
FILE: .github/workflows/build.yml
================================================
name: Build
on:
  push:
    branches:
    - 'feat/**'
    - 'fix/**'

permissions:
  contents: read

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        # Quoted: an unquoted 3.10 is parsed by YAML as the float 3.1.
        python-version: ["3.10"]
    steps:
    - uses: actions/checkout@v3

    - uses: actions/setup-python@v4
      with:
        # Reference the matrix value instead of a duplicated hard-coded
        # version (the matrix entry was previously declared but never used).
        python-version: ${{ matrix.python-version }}

    - run: pip install -r requirements.txt
    - run: pip install pyinstaller
    - run: pyinstaller --name aws-auto-inventory --onefile scan.py


================================================
FILE: .github/workflows/codeql-analysis.yml
================================================
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ main ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ main ]
  schedule:
    - cron: '0 1 * * 1'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
    - name: Checkout repository
      uses: actions/checkout@v3

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v2
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v2

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v2


================================================
FILE: .github/workflows/hygiene.yml
================================================
name: Hygiene

on:
  push:
    branches:
      - 'feat/**'
      - 'fix/**'

permissions:
  contents: read

jobs:
  code-hygiene:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - run: python -m pip install --upgrade pip
      - run: pip install pre-commit
      - run: pre-commit run --all-files

  # doc-hygiene:
  #   needs: code-hygiene
  #   runs-on: ubuntu-latest

  #   steps:
  #     - uses: actions/checkout@v3
  #       with:
  #         submodules: "true"

  #     - name: Build documentation
  #       run: make doc/build

  #     - name: Check documentation status
  #       run: git diff --exit-code # exit 1 if there's a difference between what was pushed with what was produced by doc/build.


================================================
FILE: .github/workflows/publish.yml
================================================
name: Publish
on:
  push:
    tags:
      - "v*.*.*"

permissions:
  contents: write

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        # Quoted so YAML does not coerce the version number; referenced below
        # instead of a conflicting hard-coded value (matrix previously said
        # 3.9 while setup-python pinned '3.10').
        python-version: ["3.10"]
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - run: python3 --version
      - run: pip install -r requirements.txt
      - run: pip install pyinstaller
      - run: pyinstaller --name aws-auto-inventory-${{ matrix.os }} --onefile scan.py

      # BUG FIX: upload must be v4 — artifacts uploaded with
      # actions/upload-artifact@v3 cannot be downloaded by the
      # actions/download-artifact@v4.1.7 steps in the release job below
      # (the v3 and v4 artifact backends are not interoperable).
      - uses: actions/upload-artifact@v4
        if: matrix.os == 'windows-latest'
        with:
          name: aws-auto-inventory-${{ matrix.os }}
          path: dist\aws-auto-inventory-windows-latest.exe

      - uses: actions/upload-artifact@v4
        if: matrix.os != 'windows-latest'
        with:
          name: aws-auto-inventory-${{ matrix.os }}
          path: ./dist/aws-auto-inventory-${{ matrix.os }}

  release:
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v4.1.7
        with:
          name: aws-auto-inventory-ubuntu-latest

      - uses: actions/download-artifact@v4.1.7
        with:
          name: aws-auto-inventory-macos-latest

      - uses: actions/download-artifact@v4.1.7
        with:
          name: aws-auto-inventory-windows-latest

      # Each download lands the single binary in the workspace root; rename
      # to the final release asset names.
      - run: mv aws-auto-inventory-ubuntu-latest aws-auto-inventory-ubuntu

      - run: mv aws-auto-inventory-macos-latest aws-auto-inventory-macos

      - run: mv aws-auto-inventory-windows-latest.exe aws-auto-inventory-windows.exe

      - name: Release
        uses: softprops/action-gh-release@v1
        if: startsWith(github.ref, 'refs/tags/')
        with:
          prerelease: true
          draft: true
          files: |
            aws-auto-inventory-ubuntu
            aws-auto-inventory-macos
            aws-auto-inventory-windows.exe
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


================================================
FILE: .github/workflows/release.yml
================================================
# Automated release management: runs release-please on every push to main
# to open/update a release PR and cut tagged releases.
on:
  push:
    branches:
      - main

name: Release
permissions:
  contents: write
  pull-requests: write

jobs:
  release-please:
    runs-on: ubuntu-latest
    steps:
      # release-please derives the next semantic version from conventional
      # commit messages and maintains the CHANGELOG.
      - uses: google-github-actions/release-please-action@v3
        with:
          release-type: python
          package-name: aws-auto-inventory
          # NOTE(review): uses a custom secrets.TOKEN rather than the default
          # GITHUB_TOKEN — presumably a PAT so the release PR can trigger
          # other workflows; confirm this secret exists in the repo settings.
          token: ${{ secrets.TOKEN }}

================================================
FILE: .gitignore
================================================
# Created by https://www.toptal.com/developers/gitignore/api/macos,windows,linux,visualstudiocode,python,node
# Edit at https://www.toptal.com/developers/gitignore?templates=macos,windows,linux,visualstudiocode,python,node

### Linux ###
*~

# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*

# KDE directory preferences
.directory

# Linux trash folder which might appear on any partition or disk
.Trash-*

# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon


# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### macOS Patch ###
# iCloud generated files
*.icloud

### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional stylelint cache
.stylelintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# vuepress v2.x temp and cache directory
.temp

# Docusaurus cache and generated files
.docusaurus

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

### Node Patch ###
# Serverless Webpack directories
.webpack/

# Optional stylelint cache

# SvelteKit build / generate output
.svelte-kit

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
#   For a library or package, you might want to ignore these files since the code is
#   intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# poetry
#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
#   This is especially recommended for binary packages to ensure reproducibility, and is more
#   commonly ignored for libraries.
#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
#   in version control.
#   https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
#  and can be added to the global gitignore or merged into this file.  For a more nuclear
#  option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide

### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db

# Dump file
*.stackdump

# Folder config file
[Dd]esktop.ini

# Recycle Bin used on file shares
$RECYCLE.BIN/

# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp

# Windows shortcuts
*.lnk

# End of https://www.toptal.com/developers/gitignore/api/macos,windows,linux,visualstudiocode,python,node
output/
logs/
*.xlsx


================================================
FILE: .gitmodules
================================================
[submodule "habits"]
	path = habits
	url = https://github.com/awslabs/aws-code-habits.git
	branch = main


================================================
FILE: .pre-commit-config.yaml
================================================
default_language_version:
  python: python3
fail_fast: true
minimum_pre_commit_version: 2.13.0
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
  rev: v4.4.0
  hooks:
    # Prevent giant files from being committed
    - id: check-added-large-files
    # Simply check whether the files parse as valid python.
    - id: check-ast
    # Require literal syntax when initializing empty or zero Python builtin types.
    - id: check-builtin-literals
    # Check for files that would conflict in case-insensitive filesystems
    - id: check-case-conflict
    # Checks a common error of defining a docstring after code.
    - id: check-docstring-first
    # Ensures that (non-binary) executables have a shebang.
    # - id: check-executables-have-shebangs
    # This hook checks json files for parseable syntax.
    - id: check-json
    # Ensures that (non-binary) files with a shebang are executable.
    - id: check-shebang-scripts-are-executable
    # This hook sets a standard for formatting JSON files.
    - id: pretty-format-json
      args: ['--autofix']
    # Check for files that contain merge conflict strings.
    - id: check-merge-conflict
    # Checks for symlinks which do not point to anything.
    - id: check-symlinks
    # This hook checks toml files for parseable syntax.
    - id: check-toml
    # Ensures that links to vcs websites are permalinks.
    - id: check-vcs-permalinks
    # This hook checks xml files for parseable syntax.
    - id: check-xml
    # This hook checks yaml files for parseable syntax.
    - id: check-yaml
    # Check for debugger imports and py37+ `breakpoint()` calls in python source.
    - id: debug-statements
    # Detects symlinks which are changed to regular files whose content is the path the symlink was pointing to.
    - id: destroyed-symlinks
    # Detects the presence of private keys
    - id: detect-private-key
    # Ensures that a file is either empty, or ends with one newline.
    - id: end-of-file-fixer
    # Sort the lines in specified files (defaults to alphabetical). You must provide a list of target files as input in your .pre-commit-config.yaml file.
    - id: file-contents-sorter
    # removes UTF-8 byte order marker
    - id: fix-byte-order-marker
    # Add # -*- coding: utf-8 -*- to the top of python files
    - id: fix-encoding-pragma
    # Prevent addition of new git submodules
    - id: forbid-new-submodules
    # Replaces or checks mixed line ending
    - id: mixed-line-ending
    # This verifies that test files are named correctly
    - id: name-tests-test
    # Sorts entries in requirements.txt
    - id: requirements-txt-fixer
    # Sorts simple YAML files which consist only of top-level keys, preserving comments and blocks.
    - id: sort-simple-yaml
    # This hook trims trailing whitespace.
    - id: trailing-whitespace
- repo: https://github.com/psf/black
  rev: '23.7.0'
  hooks:
    - id: black
# - repo: https://github.com/PyCQA/flake8
#   rev: '6.1.0'
#   hooks:
#     - id: flake8
#       args: ['--ignore=E501']
- repo: https://github.com/jendrikseipp/vulture
  rev: 'v2.9.1'  # or any later Vulture version
  hooks:
    - id: vulture
      args: ['scan.py', ]


================================================
FILE: .vscode/launch.json
================================================
{
  "configurations": [
    {
      "args": [
        "--name",
        "your-inventory-name"
      ],
      "console": "integratedTerminal",
      "name": "Empty",
      "program": "scan.py",
      "request": "launch",
      "type": "python"
    },
    {
      "args": [
        "--scan",
        "scan/sample/services/iam.json"
      ],
      "console": "integratedTerminal",
      "name": "Scan",
      "program": "scan.py",
      "request": "launch",
      "type": "python"
    }
  ],
  "version": "0.2.0"
}


================================================
FILE: .vscode/tasks.json
================================================
{
  "tasks": [
    {
      "command": "make build",
      "group": {
        "isDefault": true,
        "kind": "build"
      },
      "label": "Build",
      "problemMatcher": [],
      "type": "shell"
    }
  ],
  "version": "2.0.0"
}


================================================
FILE: CHANGELOG.md
================================================
# Changelog

## [1.1.1](https://github.com/aws-samples/aws-auto-inventory/compare/v1.1.0...v1.1.1) (2023-08-28)


### Bug Fixes

* log incorrect directory name ([a28cec7](https://github.com/aws-samples/aws-auto-inventory/commit/a28cec783ba51f4aa7c0604c01eb47fc1f9a21bc))

## [1.1.0](https://github.com/aws-samples/aws-auto-inventory/compare/v1.0.0...v1.1.0) (2023-08-03)


### Features

* get json from URL ([114fdf9](https://github.com/aws-samples/aws-auto-inventory/commit/114fdf9e78e76b202e43351051aa9bf2099ecf4b))

## [1.0.0](https://github.com/aws-samples/aws-auto-inventory/compare/v0.7.0...v1.0.0) (2023-08-03)


### ⚠ BREAKING CHANGES

* cleanup
* simplify business logic

### Features

* add argument max retry and retry delay ([5a15d5b](https://github.com/aws-samples/aws-auto-inventory/commit/5a15d5b5d426cf187d9a98c0f1e7a29305e35fdd))
* add info into log ([be25d7d](https://github.com/aws-samples/aws-auto-inventory/commit/be25d7de657e556490269bb65c8e60acf95ed738))
* add logging and handle get service data more effectively ([25b7fdd](https://github.com/aws-samples/aws-auto-inventory/commit/25b7fdd8d502784d31e871522b57ad4314420693))
* add more granularity to threading model ([b411dd9](https://github.com/aws-samples/aws-auto-inventory/commit/b411dd922900ea7646c57836bbda8ca0b09c6cfa))
* add threading ([0056396](https://github.com/aws-samples/aws-auto-inventory/commit/0056396d9174ec8432673bf96191780e9e5a7ed7))
* allow developer to set log level ([1301cd9](https://github.com/aws-samples/aws-auto-inventory/commit/1301cd9d939426418678fa756fc94160dacf7565))
* allow user to pass output directory path ([40bd611](https://github.com/aws-samples/aws-auto-inventory/commit/40bd611a6e319609c486ce1833c4e835b6cd867c))
* check aws credentials ([6496c9f](https://github.com/aws-samples/aws-auto-inventory/commit/6496c9f976f9c231d97f0607961b37ab471b5a65))
* cleanup ([7913af6](https://github.com/aws-samples/aws-auto-inventory/commit/7913af681b741d9eca7bec8946b4139d00025716))
* create script to build service sheet ([f418920](https://github.com/aws-samples/aws-auto-inventory/commit/f41892083e636e28ac06df85d527f19b8f3e0cc0))
* improve throttling ([b0c15b0](https://github.com/aws-samples/aws-auto-inventory/commit/b0c15b0721f7ae49e2514bff5fae16883e5aa781))
* include api call with retry ([f17887e](https://github.com/aws-samples/aws-auto-inventory/commit/f17887e11cc501546039d0cf1ef0eb55b9a00a75))
* include output file ([295064e](https://github.com/aws-samples/aws-auto-inventory/commit/295064eb83d9dd7f696fe8328ca8dc2e0b75b9fa))
* make executable ([23aab60](https://github.com/aws-samples/aws-auto-inventory/commit/23aab605794280d8b5423bce197046797b9673a4))
* save log inside output folder, allow user to provide regions ([d46f2df](https://github.com/aws-samples/aws-auto-inventory/commit/d46f2df8b06ee9f7c38f808916bae3949f9e47da))
* save per service ([e1670fd](https://github.com/aws-samples/aws-auto-inventory/commit/e1670fd2554ade17491c02b45cea73f4af41dc35))
* simplify business logic ([1937c75](https://github.com/aws-samples/aws-auto-inventory/commit/1937c75fe97e5f3fce6e29eb66245ceb24d09194))
* use latest python dev container image ([d0e3a70](https://github.com/aws-samples/aws-auto-inventory/commit/d0e3a70f4fb6c947ce5d7a1271de7a845d20c133))


### Bug Fixes

* serialize datetime into string format. ([b60a8dc](https://github.com/aws-samples/aws-auto-inventory/commit/b60a8dc926fad988c331ba1ab962c17c6d49df34))

## [0.7.0](https://github.com/aws-samples/aws-auto-inventory/compare/v0.6.0...v0.7.0) (2023-02-07)


### Features

* only build and hygiene if feat/ or fix/ branches ([042af58](https://github.com/aws-samples/aws-auto-inventory/commit/042af58f709b0cd10b7085674d868bae174ae10a))


### Bug Fixes

* correct glob pattern ([bbbb7e4](https://github.com/aws-samples/aws-auto-inventory/commit/bbbb7e443699a47e4a6b491f1713d028987680c2))
* use main branch, and fix workflow schedule ([acf19d5](https://github.com/aws-samples/aws-auto-inventory/commit/acf19d5b11fc4680c094d0e7d8a8fe2a09d60614))

## [0.6.0](https://github.com/aws-samples/aws-auto-inventory/compare/v0.5.3...v0.6.0) (2023-01-16)


### Features

* use personal access token ([062e5e3](https://github.com/aws-samples/aws-auto-inventory/commit/062e5e3b860ff2bd84861846ed44f917998f83fb))

## [0.5.2](https://github.com/aws-samples/aws-auto-inventory/compare/v0.5.1...v0.5.2) (2023-01-16)


### Bug Fixes

* use v* as tag prefix ([39d1937](https://github.com/aws-samples/aws-auto-inventory/commit/39d19374a350761202999c21b6b15b18672fcb11))

## [0.5.1](https://github.com/aws-samples/aws-auto-inventory/compare/v0.5.0...v0.5.1) (2023-01-16)


### Bug Fixes

* use correct tag format ([18da1b6](https://github.com/aws-samples/aws-auto-inventory/commit/18da1b6e2ad904a465c3e4841a369990d1264d86))

## [0.5.0](https://github.com/aws-samples/aws-auto-inventory/compare/v0.4.0...v0.5.0) (2023-01-16)


### Features

* publish binary once pr has been merged ([45fa76b](https://github.com/aws-samples/aws-auto-inventory/commit/45fa76b8ea7f7dfe758a23e08db4b2abb96043d2))
* publish binary once pr has been merged ([ef6337c](https://github.com/aws-samples/aws-auto-inventory/commit/ef6337c593dec7ef365f330d1733f55b556344e2))


### Bug Fixes

* restore publish workflow ([a16dae5](https://github.com/aws-samples/aws-auto-inventory/commit/a16dae598404b3e176c391b935ffaf5f50b238e7))

## [0.4.0](https://github.com/aws-samples/aws-auto-inventory/compare/v0.3.0...v0.4.0) (2023-01-13)


### Features

* create make target to perform hygiene tasks ([85a4f9d](https://github.com/aws-samples/aws-auto-inventory/commit/85a4f9d852dbee6b9f064b8c640a69692d2acc75))
* ignore output directory and excel files ([9f5a341](https://github.com/aws-samples/aws-auto-inventory/commit/9f5a341ce632a6727ffb07d00d4f7a1fd710d015))

## [0.3.0](https://github.com/aws-samples/aws-auto-inventory/compare/0.2.0...v0.3.0) (2023-01-12)


### Features

* create workflow to release project using gh actions release-please ([905cb08](https://github.com/aws-samples/aws-auto-inventory/commit/905cb08fcd62d1b239302b17357a4325db4876a1))

## Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).


================================================
FILE: CODE_OF_CONDUCT.md
================================================
## Code of Conduct
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.


================================================
FILE: CONTRIBUTING.md
================================================
# Contributing Guidelines

Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
documentation, we greatly value feedback and contributions from our community.

Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
information to effectively respond to your bug report or contribution.

## Reporting Bugs/Feature Requests

We welcome you to use the GitHub issue tracker to report bugs or suggest features.

When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:

- A reproducible test case or series of steps
- The version of our code being used
- Any modifications you've made relevant to the bug
- Anything unusual about your environment or deployment

## Contributing via Pull Requests

Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:

1. You are working against the latest source on the _main_ branch.
2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
3. You open an issue to discuss any significant work - we would hate for your time to be wasted.

To send us a pull request, please:

1. Fork the repository.
2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
3. Ensure local tests pass.
4. Commit to your fork using clear commit messages.
5. Send us a pull request, answering any default questions in the pull request interface.
6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.

GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).

## Finding contributions to work on

Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.

## Code of Conduct

This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.

## Security issue notifications

If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.

## Licensing

See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.


================================================
FILE: LICENSE
================================================

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.


================================================
FILE: Makefile
================================================
# Absolute path of the repository checkout; exported for sub-makes.
export WORKSPACE=$(shell pwd)
# Location of the shared "habits" tooling submodule (see .gitmodules).
export HABITS = $(WORKSPACE)/habits

# Pull in the shared Makefile library plus every per-tool fragment it ships.
include $(HABITS)/lib/make/Makefile
include $(HABITS)/lib/make/*/Makefile

.PHONY: clean
## Clean project
clean:
	@rm -rf output/

.PHONY: hygiene
## Run documentation build and pre-commit checks (targets provided by habits)
hygiene: doc/build pre-commit/run


================================================
FILE: NOTICE
================================================
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.


================================================
FILE: README.md
================================================
# AWS Auto Inventory

A tool for scanning AWS services across regions and accounts to collect resource information.

## Features

- **Multi-format Configuration**: Support for both YAML and JSON configuration formats
- **Multi-format Output**: Generate both JSON and Excel outputs
- **Multi-threading**: Concurrent scanning of regions and services for faster results
- **Organization Scanning**: Scan resources across all accounts in an AWS Organization
- **Robust Error Handling**: Retry logic for API throttling and transient errors
- **Flexible Filtering**: Filter resources by tags, IDs, and other attributes
- **Data Transformation**: Transform data for better analysis, including transposition in Excel
- **Binary Data Support**: Proper handling of binary data (bytes) returned by AWS APIs

## Installation

### From PyPI

```bash
pip install aws-auto-inventory
```

### From Source

```bash
git clone https://github.com/aws-samples/aws-auto-inventory.git
cd aws-auto-inventory
pip install -e .
```

## Usage

### Basic Usage

```bash
aws-auto-inventory --config examples/config_example.yaml --output-dir output --format both
```

### Command-line Options

```
usage: aws-auto-inventory [-h] -c CONFIG [-o OUTPUT_DIR] [-f {json,excel,both}]
                         [--max-regions MAX_REGIONS] [--max-services MAX_SERVICES]
                         [--max-retries MAX_RETRIES] [--retry-delay RETRY_DELAY]
                         [--log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}]
                         [--validate-only]

AWS Auto Inventory - Scan AWS resources and generate inventory

optional arguments:
  -h, --help            show this help message and exit
  -c CONFIG, --config CONFIG
                        Path to configuration file (YAML or JSON)
  -o OUTPUT_DIR, --output-dir OUTPUT_DIR
                        Directory to store output files (default: output)
  -f {json,excel,both}, --format {json,excel,both}
                        Output format (default: json)
  --max-regions MAX_REGIONS
                        Maximum number of regions to scan concurrently
  --max-services MAX_SERVICES
                        Maximum number of services to scan concurrently per region
  --max-retries MAX_RETRIES
                        Maximum number of retries for API calls (default: 3)
  --retry-delay RETRY_DELAY
                        Base delay in seconds between retries (default: 2)
  --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
                        Logging level (default: INFO)
  --validate-only       Validate configuration and exit without scanning
```

## Configuration

AWS Auto Inventory uses a configuration file to define what resources to scan. The configuration file can be in either YAML or JSON format.

### Example Configuration (YAML)

```yaml
inventories:
  - name: my-aws-inventory
    aws:
      profile: default
      region:
        - us-east-1
        - us-west-2
      organization: false
    excel:
      transpose: true
    sheets:
      - name: EC2Instances
        service: ec2
        function: describe_instances
        result_key: Reservations
        parameters:
          Filters:
            - Name: instance-state-name
              Values:
                - running
      - name: S3Buckets
        service: s3
        function: list_buckets
        result_key: Buckets
```

### Example Configuration (JSON)

```json
{
  "inventories": [
    {
      "name": "my-aws-inventory",
      "aws": {
        "profile": "default",
        "region": ["us-east-1", "us-west-2"],
        "organization": false
      },
      "excel": {
        "transpose": true
      },
      "sheets": [
        {
          "name": "EC2Instances",
          "service": "ec2",
          "function": "describe_instances",
          "result_key": "Reservations",
          "parameters": {
            "Filters": [
              {
                "Name": "instance-state-name",
                "Values": ["running"]
              }
            ]
          }
        },
        {
          "name": "S3Buckets",
          "service": "s3",
          "function": "list_buckets",
          "result_key": "Buckets"
        }
      ]
    }
  ]
}
```

### Organization Scanning

To scan resources across all accounts in an AWS Organization, set `organization: true` in the configuration:

```yaml
inventories:
  - name: organization-wide
    aws:
      profile: management
      region:
        - us-east-1
        - us-west-2
      organization: true
      role_name: OrganizationAccountAccessRole
    sheets:
      # ... sheets configuration ...
```

## Output

AWS Auto Inventory generates output files in the specified output directory:

- **JSON Output**: JSON files for each service in each region
- **Excel Output**: Excel spreadsheets with one sheet per service

### Handling of Binary Data

Some AWS APIs (like CloudTrail.Client.list_public_keys) return binary data as bytes. AWS Auto Inventory handles this data as follows:

- In JSON output: Binary data is encoded as base64 and stored in a special format: `{"__bytes_b64__": "base64-encoded-string"}`
- In Excel output: Binary data is converted to a string in the format: `[BYTES: base64-encoded-string]`

This ensures that all data can be properly serialized and deserialized without errors.

## Examples

Example configuration files are provided in the `examples` directory:

- `config_example.yaml`: Basic YAML configuration
- `config_example.json`: Basic JSON configuration
- `config_organization_example.yaml`: Configuration for organization-wide scanning

## AWS Credentials

AWS Auto Inventory uses the standard AWS credential providers:

1. Environment variables
2. Shared credential file (~/.aws/credentials)
3. AWS IAM Instance Profile (if running on an EC2 instance)

You can specify a profile name in the configuration file to use a specific profile from your credentials file.

## License

This project is licensed under the Apache License 2.0 - see the LICENSE file for details.

================================================
FILE: SECURITY.md
================================================
## Security

We take the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations.
If you believe you have found a security vulnerability in any AWS-owned repository, please raise an issue.

Thank you!


================================================
FILE: aws-auto-inventory-unified-architecture.md
================================================
# AWS Auto Inventory: Unified Architecture Plan

## Table of Contents
1. [Current State Analysis](#current-state-analysis)
2. [Unified Architecture Design](#unified-architecture-design)
3. [Detailed Implementation Strategy](#detailed-implementation-strategy)
4. [Implementation Details](#implementation-details)
5. [Potential Challenges and Mitigation Strategies](#potential-challenges-and-mitigation-strategies)
6. [Next Steps](#next-steps)

## Current State Analysis

### JSON-based Implementation (Current Codebase)
- **Strengths**: 
  - Multi-threading for concurrent scanning of regions and services
  - Organization-wide scanning across multiple AWS accounts
  - Robust API call retry logic and throttling handling
  - Simple JSON configuration format
  - Direct AWS API access through boto3
  - Modular design with separate components for scanning and organization handling

### Excel-based Implementation (Described in habits.yaml)
- **Strengths**:
  - Excel spreadsheet output (more user-friendly for analysis)
  - YAML configuration format (more human-readable)
  - Advanced filtering capabilities (by tags, VPC, subnets, etc.)
  - Support for multiple AWS accounts through profiles
  - Transposable data in Excel output

## Unified Architecture Design

### Architecture Overview

```mermaid
graph TD
    A[Configuration Layer] --> B[Core Scanning Engine]
    B --> C[Output Processor]
    
    subgraph "Configuration Layer"
        A1[YAML Config Parser]
        A2[JSON Config Parser]
        A3[Config Validator]
    end
    
    subgraph "Core Scanning Engine"
        B1[AWS API Client]
        B2[Multi-threading Manager]
        B3[Organization Scanner]
        B4[Retry & Throttling Handler]
    end
    
    subgraph "Output Processor"
        C1[JSON Output Generator]
        C2[Excel Output Generator]
        C3[Data Transformer]
    end
    
    A1 --> A3
    A2 --> A3
    A3 --> B
    B --> B1
    B --> B2
    B --> B3
    B --> B4
    B --> C
    C --> C1
    C --> C2
    C --> C3
```

### Component Details

#### 1. Configuration Layer
- **Unified Configuration Format**: Support both YAML and JSON configuration formats
- **Configuration Validator**: Ensure configurations are valid regardless of format
- **Configuration Converter**: Allow conversion between formats
- **Feature Parity**: Ensure all filtering and selection options are available in both formats

#### 2. Core Scanning Engine
- **AWS API Client**: Maintain the robust boto3 integration from the JSON implementation
- **Multi-threading Manager**: Keep the concurrent scanning capabilities
- **Organization Scanner**: Preserve the ability to scan across multiple accounts
- **Retry & Throttling Handler**: Maintain the robust error handling and retry logic
- **Resource Filter**: Implement the filtering capabilities from the Excel implementation

#### 3. Output Processor
- **Data Model**: Create a unified data model that can be serialized to both JSON and Excel
- **JSON Output Generator**: Maintain the current JSON output functionality
- **Excel Output Generator**: Add the Excel output capabilities
- **Data Transformer**: Support operations like transposition for Excel output

### Implementation Plan

```mermaid
gantt
    title AWS Auto Inventory Unified Implementation
    dateFormat  YYYY-MM-DD
    section Architecture
    Design Unified Architecture           :a1, 2025-06-10, 7d
    Create Common Data Models             :a2, after a1, 5d
    section Configuration
    Implement Unified Config Parser       :b1, after a1, 10d
    Add Configuration Validator           :b2, after b1, 5d
    section Core Engine
    Refactor Scanning Engine              :c1, after a2, 14d
    Enhance Organization Scanner          :c2, after c1, 7d
    Implement Advanced Filtering          :c3, after c1, 10d
    section Output
    Implement JSON Output Module          :d1, after c1, 5d
    Implement Excel Output Module         :d2, after c1, 10d
    Add Data Transformation Features      :d3, after d2, 7d
    section Integration
    Integration Testing                   :e1, after d1 d2 d3 c2 c3, 10d
    Performance Optimization              :e2, after e1, 7d
    Documentation                         :e3, after e2, 5d
```

## Detailed Implementation Strategy

### 1. Refactor the Configuration System

Create a unified configuration system that supports both YAML and JSON formats:

```mermaid
classDiagram
    class ConfigLoader {
        +load_config(path: str) : Config
        -detect_format(path: str) : str
    }
    
    class Config {
        +inventories: List[Inventory]
        +validate() : bool
        +to_json() : str
        +to_yaml() : str
    }
    
    class Inventory {
        +name: str
        +aws: AWSConfig
        +sheets: List[Sheet]
        +excel: ExcelConfig
    }
    
    class AWSConfig {
        +profile: str
        +region: List[str]
        +organization: bool
        +role_name: str
    }
    
    class Sheet {
        +name: str
        +service: str
        +function: str
        +result_key: str
        +parameters: dict
    }
    
    class ExcelConfig {
        +transpose: bool
        +formatting: dict
    }
    
    ConfigLoader --> Config : creates
    Config --> Inventory : contains
    Inventory --> AWSConfig : has
    Inventory --> Sheet : has
    Inventory --> ExcelConfig : has
```

### 2. Enhance the Core Scanning Engine

Refactor the scanning engine to maintain the multi-threading and organization scanning capabilities while adding the filtering features:

```mermaid
classDiagram
    class ScanEngine {
        +scan(config: Config) : ScanResult
    }
    
    class AWSClient {
        +call_api(service: str, function: str, params: dict) : dict
        -handle_throttling(error: Exception, retry: int)
    }
    
    class OrganizationScanner {
        +scan_organization(config: Config) : List[AccountResult]
        -assume_role(account_id: str, role_name: str) : Session
    }
    
    class RegionScanner {
        +scan_regions(config: Config, session: Session) : List[RegionResult]
    }
    
    class ServiceScanner {
        +scan_services(config: Config, session: Session, region: str) : List[ServiceResult]
    }
    
    class ResourceFilter {
        +apply_filters(results: dict, filters: dict) : dict
    }
    
    ScanEngine --> OrganizationScanner : uses
    ScanEngine --> RegionScanner : uses
    RegionScanner --> ServiceScanner : uses
    ServiceScanner --> AWSClient : uses
    ServiceScanner --> ResourceFilter : uses
```

### 3. Implement the Output Processor

Create a flexible output system that can generate both JSON and Excel outputs:

```mermaid
classDiagram
    class OutputProcessor {
        +process(scan_result: ScanResult, format: str) : void
    }
    
    class JSONOutputGenerator {
        +generate(scan_result: ScanResult, path: str) : void
    }
    
    class ExcelOutputGenerator {
        +generate(scan_result: ScanResult, path: str) : void
        -format_sheet(sheet: Sheet, data: dict) : void
        -apply_transpose(data: dict, transpose: bool) : dict
    }
    
    class DataTransformer {
        +transform(data: dict, operations: List[str]) : dict
    }
    
    OutputProcessor --> JSONOutputGenerator : uses
    OutputProcessor --> ExcelOutputGenerator : uses
    ExcelOutputGenerator --> DataTransformer : uses
```

## Implementation Details

### Project Structure

```
aws-auto-inventory/
├── aws_auto_inventory/
│   ├── __init__.py
│   ├── cli.py                  # Command-line interface
│   ├── config/
│   │   ├── __init__.py
│   │   ├── loader.py           # Config loading (YAML/JSON)
│   │   ├── validator.py        # Config validation
│   │   └── models.py           # Config data models
│   ├── core/
│   │   ├── __init__.py
│   │   ├── aws_client.py       # AWS API client with retry logic
│   │   ├── scan_engine.py      # Main scanning engine
│   │   ├── organization.py     # Organization scanning
│   │   ├── region.py           # Region scanning
│   │   ├── service.py          # Service scanning
│   │   └── filter.py           # Resource filtering
│   ├── output/
│   │   ├── __init__.py
│   │   ├── processor.py        # Output processing
│   │   ├── json_generator.py   # JSON output
│   │   ├── excel_generator.py  # Excel output
│   │   └── transformer.py      # Data transformation
│   └── utils/
│       ├── __init__.py
│       ├── logging.py          # Logging utilities
│       └── threading.py        # Threading utilities
├── tests/
│   ├── __init__.py
│   ├── conftest.py
│   ├── test_config/
│   ├── test_core/
│   └── test_output/
├── examples/
│   ├── config_yaml_example.yaml
│   ├── config_json_example.json
│   └── README.md
├── setup.py
├── requirements.txt
└── README.md
```

### Key Implementation Components

#### 1. Configuration System

The configuration system will support both YAML and JSON formats with automatic detection:

```python
# aws_auto_inventory/config/loader.py
import yaml
import json
import os
from .models import Config

class ConfigLoader:
    """Loads inventory configuration files written in YAML or JSON."""

    def load_config(self, path):
        """Parse the file at ``path`` and return a ``Config`` instance."""
        use_yaml = self._detect_format(path) == 'yaml'
        with open(path, 'r') as handle:
            raw = yaml.safe_load(handle) if use_yaml else json.load(handle)
        return Config.from_dict(raw)

    def _detect_format(self, path):
        """Return 'yaml' for .yaml/.yml extensions, 'json' for anything else."""
        extension = os.path.splitext(path)[1].lower()
        return 'yaml' if extension in ('.yaml', '.yml') else 'json'
```

Configuration data models will use Pydantic for validation:

```python
# aws_auto_inventory/config/models.py
from pydantic import BaseModel, Field
from typing import List, Dict, Optional, Union, Any

class ExcelConfig(BaseModel):
    # When True, Excel sheets are written transposed (rows become columns).
    transpose: bool = False
    # Free-form formatting options consumed by the Excel output generator.
    formatting: Dict[str, Any] = Field(default_factory=dict)

class AWSConfig(BaseModel):
    # Named AWS profile; None falls back to the default credential chain.
    profile: Optional[str] = None
    # Regions to scan; defaults to us-east-1 only.
    region: List[str] = Field(default_factory=lambda: ["us-east-1"])
    # When True, scan every account in the AWS Organization.
    organization: bool = False
    # Role assumed in each member account during organization scans.
    role_name: str = "OrganizationAccountAccessRole"

class Sheet(BaseModel):
    # Sheet name; also used as the Excel tab title.
    name: str
    # boto3 service name, e.g. "ec2" or "s3".
    service: str
    # Client method to call on the service, e.g. "describe_instances".
    function: str
    # Optional response key whose value becomes the sheet's rows.
    result_key: Optional[str] = None
    # Keyword arguments forwarded to the API call.
    parameters: Dict[str, Any] = Field(default_factory=dict)

class Inventory(BaseModel):
    # Inventory name; used to name the output files.
    name: str
    # AWS connection/scope settings (profile, regions, organization mode).
    aws: AWSConfig = Field(default_factory=AWSConfig)
    # One sheet per API call to perform.
    sheets: List[Sheet]
    # Excel-specific output options.
    excel: ExcelConfig = Field(default_factory=ExcelConfig)

class Config(BaseModel):
    # All inventories to scan; the CLI iterates over this list.
    inventories: List[Inventory]
    
    def to_json(self):
        """Convert config to JSON string."""
        # NOTE(review): .json()/.dict() are the pydantic v1 serialization
        # API; pydantic v2 renames them model_dump_json()/model_dump() —
        # confirm the pinned pydantic version.
        return self.json(indent=2)
    
    def to_yaml(self):
        """Convert config to YAML string."""
        # Imported lazily so YAML support is only required when used.
        import yaml
        return yaml.dump(self.dict(), sort_keys=False)
    
    @classmethod
    def from_dict(cls, data):
        """Create config from dictionary."""
        return cls(**data)
```

#### 2. Core Scanning Engine

The core scanning engine will maintain the multi-threading and organization scanning capabilities:

```python
# aws_auto_inventory/core/scan_engine.py
import concurrent.futures
from ..config.models import Config, Inventory
from .organization import OrganizationScanner
from .region import RegionScanner

class ScanEngine:
    """Top-level orchestrator: walks each configured inventory and fans the
    scanning work out across regions (and, for organization mode, accounts).
    """

    def __init__(self, max_workers_regions=None, max_workers_services=None):
        # None lets ThreadPoolExecutor choose its own default worker count.
        self.max_workers_regions = max_workers_regions
        self.max_workers_services = max_workers_services
        self.organization_scanner = OrganizationScanner()
        self.region_scanner = RegionScanner(max_workers=max_workers_services)
    
    def scan(self, config):
        """Perform scanning based on configuration.

        :param config: a ``Config`` holding one or more inventories.
        :return: list with one dict per inventory; organization scans use the
            key ``organization_results``, single-account scans use
            ``account_results``.
        """
        results = []
        
        for inventory in config.inventories:
            if inventory.aws.organization:
                # Scan across organization
                org_results = self.organization_scanner.scan_organization(
                    inventory, self.region_scanner
                )
                results.append({
                    "inventory_name": inventory.name,
                    "organization_results": org_results
                })
            else:
                # Scan single account
                import boto3
                session = boto3.Session(profile_name=inventory.aws.profile)
                
                # Scan regions concurrently
                with concurrent.futures.ThreadPoolExecutor(
                    max_workers=self.max_workers_regions
                ) as executor:
                    # Map each submitted future back to its region name so a
                    # failure can be reported against the right region.
                    future_to_region = {
                        executor.submit(
                            self.region_scanner.scan_region,
                            inventory, session, region
                        ): region
                        for region in inventory.aws.region
                    }
                    
                    region_results = []
                    for future in concurrent.futures.as_completed(future_to_region):
                        region = future_to_region[future]
                        try:
                            result = future.result()
                            region_results.append({
                                "region": region,
                                "services": result
                            })
                        except Exception as e:
                            # A failed region is reported and skipped; the
                            # remaining regions still contribute results.
                            print(f"Error scanning region {region}: {e}")
                
                results.append({
                    "inventory_name": inventory.name,
                    "account_results": region_results
                })
        
        return results
```

AWS API client with retry logic:

```python
# aws_auto_inventory/core/aws_client.py
import time
import boto3
import botocore

class AWSClient:
    """Thin wrapper around a boto3 session that retries throttled or
    transient AWS API calls with exponential backoff.
    """

    # Error codes that indicate throttling and are therefore safe to retry.
    _RETRYABLE_ERROR_CODES = ("Throttling", "RequestLimitExceeded")

    def __init__(self, session, max_retries=3, retry_delay=2):
        """
        :param session: boto3.Session (or compatible) used to build clients.
        :param max_retries: total number of attempts before giving up.
        :param retry_delay: base delay in seconds; attempt N sleeps
            ``retry_delay * 2**N`` seconds before retrying.
        """
        self.session = session
        self.max_retries = max_retries
        self.retry_delay = retry_delay

    def _backoff(self, attempt):
        """Sleep before retry ``attempt``; delay doubles with each attempt."""
        # Bug fix: the original computed retry_delay ** attempt, which
        # ignores the configured base delay on the first retry (x**0 == 1)
        # and never backs off at all when retry_delay == 1.
        time.sleep(self.retry_delay * (2 ** attempt))

    def call_api(self, service, function_name, region=None, parameters=None):
        """Call ``service``.``function_name`` and return its raw response.

        Retries up to ``max_retries`` times on throttling ClientErrors and
        on BotoCoreErrors; any other ClientError is raised immediately.
        Returns None only when ``max_retries`` <= 0.
        """
        client = self.session.client(service, region_name=region)

        for attempt in range(self.max_retries):
            last_attempt = attempt >= self.max_retries - 1
            try:
                function_to_call = getattr(client, function_name)
                if parameters:
                    return function_to_call(**parameters)
                return function_to_call()
            except botocore.exceptions.ClientError as error:
                error_code = error.response["Error"]["Code"]
                if error_code in self._RETRYABLE_ERROR_CODES and not last_attempt:
                    self._backoff(attempt)
                    continue
                raise
            except botocore.exceptions.BotoCoreError:
                if not last_attempt:
                    self._backoff(attempt)
                    continue
                raise

        return None
```

#### 3. Output Processor

The output processor will support both JSON and Excel formats:

```python
# aws_auto_inventory/output/processor.py
from .json_generator import JSONOutputGenerator
from .excel_generator import ExcelOutputGenerator

class OutputProcessor:
    """Dispatches scan results to the requested output generators."""

    def __init__(self):
        self.json_generator = JSONOutputGenerator()
        self.excel_generator = ExcelOutputGenerator()

    def process(self, scan_result, output_dir, formats=None):
        """Write ``scan_result`` to ``output_dir`` in each requested format.

        ``formats`` is a collection that may contain "json" and/or "excel";
        omitting it writes JSON only.
        """
        requested = ["json"] if formats is None else formats

        if "json" in requested:
            self.json_generator.generate(scan_result, output_dir)

        if "excel" in requested:
            self.excel_generator.generate(scan_result, output_dir)
```

Excel output generator:

```python
# aws_auto_inventory/output/excel_generator.py
import os
import pandas as pd
from .transformer import DataTransformer

class ExcelOutputGenerator:
    """Writes scan results to one .xlsx workbook per inventory."""

    # Excel rejects sheet names longer than 31 characters.
    _MAX_SHEET_NAME = 31

    def __init__(self):
        # NOTE(review): the transformer is instantiated but never used
        # below; the excel.transpose option presumably hooks in here —
        # confirm before relying on transposition.
        self.transformer = DataTransformer()

    def generate(self, scan_result, output_dir):
        """Generate Excel output from scan results.

        :param scan_result: list of per-inventory result dicts produced by
            the scan engine (organization or single-account shape).
        :param output_dir: directory for the .xlsx files; created if absent.
        """
        os.makedirs(output_dir, exist_ok=True)

        for inventory_result in scan_result:
            inventory_name = inventory_result["inventory_name"]
            excel_path = os.path.join(output_dir, f"{inventory_name}.xlsx")

            with pd.ExcelWriter(excel_path) as writer:
                if "organization_results" in inventory_result:
                    self._process_organization_results(
                        inventory_result["organization_results"], writer
                    )
                elif "account_results" in inventory_result:
                    self._process_account_results(
                        inventory_result["account_results"], writer
                    )

    def _write_sheet(self, writer, sheet_name, result):
        """Flatten ``result`` and write it as one sheet, truncating the name
        to Excel's 31-character limit (truncated names may collide)."""
        if len(sheet_name) > self._MAX_SHEET_NAME:
            sheet_name = sheet_name[: self._MAX_SHEET_NAME]
        df = pd.json_normalize(result)
        df.to_excel(writer, sheet_name=sheet_name)

    def _process_organization_results(self, org_results, writer):
        """Write one sheet per (account, region, service) combination."""
        for account_result in org_results:
            account_id = account_result["account_id"]
            for region_result in account_result["regions"]:
                region = region_result["region"]
                for service_result in region_result["services"]:
                    self._write_sheet(
                        writer,
                        f"{account_id}_{region}_{service_result['service']}",
                        service_result["result"],
                    )

    def _process_account_results(self, account_results, writer):
        """Write one sheet per (region, service) combination."""
        for region_result in account_results:
            region = region_result["region"]
            for service_result in region_result["services"]:
                self._write_sheet(
                    writer,
                    f"{region}_{service_result['service']}",
                    service_result["result"],
                )
```

#### 4. Command-Line Interface

The CLI will provide a unified interface for all features:

```python
# aws_auto_inventory/cli.py
import argparse
import os
import sys
from .config.loader import ConfigLoader
from .core.scan_engine import ScanEngine
from .output.processor import OutputProcessor

def main():
    """CLI entry point: parse arguments, load config, scan, write output.

    Exits with status 1 if the configuration fails to load or the scan
    raises; otherwise writes results to the output directory and returns.
    """
    parser = argparse.ArgumentParser(
        description="AWS Auto Inventory - Scan AWS resources and generate inventory"
    )
    parser.add_argument(
        "-c", "--config", required=True,
        help="Path to configuration file (YAML or JSON)"
    )
    parser.add_argument(
        "-o", "--output-dir", default="output",
        help="Directory to store output files"
    )
    parser.add_argument(
        "-f", "--format", choices=["json", "excel", "both"], default="json",
        help="Output format (default: json)"
    )
    parser.add_argument(
        "--max-regions", type=int, default=None,
        help="Maximum number of regions to scan concurrently"
    )
    parser.add_argument(
        "--max-services", type=int, default=None,
        help="Maximum number of services to scan concurrently per region"
    )
    parser.add_argument(
        "--log-level", choices=["DEBUG", "INFO", "WARNING", "ERROR"], default="INFO",
        help="Logging level"
    )
    
    args = parser.parse_args()
    
    # Set up logging
    import logging
    logging.basicConfig(
        level=getattr(logging, args.log_level),
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    
    # Load configuration
    config_loader = ConfigLoader()
    try:
        config = config_loader.load_config(args.config)
    except Exception as e:
        print(f"Error loading configuration: {e}")
        sys.exit(1)
    
    # Create output directory
    os.makedirs(args.output_dir, exist_ok=True)
    
    # Determine output formats ("both" expands to json + excel)
    formats = []
    if args.format in ["json", "both"]:
        formats.append("json")
    if args.format in ["excel", "both"]:
        formats.append("excel")
    
    # Run scan
    scan_engine = ScanEngine(
        max_workers_regions=args.max_regions,
        max_workers_services=args.max_services
    )
    
    try:
        results = scan_engine.scan(config)
    except Exception as e:
        print(f"Error during scan: {e}")
        sys.exit(1)
    
    # Process output
    output_processor = OutputProcessor()
    output_processor.process(results, args.output_dir, formats)
    
    print(f"Inventory completed successfully. Results stored in {args.output_dir}")

if __name__ == "__main__":
    main()
```

### Configuration Examples

#### YAML Configuration Example

```yaml
# Example YAML configuration
inventories:
  - name: my-aws-inventory
    aws:
      profile: default
      region:
        - us-east-1
        - us-west-2
      organization: false
    excel:
      transpose: true
    sheets:
      - name: EC2Instances
        service: ec2
        function: describe_instances
        result_key: Reservations
      - name: S3Buckets
        service: s3
        function: list_buckets
        result_key: Buckets
      - name: IAMRoles
        service: iam
        function: list_roles
        result_key: Roles
```

#### JSON Configuration Example

```json
{
  "inventories": [
    {
      "name": "my-aws-inventory",
      "aws": {
        "profile": "default",
        "region": ["us-east-1", "us-west-2"],
        "organization": false
      },
      "excel": {
        "transpose": true
      },
      "sheets": [
        {
          "name": "EC2Instances",
          "service": "ec2",
          "function": "describe_instances",
          "result_key": "Reservations"
        },
        {
          "name": "S3Buckets",
          "service": "s3",
          "function": "list_buckets",
          "result_key": "Buckets"
        },
        {
          "name": "IAMRoles",
          "service": "iam",
          "function": "list_roles",
          "result_key": "Roles"
        }
      ]
    }
  ]
}
```

## Potential Challenges and Mitigation Strategies

### 1. Backward Compatibility

**Challenge**: Ensuring the new unified implementation remains compatible with existing configurations and workflows.

**Mitigation Strategies**:
- Implement configuration adapters that can convert between old and new formats
- Provide clear migration guides and examples
- Include backward compatibility layers that can process legacy configurations
- Add deprecation warnings for legacy features that will be removed in future versions
- Create automated migration tools to help users transition

**Implementation Example**:
```python
def detect_legacy_config(config_path):
    """Detect if a configuration file is in legacy format.

    Legacy JSON configs are a top-level list of sheet dicts (each with
    ``service`` and ``function`` keys); legacy YAML configs have top-level
    ``sheets`` without an ``inventories`` wrapper.

    :param config_path: path to the configuration file to inspect.
    :return: True when the file parses as a legacy config, False when it
        fails to parse, does not match, or has an unrecognized extension.
    """
    with open(config_path, 'r') as f:
        try:
            if config_path.endswith('.json'):
                config = json.load(f)
                # Check for legacy JSON format indicators
                return 'service' in config[0] and 'function' in config[0]
            elif config_path.endswith('.yaml') or config_path.endswith('.yml'):
                config = yaml.safe_load(f)
                # Check for legacy YAML format indicators
                return 'inventories' not in config and 'sheets' in config
        except Exception:
            # Bug fix: the original bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit. Any parse or shape error
            # simply means "not a legacy config".
            return False
    return False

def convert_legacy_config(config_path, output_path=None):
    """Convert legacy configuration to new format.

    :param config_path: path to the legacy configuration file.
    :param output_path: destination path; defaults to
        ``<base>_converted<ext>`` next to the input file.
    :return: the path the converted configuration is written to.

    NOTE(review): the conversion body is still a stub — only the output
    path is computed here.
    """
    if output_path is None:
        base, ext = os.path.splitext(config_path)
        output_path = f"{base}_converted{ext}"
    
    # Implementation of conversion logic
    # ...
    
    return output_path
```

### 2. Performance Considerations

**Challenge**: Maintaining or improving performance while adding new features, especially for large AWS environments.

**Mitigation Strategies**:
- Implement efficient multi-threading with configurable thread pools
- Use connection pooling for AWS API calls
- Add caching mechanisms for frequently accessed data
- Implement pagination for large result sets
- Allow selective scanning of specific services/regions
- Add progress reporting for long-running operations
- Implement incremental scanning options

**Implementation Example**:
```python
class CachingAWSClient:
    def __init__(self, session, cache_ttl=300):
        self.session = session
        self.cache = {}
        self.cache_ttl = cache_ttl
        self.cache_timestamps = {}
    
    def call_api(self, service, function_name, region=None, parameters=None):
        """Call AWS API with caching."""
        cache_key = f"{service}:{function_name}:{region}:{json.dumps(parameters)}"
        
        # Check cache
        current_time = time.time()
        if cache_key in self.cache:
            if current_time - self.cache_timestamps[cache_key] < self.cache_ttl:
                return self.cache[cache_key]
        
        # Call API
        result = self._make_api_call(service, function_name, region, parameters)
        
        # Update cache
        self.cache[cache_key] = result
        self.cache_timestamps[cache_key] = current_time
        
        return result
    
    def _make_api_call(self, service, function_name, region, parameters):
        # Implementation of API call with retry logic
        # ...
```

### 3. Error Handling and Edge Cases

**Challenge**: Robust error handling for various AWS API errors, rate limiting, and edge cases.

**Mitigation Strategies**:
- Implement comprehensive error handling with specific error types
- Add detailed logging for troubleshooting
- Implement graceful degradation for non-critical failures
- Add retry mechanisms with exponential backoff
- Provide clear error messages and suggestions
- Implement validation for all inputs and configurations

**Implementation Example**:
```python
class AWSInventoryError(Exception):
    """Base exception for AWS Auto Inventory."""
    pass

class ConfigurationError(AWSInventoryError):
    """Error in configuration."""
    pass

class AWSAPIError(AWSInventoryError):
    """Error in AWS API call."""
    def __init__(self, service, function, error):
        self.service = service
        self.function = function
        self.error = error
        super().__init__(f"Error calling {service}.{function}: {error}")

class ThrottlingError(AWSAPIError):
    """AWS API throttling error."""
    def __init__(self, service, function, retry_after=None):
        self.retry_after = retry_after
        super().__init__(service, function, "API throttling")

# Error handling in API client
def call_api_with_error_handling(client, service, function, parameters=None):
    try:
        # Make API call
        # ...
    except botocore.exceptions.ClientError as e:
        error_code = e.response.get("Error", {}).get("Code", "")
        if error_code == "Throttling":
            retry_after = int(e.response.get("ResponseMetadata", {}).get("RetryAfter", 1))
            raise ThrottlingError(service, function, retry_after)
        elif error_code == "AccessDenied":
            raise PermissionError(f"Access denied for {service}.{function}")
        else:
            raise AWSAPIError(service, function, str(e))
    except Exception as e:
        raise AWSAPIError(service, function, str(e))
```

### 4. Testing Strategy

**Challenge**: Ensuring comprehensive testing of the unified implementation across different AWS environments.

**Mitigation Strategies**:
- Implement unit tests for all components
- Use mocking for AWS services in tests
- Implement integration tests for key workflows
- Create test fixtures for different configuration scenarios
- Implement CI/CD pipelines for automated testing
- Add property-based testing for edge cases
- Create a test matrix for different Python versions and dependencies

**Implementation Example**:
```python
# tests/test_config/test_loader.py
import pytest
import tempfile
import os
from aws_auto_inventory.config.loader import ConfigLoader

@pytest.fixture
def yaml_config_file():
    with tempfile.NamedTemporaryFile(suffix='.yaml', delete=False) as f:
        f.write(b"""
inventories:
  - name: test-inventory
    aws:
      region:
        - us-east-1
    sheets:
      - name: EC2
        service: ec2
        function: describe_instances
""")
    yield f.name
    os.unlink(f.name)

@pytest.fixture
def json_config_file():
    with tempfile.NamedTemporaryFile(suffix='.json', delete=False) as f:
        f.write(b"""
{
  "inventories": [
    {
      "name": "test-inventory",
      "aws": {
        "region": ["us-east-1"]
      },
      "sheets": [
        {
          "name": "EC2",
          "service": "ec2",
          "function": "describe_instances"
        }
      ]
    }
  ]
}
""")
    yield f.name
    os.unlink(f.name)

def test_load_yaml_config(yaml_config_file):
    loader = ConfigLoader()
    config = loader.load_config(yaml_config_file)
    assert config.inventories[0].name == "test-inventory"
    assert config.inventories[0].aws.region == ["us-east-1"]
    assert config.inventories[0].sheets[0].name == "EC2"

def test_load_json_config(json_config_file):
    loader = ConfigLoader()
    config = loader.load_config(json_config_file)
    assert config.inventories[0].name == "test-inventory"
    assert config.inventories[0].aws.region == ["us-east-1"]
    assert config.inventories[0].sheets[0].name == "EC2"
```

### 5

================================================
FILE: aws_auto_inventory/__init__.py
================================================
"""
AWS Auto Inventory - Scan AWS resources and generate inventory.

A tool for scanning AWS services across regions and accounts to collect resource information.
"""

__version__ = '0.1.0'

================================================
FILE: aws_auto_inventory/cli.py
================================================
"""
Command-line interface for AWS Auto Inventory.
"""
import os
import sys
import argparse
import logging
from typing import List, Optional

import boto3

from .config.loader import ConfigLoader
from .config.validator import ConfigValidator
from .core.scan_engine import ScanEngine
from .output.processor import OutputProcessor
from .utils.logging import setup_logging


def check_aws_credentials(profile_name: Optional[str] = None) -> bool:
    """
    Verify that usable AWS credentials exist by calling STS GetCallerIdentity.

    Args:
        profile_name: AWS profile name.

    Returns:
        True if credentials are valid, False otherwise.
    """
    try:
        identity = (
            boto3.Session(profile_name=profile_name)
            .client("sts")
            .get_caller_identity()
        )
    except Exception as e:
        print(f"Error verifying AWS credentials: {e}")
        return False

    print(f"Authenticated as: {identity['Arn']}")
    return True


def parse_args() -> argparse.Namespace:
    """
    Parse command-line arguments.

    Returns:
        Parsed arguments.
    """
    parser = argparse.ArgumentParser(
        description="AWS Auto Inventory - Scan AWS resources and generate inventory"
    )

    # Flags and their options, in the order they should appear in --help.
    option_table = [
        (("-c", "--config"),
         dict(required=True,
              help="Path to configuration file (YAML or JSON)")),
        (("-o", "--output-dir"),
         dict(default="output",
              help="Directory to store output files (default: output)")),
        (("-f", "--format"),
         dict(choices=["json", "excel", "both"], default="json",
              help="Output format (default: json)")),
        (("--max-regions",),
         dict(type=int, default=None,
              help="Maximum number of regions to scan concurrently")),
        (("--max-services",),
         dict(type=int, default=None,
              help="Maximum number of services to scan concurrently per region")),
        (("--max-retries",),
         dict(type=int, default=3,
              help="Maximum number of retries for API calls (default: 3)")),
        (("--retry-delay",),
         dict(type=int, default=2,
              help="Base delay in seconds between retries (default: 2)")),
        (("--log-level",),
         dict(choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
              default="INFO",
              help="Logging level (default: INFO)")),
        (("--validate-only",),
         dict(action="store_true",
              help="Validate configuration and exit without scanning")),
    ]

    for flags, options in option_table:
        parser.add_argument(*flags, **options)

    return parser.parse_args()


def main() -> int:
    """
    Main entry point for AWS Auto Inventory.

    Orchestration: parse args -> set up logging -> load and validate the
    configuration -> verify AWS credentials per inventory -> run the scan ->
    write output. Failures are reported both to the log and to stdout.

    Returns:
        Exit code (0 for success, non-zero for error).
    """
    # Parse command-line arguments
    args = parse_args()
    
    # Set up logging; log files live under <output-dir>/logs next to results.
    log_dir = os.path.join(args.output_dir, "logs")
    logger = setup_logging(log_dir, args.log_level)
    
    try:
        # Load configuration
        logger.info(f"Loading configuration from {args.config}")
        config_loader = ConfigLoader()
        try:
            config = config_loader.load_config(args.config)
        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            print(f"Error loading configuration: {e}")
            return 1
        
        # Validate configuration
        logger.info("Validating configuration")
        validator = ConfigValidator()
        validation_errors = validator.validate(config)
        
        if validation_errors:
            # Report every validation error, then fail.
            logger.error("Configuration validation failed:")
            for error in validation_errors:
                logger.error(f"  - {error}")
                print(f"Configuration error: {error}")
            
            return 1
        
        # With --validate-only, stop after a successful validation.
        if args.validate_only:
            logger.info("Configuration validation successful")
            print("Configuration validation successful")
            return 0
        
        # Check AWS credentials. Each inventory may use a different profile,
        # so verify all of them up front before starting any scan work.
        for inventory in config.inventories:
            if not check_aws_credentials(inventory.aws.profile):
                logger.error(f"Invalid AWS credentials for inventory {inventory.name}")
                print(f"Invalid AWS credentials for inventory {inventory.name}")
                return 1
        
        # Determine output formats ("both" expands to json + excel).
        formats = []
        if args.format in ["json", "both"]:
            formats.append("json")
        if args.format in ["excel", "both"]:
            formats.append("excel")
        
        # Create scan engine with the concurrency/retry knobs from the CLI.
        scan_engine = ScanEngine(
            max_retries=args.max_retries,
            retry_delay=args.retry_delay,
            max_workers_regions=args.max_regions,
            max_workers_services=args.max_services
        )
        
        # Run scan
        logger.info("Starting scan")
        try:
            results = scan_engine.scan(config)
        except Exception as e:
            logger.error(f"Error during scan: {e}")
            print(f"Error during scan: {e}")
            return 1
        
        # Process output in each requested format.
        logger.info("Processing output")
        output_processor = OutputProcessor()
        output_processor.process(results, args.output_dir, formats)
        
        logger.info("Scan completed successfully")
        print(f"Scan completed successfully. Results stored in {args.output_dir}")
        
        return 0
    
    except Exception as e:
        # Catch-all so unexpected failures still yield a non-zero exit code.
        logger.error(f"Unexpected error: {e}")
        print(f"Unexpected error: {e}")
        return 1


if __name__ == "__main__":
    sys.exit(main())

================================================
FILE: aws_auto_inventory/config/__init__.py
================================================


================================================
FILE: aws_auto_inventory/config/loader.py
================================================
"""
Configuration loader for AWS Auto Inventory.
"""
import os
import json
import yaml
from typing import Union, Dict, Any

from .models import Config


class ConfigLoader:
    """
    Configuration loader that supports both YAML and JSON formats.

    Two legacy layouts are detected and transparently converted to the
    current 'inventories' schema before the Config model is built:
    a bare JSON list of service/function entries, and a YAML document
    with top-level 'sheets' but no 'inventories'.
    """
    
    def load_config(self, path: str) -> Config:
        """
        Load configuration from file.
        
        Args:
            path: Path to the configuration file.
            
        Returns:
            Config object.
            
        Raises:
            FileNotFoundError: If the configuration file does not exist.
            ValueError: If the configuration file format is not supported.
        """
        if not os.path.exists(path):
            raise FileNotFoundError(f"Configuration file not found: {path}")
        
        format_type = self._detect_format(path)
        
        # Read with an explicit encoding so config files parse identically
        # regardless of the platform's default locale encoding.
        with open(path, 'r', encoding='utf-8') as f:
            if format_type == 'yaml':
                config_data = yaml.safe_load(f)
            elif format_type == 'json':
                config_data = json.load(f)
            else:
                raise ValueError(f"Unsupported configuration format: {format_type}")
        
        # Transparently upgrade legacy layouts to the current schema.
        if self._is_legacy_format(config_data):
            config_data = self._convert_legacy_format(config_data)
        
        return Config.from_dict(config_data)
    
    def _detect_format(self, path: str) -> str:
        """
        Detect file format based on extension.
        
        Args:
            path: Path to the configuration file.
            
        Returns:
            Format type ('yaml' or 'json').
        """
        _, ext = os.path.splitext(path)
        ext = ext.lower()
        if ext in ('.yaml', '.yml'):
            return 'yaml'
        # Default to JSON for '.json' and any unrecognized extension.
        return 'json'
    
    def _is_legacy_format(self, config_data: Union[Dict[str, Any], list]) -> bool:
        """
        Check if the configuration is in legacy format.
        
        Args:
            config_data: Configuration data.
            
        Returns:
            True if the configuration is in legacy format, False otherwise.
        """
        # Legacy JSON format: a non-empty list of dicts carrying
        # 'service' and 'function' keys.
        if isinstance(config_data, list) and config_data:
            first_item = config_data[0]
            return isinstance(first_item, dict) and 'service' in first_item and 'function' in first_item
        
        # Legacy YAML format: 'sheets' at the top level but no 'inventories'.
        if isinstance(config_data, dict):
            return 'inventories' not in config_data and 'sheets' in config_data
        
        return False
    
    def _convert_legacy_format(self, config_data: Union[Dict[str, Any], list]) -> Dict[str, Any]:
        """
        Convert legacy format to new format.
        
        Args:
            config_data: Configuration data in legacy format.
            
        Returns:
            Configuration data in new format.
        """
        if isinstance(config_data, list):
            # Legacy JSON list: wrap entries into a single 'default' inventory,
            # deriving each sheet name from "<service>_<function>".
            return {
                "inventories": [
                    {
                        "name": "default",
                        "aws": {},
                        "sheets": [
                            {
                                "name": f"{item['service']}_{item['function']}",
                                "service": item['service'],
                                "function": item['function'],
                                "result_key": item.get('result_key'),
                                "parameters": item.get('parameters', {})
                            }
                            for item in config_data
                        ]
                    }
                ]
            }
        if isinstance(config_data, dict):
            # Legacy YAML document: lift its top-level keys into one inventory.
            return {
                "inventories": [
                    {
                        "name": config_data.get('name', 'default'),
                        "aws": config_data.get('aws', {}),
                        "sheets": config_data.get('sheets', []),
                        "excel": config_data.get('excel', {})
                    }
                ]
            }
        
        # Return as is if we can't convert.
        return config_data

================================================
FILE: aws_auto_inventory/config/models.py
================================================
"""
Configuration models for AWS Auto Inventory.
"""
from typing import List, Dict, Optional, Any, Union
from pydantic import BaseModel, Field


class ExcelConfig(BaseModel):
    """Excel output configuration."""
    # When True, swap rows and columns in the generated sheet.
    transpose: bool = False
    # Free-form formatting options (consumer not visible in this module —
    # presumably the Excel writer; confirm against the output processor).
    formatting: Dict[str, Any] = Field(default_factory=dict)


class AWSConfig(BaseModel):
    """AWS configuration."""
    # Named AWS profile to use; None falls back to the default credential chain.
    profile: Optional[str] = None
    # Regions to scan; defaults to ["us-east-1"] when unspecified.
    region: List[str] = Field(default_factory=lambda: ["us-east-1"])
    # When True, scan accounts across the AWS Organization.
    organization: bool = False
    # IAM role name assumed in member accounts for organization scans.
    role_name: str = "OrganizationAccountAccessRole"


class Sheet(BaseModel):
    """Sheet configuration for inventory."""
    # Display name of the sheet in the output.
    name: str
    # boto3 service name (e.g. "ec2").
    service: str
    # Client method to call (e.g. "describe_instances").
    function: str
    # Optional response key to extract; a '.'-prefixed value is treated
    # as a jq expression by the AWS client.
    result_key: Optional[str] = None
    # Keyword arguments forwarded to the API call.
    parameters: Dict[str, Any] = Field(default_factory=dict)


class Inventory(BaseModel):
    """Inventory configuration."""
    # Unique name identifying this inventory in logs and output.
    name: str
    # AWS connection settings (profile, regions, organization options).
    aws: AWSConfig = Field(default_factory=AWSConfig)
    # One sheet per API call to perform.
    sheets: List[Sheet]
    # Excel-specific output options.
    excel: ExcelConfig = Field(default_factory=ExcelConfig)


class Config(BaseModel):
    """Main configuration model."""
    # Top-level list of inventories to scan.
    inventories: List[Inventory]
    
    def to_json(self):
        """Convert config to JSON string."""
        # NOTE(review): .json() is the pydantic v1 API (deprecated in v2 in
        # favor of model_dump_json) — confirm the pinned pydantic version.
        return self.json(indent=2)
    
    def to_yaml(self):
        """Convert config to YAML string."""
        # yaml is imported lazily, only when YAML output is requested.
        import yaml
        return yaml.dump(self.dict(), sort_keys=False)
    
    @classmethod
    def from_dict(cls, data):
        """Create config from dictionary."""
        return cls(**data)

================================================
FILE: aws_auto_inventory/config/validator.py
================================================
"""
Configuration validator for AWS Auto Inventory.
"""
import boto3
from typing import List, Optional, Dict, Any

from .models import Config, Inventory, Sheet


class ConfigValidator:
    """
    Validates AWS Auto Inventory configurations.

    A default boto3 session, the set of available service names, and the
    per-service clients are created lazily and cached on the instance, so
    validating many sheets does not rebuild them for every sheet.
    """
    
    def __init__(self):
        # Lazily-created default boto3 session shared across sheet validations.
        self._session = None
        # Cached set of boto3 service names (None until first use).
        self._available_services = None
        # Cache of service name -> boto3 client used for method introspection.
        self._clients = {}
    
    def validate(self, config: Config) -> List[str]:
        """
        Validate a configuration.
        
        Args:
            config: Configuration to validate.
            
        Returns:
            List of validation errors. Empty list if configuration is valid.
        """
        errors = []
        
        # Check if there are any inventories
        if not config.inventories:
            errors.append("No inventories defined in configuration")
            return errors
        
        # Validate each inventory, prefixing errors with the inventory name.
        for inventory in config.inventories:
            inventory_errors = self._validate_inventory(inventory)
            errors.extend([f"Inventory '{inventory.name}': {error}" for error in inventory_errors])
        
        return errors
    
    def _validate_inventory(self, inventory: Inventory) -> List[str]:
        """
        Validate an inventory configuration.
        
        Args:
            inventory: Inventory to validate.
            
        Returns:
            List of validation errors. Empty list if inventory is valid.
        """
        errors = []
        
        # An inventory without sheets cannot do anything; stop early.
        if not inventory.sheets:
            errors.append("No sheets defined")
            return errors
        
        # Validate AWS configuration
        aws_errors = self._validate_aws_config(inventory)
        errors.extend(aws_errors)
        
        # Validate each sheet, prefixing errors with the sheet name.
        for sheet in inventory.sheets:
            sheet_errors = self._validate_sheet(sheet)
            errors.extend([f"Sheet '{sheet.name}': {error}" for error in sheet_errors])
        
        return errors
    
    def _validate_aws_config(self, inventory: Inventory) -> List[str]:
        """
        Validate AWS configuration.
        
        Args:
            inventory: Inventory containing AWS configuration.
            
        Returns:
            List of validation errors. Empty list if AWS configuration is valid.
        """
        errors = []
        
        # Check if regions are specified
        if not inventory.aws.region:
            errors.append("No regions specified")
        
        # If a profile is named, verify it by calling STS GetCallerIdentity.
        if inventory.aws.profile:
            try:
                session = boto3.Session(profile_name=inventory.aws.profile)
                sts = session.client('sts')
                sts.get_caller_identity()
            except Exception as e:
                errors.append(f"Invalid AWS profile '{inventory.aws.profile}': {str(e)}")
        
        return errors
    
    def _get_session(self) -> boto3.Session:
        """Return the shared default boto3 session, creating it on first use."""
        if self._session is None:
            self._session = boto3.Session()
        return self._session
    
    def _get_client(self, service: str):
        """Return a cached client for the service (region pinned to us-east-1)."""
        if service not in self._clients:
            self._clients[service] = self._get_session().client(service, region_name='us-east-1')
        return self._clients[service]
    
    def _validate_sheet(self, sheet: Sheet) -> List[str]:
        """
        Validate a sheet configuration.
        
        Args:
            sheet: Sheet to validate.
            
        Returns:
            List of validation errors. Empty list if sheet is valid.
        """
        errors = []
        
        # Check required fields
        if not sheet.service:
            errors.append("No service specified")
        
        if not sheet.function:
            errors.append("No function specified")
        
        # Check if service and function exist in boto3. Session, service
        # list, and clients are cached so repeated sheets are cheap.
        try:
            session = self._get_session()
            if self._available_services is None:
                self._available_services = set(session.get_available_services())
            if sheet.service not in self._available_services:
                errors.append(f"Invalid AWS service: {sheet.service}")
            else:
                client = self._get_client(sheet.service)
                if not hasattr(client, sheet.function):
                    errors.append(f"Function '{sheet.function}' does not exist for service '{sheet.service}'")
                elif not sheet.function.startswith(('describe_', 'get_', 'list_')):
                    # Only read-only operations are allowed for inventory scans.
                    errors.append(f"Function '{sheet.function}' is not a read-only operation")
        except Exception as e:
            errors.append(f"Error validating service and function: {str(e)}")
        
        return errors

================================================
FILE: aws_auto_inventory/core/__init__.py
================================================


================================================
FILE: aws_auto_inventory/core/aws_client.py
================================================
"""
AWS client with retry logic for AWS Auto Inventory.
"""
import time
import json
import logging
from typing import Optional, Dict, Any, Union

import boto3
import botocore
import jq

# Set up logger
logger = logging.getLogger(__name__)


class AWSClientError(Exception):
    """Base exception raised for failures inside the AWS client layer."""


class ThrottlingError(AWSClientError):
    """Exception raised when AWS API throttling occurs."""

    def __init__(self, service: str, function: str, retry_after: Optional[int] = None):
        message = f"API throttling for {service}.{function}"
        super().__init__(message)
        self.service = service
        self.function = function
        self.retry_after = retry_after


class AWSClient:
    """
    AWS client with retry logic for API calls.

    Throttling and BotoCore errors are retried with exponential backoff;
    other errors are raised immediately as AWSClientError.
    """
    
    def __init__(self, session: boto3.Session, max_retries: int = 3, retry_delay: int = 2):
        """
        Initialize AWS client.
        
        Args:
            session: boto3 Session.
            max_retries: Maximum number of attempts for API calls.
            retry_delay: Base delay (in seconds) between retries; the actual
                wait grows exponentially (retry_delay * 2 ** attempt).
        """
        self.session = session
        self.max_retries = max_retries
        self.retry_delay = retry_delay
    
    def _backoff(self, attempt: int) -> int:
        """Exponential backoff delay for the given zero-based attempt.
        
        Bug fix: the previous formula (retry_delay ** attempt) ignored the
        configured base delay on the first retry (x ** 0 == 1) and never
        grew when retry_delay was 1.
        """
        return self.retry_delay * (2 ** attempt)
    
    def call_api(
        self, 
        service: str, 
        function_name: str, 
        region: Optional[str] = None, 
        parameters: Optional[Dict[str, Any]] = None,
        result_key: Optional[str] = None
    ) -> Any:
        """
        Call AWS API with retry logic.
        
        Args:
            service: AWS service name.
            function_name: API function name.
            region: AWS region.
            parameters: API parameters.
            result_key: Key to extract from the response. A '.'-prefixed
                value is evaluated as a jq expression; otherwise a simple
                top-level key lookup is performed.
            
        Returns:
            API response (with ResponseMetadata removed) or extracted data
            if result_key is specified.
            
        Raises:
            AWSClientError: If the API call fails after all retries.
            ThrottlingError: If throttling persists through all retries.
        """
        client = self.session.client(service, region_name=region)
        
        if not hasattr(client, function_name):
            raise AWSClientError(f"Function {function_name} does not exist for service {service}")
        
        function_to_call = getattr(client, function_name)
        
        for attempt in range(self.max_retries):
            try:
                if parameters:
                    response = function_to_call(**parameters)
                else:
                    response = function_to_call()
                
                # Process the response
                if result_key:
                    if result_key.startswith('.'):
                        # jq expression: round-trip through JSON so the response
                        # is plain data (datetimes become strings via default=str).
                        return jq.compile(result_key).input_value(json.loads(json.dumps(response, default=str))).all()
                    else:
                        # Simple top-level key extraction (None if absent).
                        return response.get(result_key)
                else:
                    # Return full response with metadata removed
                    if isinstance(response, dict):
                        response.pop("ResponseMetadata", None)
                    return response
                
            except botocore.exceptions.ClientError as error:
                error_code = error.response["Error"]["Code"]
                if error_code in ["Throttling", "RequestLimitExceeded"]:
                    if attempt < (self.max_retries - 1):
                        wait_time = self._backoff(attempt)
                        logger.warning(
                            f"Throttling for {service}.{function_name}, retrying in {wait_time}s "
                            f"(attempt {attempt + 1}/{self.max_retries})"
                        )
                        time.sleep(wait_time)
                        continue
                    else:
                        raise ThrottlingError(service, function_name)
                else:
                    logger.error(f"AWS API error for {service}.{function_name}: {error}")
                    raise AWSClientError(f"AWS API error: {error}")
            except botocore.exceptions.BotoCoreError as error:
                if attempt < (self.max_retries - 1):
                    wait_time = self._backoff(attempt)
                    logger.warning(
                        f"BotoCore error for {service}.{function_name}, retrying in {wait_time}s "
                        f"(attempt {attempt + 1}/{self.max_retries})"
                    )
                    time.sleep(wait_time)
                    continue
                else:
                    logger.error(f"BotoCore error for {service}.{function_name}: {error}")
                    raise AWSClientError(f"BotoCore error: {error}")
            except Exception as error:
                logger.error(f"Unexpected error for {service}.{function_name}: {error}")
                raise AWSClientError(f"Unexpected error: {error}")
        
        # This should not be reached, but just in case
        raise AWSClientError(f"Failed to call {service}.{function_name} after {self.max_retries} attempts")

================================================
FILE: aws_auto_inventory/core/organization.py
================================================
"""
Organization scanner for AWS Auto Inventory.
"""
import logging
from typing import Dict, Any, List, Optional

import boto3

from ..config.models import Inventory
from .region import RegionScanner, RegionResult

# Set up logger
logger = logging.getLogger(__name__)


class AccountResult:
    """
    Result of scanning a single AWS account.
    """
    
    def __init__(
        self, 
        account_id: str, 
        account_name: str, 
        regions: List[RegionResult],
        success: bool = True,
        error: Optional[str] = None
    ):
        """
        Initialize account result.
        
        Args:
            account_id: AWS account ID.
            account_name: AWS account name.
            regions: List of region scan results.
            success: Whether the scan was successful.
            error: Error message if scan failed.
        """
        self.account_id = account_id
        self.account_name = account_name
        self.regions = regions
        self.success = success
        self.error = error
    
    def to_dict(self) -> Dict[str, Any]:
        """
        Convert to dictionary.
        
        Returns:
            Dictionary representation of the account result.
        """
        serialized_regions = [region_result.to_dict() for region_result in self.regions]
        return {
            "account_id": self.account_id,
            "account_name": self.account_name,
            "regions": serialized_regions,
            "success": self.success,
            "error": self.error,
        }


class OrganizationScanner:
    """
    Scanner for AWS organizations.

    Discovers the active accounts of an AWS Organization through the
    management account, assumes a role in each member account, and delegates
    the actual resource scanning to a RegionScanner.
    """

    # Defaults for the STS AssumeRole call; overridable per call via
    # assume_role()'s keyword arguments.
    DEFAULT_SESSION_NAME = 'AWSAutoInventorySession'
    DEFAULT_DURATION_SECONDS = 3600

    def __init__(self):
        """
        Initialize organization scanner.
        """
        pass

    def get_organization_accounts(self, session: boto3.Session) -> List[Dict[str, str]]:
        """
        Get all active accounts in the AWS Organization.

        Args:
            session: boto3 Session for the management account.

        Returns:
            List of dictionaries containing account information (id, name, email).
            Returns an empty list if the accounts cannot be retrieved.
        """
        logger.info("Discovering accounts in the organization")

        org_client = session.client('organizations')
        accounts = []

        try:
            # list_accounts is paginated; iterate every page so large
            # organizations are fully enumerated.
            paginator = org_client.get_paginator('list_accounts')
            for page in paginator.paginate():
                for account in page['Accounts']:
                    # Skip SUSPENDED / PENDING_CLOSURE accounts.
                    if account['Status'] == 'ACTIVE':
                        accounts.append({
                            'id': account['Id'],
                            'name': account['Name'],
                            'email': account['Email']
                        })

            logger.info(f"Found {len(accounts)} active accounts in the organization")

        except Exception as e:
            # Best-effort: a failure here (e.g. missing organizations:ListAccounts
            # permission) yields an empty account list rather than a crash.
            logger.error(f"Error retrieving organization accounts: {str(e)}")
            return []

        return accounts

    def assume_role(
        self,
        session: boto3.Session,
        account_id: str,
        role_name: str,
        role_session_name: str = DEFAULT_SESSION_NAME,
        duration_seconds: int = DEFAULT_DURATION_SECONDS
    ) -> Optional[boto3.Session]:
        """
        Assume a role in the specified account.

        Args:
            session: boto3 Session for the management account.
            account_id: AWS account ID to assume the role in.
            role_name: Name of the IAM role to assume.
            role_session_name: STS session name recorded in CloudTrail.
                Defaults to DEFAULT_SESSION_NAME.
            duration_seconds: Lifetime of the temporary credentials in seconds.
                Defaults to DEFAULT_DURATION_SECONDS (3600).

        Returns:
            New boto3 Session with the assumed role credentials, or None if the
            role assumption fails.
        """
        logger.info(f"Assuming role {role_name} in account {account_id}")

        sts_client = session.client('sts')
        # NOTE(review): assumes the standard 'aws' partition; GovCloud/China
        # accounts would need 'aws-us-gov' / 'aws-cn' ARNs — confirm if needed.
        role_arn = f'arn:aws:iam::{account_id}:role/{role_name}'

        try:
            response = sts_client.assume_role(
                RoleArn=role_arn,
                RoleSessionName=role_session_name,
                DurationSeconds=duration_seconds
            )

            credentials = response['Credentials']
            assumed_session = boto3.Session(
                aws_access_key_id=credentials['AccessKeyId'],
                aws_secret_access_key=credentials['SecretAccessKey'],
                aws_session_token=credentials['SessionToken']
            )

            logger.info(f"Successfully assumed role in account {account_id}")
            return assumed_session

        except Exception as e:
            # Role may not exist or trust policy may not allow the caller;
            # signal failure with None so the caller can record it per-account.
            logger.error(f"Failed to assume role in account {account_id}: {str(e)}")
            return None

    def scan_organization(
        self,
        inventory: Inventory,
        region_scanner: RegionScanner
    ) -> List[AccountResult]:
        """
        Scan resources across all accounts in an organization.

        Accounts are processed sequentially; regions within each account are
        scanned via the provided region_scanner. Accounts whose role cannot be
        assumed are recorded as failed AccountResults rather than aborting.

        Args:
            inventory: Inventory configuration.
            region_scanner: Region scanner to use for scanning regions.

        Returns:
            List of account scan results (empty if no accounts were found).
        """
        logger.info("Starting organization scan")

        # Get the management account session
        management_session = boto3.Session(profile_name=inventory.aws.profile)

        # Get all accounts in the organization
        accounts = self.get_organization_accounts(management_session)

        if not accounts:
            logger.warning("No accounts found in the organization")
            return []

        account_results = []

        # Scan each account
        for account in accounts:
            account_id = account['id']
            account_name = account['name']

            logger.info(f"Processing account: {account_name} ({account_id})")

            # Assume role in the account
            account_session = self.assume_role(
                management_session,
                account_id,
                inventory.aws.role_name
            )

            if account_session:
                # Scan regions in the account; a failure in one region is
                # logged and skipped so the remaining regions still complete.
                region_results = []

                for region in inventory.aws.region:
                    try:
                        region_result = region_scanner.scan_region(
                            inventory,
                            account_session,
                            region
                        )
                        region_results.append(region_result)
                    except Exception as e:
                        logger.error(f"Error scanning region {region} in account {account_id}: {str(e)}")

                account_results.append(
                    AccountResult(
                        account_id=account_id,
                        account_name=account_name,
                        regions=region_results
                    )
                )
            else:
                account_results.append(
                    AccountResult(
                        account_id=account_id,
                        account_name=account_name,
                        regions=[],
                        success=False,
                        error=f"Failed to assume role in account {account_id}"
                    )
                )

        logger.info("Completed organization scan")

        return account_results

================================================
FILE: aws_auto_inventory/core/region.py
================================================
"""
Region scanner for AWS Auto Inventory.
"""
import logging
import concurrent.futures
from typing import Dict, Any, List, Optional

import boto3

from ..config.models import Inventory, Sheet
from .service import ServiceScanner, ServiceResult

# Set up logger
logger = logging.getLogger(__name__)


class RegionResult:
    """
    Aggregated outcome of scanning all configured services in one region.
    """

    def __init__(self, region: str, services: List[ServiceResult]):
        """
        Initialize region result.

        Args:
            region: AWS region.
            services: List of service scan results.
        """
        self.region = region
        self.services = services

    def to_dict(self) -> Dict[str, Any]:
        """
        Serialize this region result, recursively serializing each service
        result.

        Returns:
            Dictionary representation of the region result.
        """
        serialized = [svc.to_dict() for svc in self.services]
        return {"region": self.region, "services": serialized}


class RegionScanner:
    """
    Scans every configured service of a single AWS region, fanning the
    per-service calls out to a thread pool.
    """

    def __init__(
        self,
        max_retries: int = 3,
        retry_delay: int = 2,
        max_workers: Optional[int] = None
    ):
        """
        Initialize region scanner.

        Args:
            max_retries: Maximum number of retries for API calls.
            retry_delay: Base delay (in seconds) between retries.
            max_workers: Maximum number of worker threads for concurrent service scanning.
        """
        self.max_retries = max_retries
        self.retry_delay = retry_delay
        self.max_workers = max_workers
        self.service_scanner = ServiceScanner(max_retries, retry_delay)

    def scan_region(
        self,
        inventory: Inventory,
        session: boto3.Session,
        region: str
    ) -> RegionResult:
        """
        Scan all services in a region.

        Each sheet in the inventory is submitted to a thread pool; failures in
        individual services are captured as failed ServiceResults so one bad
        service never aborts the whole region.

        Args:
            inventory: Inventory configuration.
            session: boto3 Session.
            region: AWS region.

        Returns:
            Region scan result.
        """
        logger.info(f"Scanning region {region}")

        collected: List[ServiceResult] = []

        with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as pool:
            # Submit one task per sheet and remember which sheet each future
            # belongs to, for logging and error reporting.
            pending = {}
            for sheet in inventory.sheets:
                future = pool.submit(
                    self.service_scanner.scan_service, sheet, session, region
                )
                pending[future] = sheet

            for future in concurrent.futures.as_completed(pending):
                sheet = pending[future]
                try:
                    outcome = future.result()
                except Exception as e:
                    # scan_service normally returns a failed ServiceResult
                    # itself; this guards against unexpected errors raised
                    # while resolving the future.
                    logger.error(
                        f"Error processing service {sheet.service} with function {sheet.function} in region {region}: {str(e)}"
                    )
                    collected.append(
                        ServiceResult(
                            service=sheet.service,
                            function=sheet.function,
                            region=region,
                            result=None,
                            success=False,
                            error=f"Error processing service: {str(e)}"
                        )
                    )
                    continue

                collected.append(outcome)
                if outcome.success:
                    logger.info(
                        f"Successfully scanned service {sheet.service} with function {sheet.function} in region {region}"
                    )
                else:
                    logger.warning(
                        f"Failed to scan service {sheet.service} with function {sheet.function} in region {region}: {outcome.error}"
                    )

        logger.info(f"Completed scanning region {region}")

        return RegionResult(region=region, services=collected)

================================================
FILE: aws_auto_inventory/core/scan_engine.py
================================================
"""
Main scanning engine for AWS Auto Inventory.
"""
import logging
import concurrent.futures
from typing import Dict, Any, List, Optional, Union

import boto3

from ..config.models import Config, Inventory
from .organization import OrganizationScanner, AccountResult
from .region import RegionScanner, RegionResult

# Set up logger
logger = logging.getLogger(__name__)


class ScanResult:
    """
    Top-level result of scanning one inventory: either organization-wide
    (account_results) or single-account (region_results).
    """

    def __init__(
        self,
        inventory_name: str,
        account_results: Optional[List[AccountResult]] = None,
        region_results: Optional[List[RegionResult]] = None
    ):
        """
        Initialize scan result.

        Args:
            inventory_name: Name of the inventory.
            account_results: List of account scan results (for organization scans).
            region_results: List of region scan results (for single account scans).
        """
        self.inventory_name = inventory_name
        self.account_results = account_results or []
        self.region_results = region_results or []
        # Passing account_results (even an empty list) marks this result as an
        # organization-wide scan.
        self.is_organization_scan = account_results is not None

    def to_dict(self) -> Dict[str, Any]:
        """
        Serialize this scan result to a plain dictionary.

        Returns:
            Dictionary representation of the scan result. Organization scans
            use the key "organization_results"; account scans use
            "account_results".
        """
        if self.is_organization_scan:
            key = "organization_results"
            payload = [account.to_dict() for account in self.account_results]
        else:
            key = "account_results"
            payload = [region.to_dict() for region in self.region_results]

        return {"inventory_name": self.inventory_name, key: payload}


class ScanEngine:
    """
    Orchestrates inventory scans, dispatching each inventory to either an
    organization-wide or a single-account scan based on its configuration.
    """

    def __init__(
        self,
        max_retries: int = 3,
        retry_delay: int = 2,
        max_workers_regions: Optional[int] = None,
        max_workers_services: Optional[int] = None
    ):
        """
        Initialize scan engine.

        Args:
            max_retries: Maximum number of retries for API calls.
            retry_delay: Base delay (in seconds) between retries.
            max_workers_regions: Maximum number of worker threads for concurrent region scanning.
            max_workers_services: Maximum number of worker threads for concurrent service scanning.
        """
        self.max_retries = max_retries
        self.retry_delay = retry_delay
        self.max_workers_regions = max_workers_regions
        self.max_workers_services = max_workers_services

        self.organization_scanner = OrganizationScanner()
        self.region_scanner = RegionScanner(
            max_retries=max_retries,
            retry_delay=retry_delay,
            max_workers=max_workers_services
        )

    def scan(self, config: Config) -> List[ScanResult]:
        """
        Perform scanning based on configuration.

        Args:
            config: Configuration to use for scanning.

        Returns:
            List of scan results, one for each inventory in the configuration.
        """
        results: List[ScanResult] = []

        for inventory in config.inventories:
            logger.info(f"Starting scan for inventory: {inventory.name}")

            # Organization flag selects the scan strategy for this inventory.
            run_scan = (
                self._scan_organization
                if inventory.aws.organization
                else self._scan_account
            )
            results.append(run_scan(inventory))

            logger.info(f"Completed scan for inventory: {inventory.name}")

        return results

    def _scan_organization(self, inventory: Inventory) -> ScanResult:
        """
        Scan across an organization.

        Args:
            inventory: Inventory configuration.

        Returns:
            Scan result.
        """
        logger.info(f"Starting organization scan for inventory: {inventory.name}")

        account_results = self.organization_scanner.scan_organization(
            inventory, self.region_scanner
        )

        logger.info(f"Completed organization scan for inventory: {inventory.name}")

        return ScanResult(
            inventory_name=inventory.name,
            account_results=account_results
        )

    def _scan_account(self, inventory: Inventory) -> ScanResult:
        """
        Scan a single account, fanning regions out to a thread pool.

        Args:
            inventory: Inventory configuration.

        Returns:
            Scan result. Regions that fail are logged and omitted from the
            result.
        """
        logger.info(f"Starting account scan for inventory: {inventory.name}")

        # Session built from the configured profile.
        session = boto3.Session(profile_name=inventory.aws.profile)

        gathered: List[RegionResult] = []

        with concurrent.futures.ThreadPoolExecutor(
            max_workers=self.max_workers_regions
        ) as pool:
            # One future per configured region, keyed back to the region name.
            pending = {
                pool.submit(
                    self.region_scanner.scan_region, inventory, session, region
                ): region
                for region in inventory.aws.region
            }

            for future in concurrent.futures.as_completed(pending):
                region = pending[future]
                try:
                    gathered.append(future.result())
                    logger.info(f"Successfully scanned region {region}")
                except Exception as e:
                    logger.error(f"Error scanning region {region}: {str(e)}")

        logger.info(f"Completed account scan for inventory: {inventory.name}")

        return ScanResult(
            inventory_name=inventory.name,
            region_results=gathered
        )

================================================
FILE: aws_auto_inventory/core/service.py
================================================
"""
Service scanner for AWS Auto Inventory.
"""
import logging
from typing import Dict, Any, List, Optional

import boto3

from ..config.models import Sheet
from .aws_client import AWSClient, AWSClientError

# Set up logger
logger = logging.getLogger(__name__)


class ServiceResult:
    """
    Outcome of a single service/function API call in one region.
    """

    def __init__(
        self,
        service: str,
        function: str,
        region: str,
        result: Any,
        success: bool = True,
        error: Optional[str] = None
    ):
        """
        Initialize service result.

        Args:
            service: AWS service name.
            function: API function name.
            region: AWS region.
            result: API response.
            success: Whether the scan was successful.
            error: Error message if scan failed.
        """
        self.service = service
        self.function = function
        self.region = region
        self.result = result
        self.success = success
        self.error = error

    def to_dict(self) -> Dict[str, Any]:
        """
        Serialize this result to a plain dictionary.

        Returns:
            Dictionary representation of the service result.
        """
        fields = ("service", "function", "region", "result", "success", "error")
        return {name: getattr(self, name) for name in fields}


class ServiceScanner:
    """
    Performs a single AWS API call described by a Sheet and wraps the outcome
    in a ServiceResult, never raising to the caller.
    """

    def __init__(self, max_retries: int = 3, retry_delay: int = 2):
        """
        Initialize service scanner.

        Args:
            max_retries: Maximum number of retries for API calls.
            retry_delay: Base delay (in seconds) between retries.
        """
        self.max_retries = max_retries
        self.retry_delay = retry_delay

    def scan_service(
        self,
        sheet: Sheet,
        session: boto3.Session,
        region: str
    ) -> ServiceResult:
        """
        Scan a service in a region.

        Args:
            sheet: Sheet configuration.
            session: boto3 Session.
            region: AWS region.

        Returns:
            Service scan result; failures are reported via success=False rather
            than by raising.
        """
        logger.info(
            f"Scanning service {sheet.service} with function {sheet.function} in region {region}"
        )

        client = AWSClient(session, self.max_retries, self.retry_delay)

        def _failure(message: str) -> ServiceResult:
            # Build a failed result for this sheet/region pair.
            return ServiceResult(
                service=sheet.service,
                function=sheet.function,
                region=region,
                result=None,
                success=False,
                error=message
            )

        try:
            response = client.call_api(
                sheet.service,
                sheet.function,
                region,
                sheet.parameters,
                sheet.result_key
            )
        except AWSClientError as e:
            logger.error(
                f"Error scanning service {sheet.service} with function {sheet.function} in region {region}: {str(e)}"
            )
            return _failure(str(e))
        except Exception as e:
            # Anything not already wrapped by AWSClient is still captured so
            # the caller always receives a ServiceResult.
            logger.error(
                f"Unexpected error scanning service {sheet.service} with function {sheet.function} in region {region}: {str(e)}"
            )
            return _failure(f"Unexpected error: {str(e)}")

        logger.info(
            f"Successfully scanned service {sheet.service} with function {sheet.function} in region {region}"
        )

        return ServiceResult(
            service=sheet.service,
            function=sheet.function,
            region=region,
            result=response
        )


class ResourceFilter:
    """
    Placeholder filter for AWS resources.
    """

    def apply_filters(self, results: Any, filters: Dict[str, Any]) -> Any:
        """
        Apply filters to results.

        Args:
            results: API results.
            filters: Filters to apply.

        Returns:
            The results unchanged. This is a stub: real filtering (e.g. via
            JMESPath expressions) is not implemented yet, and both the empty
            and non-empty filter cases pass the input straight through.
        """
        return results

================================================
FILE: aws_auto_inventory/utils/__init__.py
================================================


================================================
FILE: aws_auto_inventory/utils/logging.py
================================================
"""
Logging utilities for AWS Auto Inventory.
"""
import os
import logging
from datetime import datetime
from typing import Optional


def setup_logging(
    log_dir: str,
    log_level: str = "INFO",
    log_file_prefix: str = "aws_auto_inventory"
) -> logging.Logger:
    """
    Configure root logging to write to both a timestamped file and the console.

    Args:
        log_dir: Directory to store log files (created if missing).
        log_level: Logging level name (DEBUG, INFO, WARNING, ERROR, CRITICAL);
            unknown names fall back to INFO.
        log_file_prefix: Prefix for the log file name.

    Returns:
        The "aws_auto_inventory" logger, configured at the requested level.
    """
    os.makedirs(log_dir, exist_ok=True)

    # Minute-resolution timestamp; ':' is replaced so the name is valid on
    # filesystems that forbid colons.
    stamp = datetime.now().isoformat(timespec="minutes").replace(":", "-")
    log_file = os.path.join(log_dir, f"{log_file_prefix}_{stamp}.log")

    # Unknown level names silently fall back to INFO.
    level = getattr(logging, log_level.upper(), logging.INFO)

    logging.basicConfig(
        level=level,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        handlers=[
            logging.FileHandler(log_file),
            logging.StreamHandler()  # mirror everything to the console
        ]
    )

    app_logger = logging.getLogger("aws_auto_inventory")
    app_logger.setLevel(level)

    app_logger.info(f"Logging initialized at level {log_level}")
    app_logger.info(f"Log file: {log_file}")

    return app_logger


def get_logger(name: Optional[str] = None) -> logging.Logger:
    """
    Get a logger under the "aws_auto_inventory" namespace.

    Args:
        name: Child logger name. If None, the application's base logger is
            returned.

    Returns:
        Logger instance ("aws_auto_inventory" or "aws_auto_inventory.<name>").
    """
    base = "aws_auto_inventory"
    return logging.getLogger(base if name is None else f"{base}.{name}")

================================================
FILE: aws_auto_inventory/utils/threading.py
================================================
"""
Threading utilities for AWS Auto Inventory.
"""
import os
import logging
import concurrent.futures
from typing import List, Callable, TypeVar, Generic, Any, Dict, Optional

# Set up logger
logger = logging.getLogger(__name__)

# Type variables for generic functions
T = TypeVar('T')  # Input type
R = TypeVar('R')  # Result type


class ThreadingManager(Generic[T, R]):
    """
    Manager for concurrent execution of tasks.

    Runs a callable over a list of items on a thread pool and collects a
    per-item outcome record, so a failure on one item never aborts the batch.
    """

    def __init__(self, max_workers: Optional[int] = None):
        """
        Initialize threading manager.

        Args:
            max_workers: Maximum number of worker threads. If None, defaults
                        to min(32, cpu_count * 5), mirroring the heuristic
                        used by concurrent.futures.ThreadPoolExecutor.
        """
        # os.cpu_count() may return None on platforms where the CPU count is
        # undeterminable; fall back to 1 so the arithmetic never raises.
        self.max_workers = max_workers or min(32, (os.cpu_count() or 1) * 5)

    def execute(
        self,
        func: Callable[[T], R],
        items: List[T]
    ) -> List[Dict[str, Any]]:
        """
        Execute a function concurrently for each item in a list.

        Args:
            func: Function to execute for each item.
            items: List of items to process.

        Returns:
            List of dictionaries containing the item, result, success flag, and
            error message. Results appear in completion order, not input order.
        """
        # Delegate to execute_with_progress (with no callback) so the
        # submit/collect logic exists in exactly one place.
        return self.execute_with_progress(func, items)

    def execute_with_progress(
        self,
        func: Callable[[T], R],
        items: List[T],
        progress_callback: Optional[Callable[[int, int], None]] = None
    ) -> List[Dict[str, Any]]:
        """
        Execute a function concurrently for each item in a list with progress reporting.

        Args:
            func: Function to execute for each item.
            items: List of items to process.
            progress_callback: Optional callback to report progress. Takes two
                              arguments: completed_count and total_count.

        Returns:
            List of dictionaries containing the item, result, success flag, and
            error message. Results appear in completion order, not input order.
        """
        results: List[Dict[str, Any]] = []
        total_count = len(items)
        completed_count = 0

        with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            # Submit every item up front and map each future back to its item.
            future_to_item = {executor.submit(func, item): item for item in items}

            # Collect outcomes as they finish; failures are recorded, not raised.
            for future in concurrent.futures.as_completed(future_to_item):
                item = future_to_item[future]
                try:
                    result = future.result()
                    results.append({
                        "item": item,
                        "result": result,
                        "success": True,
                        "error": None
                    })
                except Exception as e:
                    logger.error(f"Error processing item {item}: {str(e)}")
                    results.append({
                        "item": item,
                        "result": None,
                        "success": False,
                        "error": str(e)
                    })

                completed_count += 1
                if progress_callback:
                    progress_callback(completed_count, total_count)

        return results

================================================
FILE: doc/habits.yaml
================================================
url: https://github.com/aws-samples/aws-auto-inventory
logo: doc/logo.png

# optional
# badges:
#   # optional
#   custom:
#     - title: lorem
#       icon: https://
#       url: https://
#   # optional
#   workflows:
#     - name: hygiene # optional, needs to match filename on .github/workflows/

screenshots:
  - label: ec2-inventory-result
    url: doc/screenshots/1.png
    caption: EC2 Inventory Result

title: AWS Automated Inventory

# required
description: |-
  Automates creation of detailed inventories from AWS resources.

  ### Problem
  Projects usually have several resources and fetching all the information about these resources manually is a very time-consuming task.
  This issue is intensified when the same project has multiple accounts and/or environments, e.g.: NonProd, QA and/or Prod.

  ### Solution
  Provide a simple way to fetch the required information and generate a spreadsheet.
  The information can be filtered, e.g. filter results by tag:x, vpc, subnets, etc.
  Additionally, inventories can be generated related to many services, which are collected and organized per sheet in the spreadsheet.

# optional
usage: |-
  ```
  aws-auto-inventory --help
  usage: aws-auto-inventory [-h] --name NAME
  Automates creation of detailed inventories from AWS resources.
  optional arguments:
    -h, --help            show this help message and exit
    --name NAME, -n NAME  inventory name
  ```

prerequisites:
  content: |-
    A list of things you need, or how to install them.
  references:
    - name: Python 3
      description: Python is a high-level, general-purpose programming language.
      url: https://www.python.org

installation: |-
  Download the binary under [releases](https://github.com/aws-samples/aws-auto-inventory/releases).

  You will need to create a `config.yaml` file in order to tell the tool how to generate your inventory, here are the default search paths for each platform:
  * OS X: `~/.config/aws-auto-inventory/config.yaml` or  `~/Library/Application Support/aws-auto-inventory/config.yaml`
  * Other Unix: `$XDG_CONFIG_HOME/aws-auto-inventory/config.yaml` or  `~/.config/aws-auto-inventory/config.yaml`
  * Windows: `%APPDATA%\aws-auto-inventory\config.yaml` where the `APPDATA` environment variable falls back to `%HOME%\AppData\Roaming\config.yaml` if undefined
  You can use the [config-sample](config-sample.yaml) as an example. A snippet can be found below:
  ```yaml
  inventories:
    - name: your-inventory-name
      aws: # optional
        profile: your-aws-profile # if not provided, the AWS environment variables will be used instead
        region: # if not provided, 'us-east-1' will be used as default region
          - us-east-1
      excel:
        transpose: true
      sheets:
        - name: EC2 # sheet name on Excel
          service: ec2 # the boto3 client of an AWS service
          function: describe_instances # the client method of the service defined above
          result_key: Reservations # [optional]: The first key of the response dict
        - name: EBS
          service: ec2
          function: describe_volumes
          result_key: Volumes
  ```
  If you are interested in building an inventory for multiple AWS Accounts
  (within your AWS organization) with the same sheets, you can use the
  [config-sample-for-organization](config-sample-for-organization.yaml) for simplicity.
  Code snippet:
  ```yaml
  Sheets: &sheets
    - name: CloudFrontDistros
      service: cloudfront
      function: list_distributions
      result_key: DistributionList
    - name: S3Buckets
      service: s3
      function: list_buckets
      result_key: Buckets
  inventories:
    - name: your-org-master
      aws:
        profile: your-org
        region:
          - us-east-1
      excel:
        transpose: true
      sheets: *sheets
    - name: your-org-account1
      aws:
        profile: your-org-account1
        region:
          - us-east-1
      excel:
        transpose: true
      sheets: *sheets
    - name: your-org-account2
      aws:
        profile: your-org-account2
        region:
          - us-east-1
      excel:
        transpose: true
      sheets: *sheets
  ```
  Then you need to run the auto-inventory script multiple times for your accounts as follows:
  ```shell
  ./dist/aws-auto-inventory --name your-org-master
  ./dist/aws-auto-inventory --name your-org-account1
  ./dist/aws-auto-inventory --name your-org-account2
  ```
  Now, download the binary according to your operating system and platform and execute it, informing which inventory you want to generate.
  The tool will create a folder `aws-auto-inventory-report`, in the current path, with the inventory report inside.

testing: |-
  AWS-Auto-Inventory uses [boto3](https://github.com/boto/boto3).
  You can use any service that contains any list or describe method to fetch information about your resources.
  ### Parameters
  You can use [boto3](https://github.com/boto/boto3) parameters to narrow down your search results.
  #### Filter by tag:Name
  ```
  sheets:
    - name: VPC
      service: ec2
      function: describe_vpcs
      result_key: Vpcs
      parameters:
        Filters:
          - Name: tag:Name
            Values:
              - my-vpc
  ```
  ### Filter by vpc-id
  ```
  sheets:
    - name: Subnets
      service: ec2
      function: describe_subnets
      result_key: Subnets
      parameters:
        Filters:
          - Name: vpc-id
            Values:
              - vpc-xxx
  ```
  ### Find a particular RDS instance
  ```
  sheets:
    - name: RDS
      service: rds
      function: describe_db_instances
      result_key: DBInstances
      parameters:
        DBInstanceIdentifier: the-name-of-my-rds-instance
  ```
  ### Find EC2 instances by a particular tag
  ```
  sheets:
    - name: EC2
      service: ec2
      function: describe_instances
      result_key: Reservations
      parameters:
        Filters:
          - Name: tag:ApplicationName
            Values:
              - my-application
  ```
  ### Find a particular IAM Role
  ```
  sheets:
    - name: IAM.Role
      service: iam
      function: get_role
      result_key: Role
      parameters:
        RoleName: my-role
  ```
  ### Development
  ```
  # Linux/MacOS:
  # clone the project and enter cloned directory
  make init build
  ./dist/aws-auto-inventory --name <your-inventory-name>
  ```

references:
  - name: AWS Code Habits
    url:  https://github.com/awslabs/aws-code-habits
    description: A library with Make targets, Ansible playbooks, Jinja templates (and more) designed to boost common software development tasks and enhance governance.

license: Apache License 2.0

copyright: Copyright Amazon, Inc. or its affiliates. All Rights Reserved.

trademark: no

anchors:
  - name: habits
    url: https://github.com/awslabs/aws-code-habits


================================================
FILE: examples/config_example.json
================================================
{
  "inventories": [
    {
      "name": "my-aws-inventory",
      "aws": {
        "profile": "default",
        "region": [
          "us-east-1",
          "us-west-2"
        ],
        "organization": false,
        "role_name": "OrganizationAccountAccessRole"
      },
      "excel": {
        "transpose": true,
        "formatting": {
          "header_style": {
            "bold": true,
            "bg_color": "#4F81BD",
            "font_color": "#FFFFFF"
          }
        }
      },
      "sheets": [
        {
          "name": "EC2Instances",
          "service": "ec2",
          "function": "describe_instances",
          "result_key": "Reservations",
          "parameters": {
            "Filters": [
              {
                "Name": "instance-state-name",
                "Values": [
                  "running"
                ]
              }
            ]
          }
        },
        {
          "name": "S3Buckets",
          "service": "s3",
          "function": "list_buckets",
          "result_key": "Buckets"
        },
        {
          "name": "IAMRoles",
          "service": "iam",
          "function": "list_roles",
          "result_key": "Roles"
        },
        {
          "name": "LambdaFunctions",
          "service": "lambda",
          "function": "list_functions",
          "result_key": "Functions"
        }
      ]
    }
  ]
}

================================================
FILE: examples/config_example.yaml
================================================
# AWS Auto Inventory - Example YAML Configuration

inventories:
  - name: my-aws-inventory
    aws:
      # AWS profile to use (optional, uses default credentials if not specified)
      profile: default
      
      # AWS regions to scan
      region:
        - us-east-1
        - us-west-2
      
      # Set to true to scan across all accounts in the organization
      organization: false
      
      # Role name to assume in each account (only used if organization is true)
      role_name: OrganizationAccountAccessRole
    
    # Excel output configuration
    excel:
      # Whether to transpose data in Excel output
      transpose: true
      
      # Additional formatting options
      formatting:
        header_style:
          bold: true
          bg_color: "#4F81BD"
          font_color: "#FFFFFF"
    
    # Sheets to include in the inventory
    sheets:
      # EC2 Instances
      - name: EC2Instances
        service: ec2
        function: describe_instances
        result_key: Reservations
        parameters:
          Filters:
            - Name: instance-state-name
              Values:
                - running
      
      # S3 Buckets
      - name: S3Buckets
        service: s3
        function: list_buckets
        result_key: Buckets
      
      # IAM Roles
      - name: IAMRoles
        service: iam
        function: list_roles
        result_key: Roles
      
      # Lambda Functions
      - name: LambdaFunctions
        service: lambda
        function: list_functions
        result_key: Functions

================================================
FILE: examples/config_organization_example.yaml
================================================
# AWS Auto Inventory - Example Organization-wide Scanning Configuration

# Define common sheets to reuse across inventories
Sheets: &sheets
  - name: EC2Instances
    service: ec2
    function: describe_instances
    result_key: Reservations
    parameters:
      Filters:
        - Name: instance-state-name
          Values:
            - running
  
  - name: S3Buckets
    service: s3
    function: list_buckets
    result_key: Buckets
  
  - name: IAMRoles
    service: iam
    function: list_roles
    result_key: Roles
  
  - name: LambdaFunctions
    service: lambda
    function: list_functions
    result_key: Functions

inventories:
  # Management account inventory
  - name: management-account
    aws:
      # AWS profile for the management account
      profile: management
      
      # AWS regions to scan
      region:
        - us-east-1
        - us-west-2
      
      # Not scanning across organization for the management account itself
      organization: false
    
    # Excel output configuration
    excel:
      transpose: true
    
    # Use the common sheets defined above
    sheets: *sheets
  
  # Organization-wide inventory
  - name: organization-wide
    aws:
      # AWS profile for the management account with organization access
      profile: management
      
      # AWS regions to scan
      region:
        - us-east-1
        - us-west-2
      
      # Enable organization scanning
      organization: true
      
      # Role name to assume in each account
      role_name: OrganizationAccountAccessRole
    
    # Excel output configuration
    excel:
      transpose: true
    
    # Use the common sheets defined above
    sheets: *sheets

================================================
FILE: organization_scanner.py
================================================
# -*- coding: utf-8 -*-
import boto3
import os
import json
from scan import main as scan_account
from datetime import datetime

def get_organization_accounts(session):
    """Get all active accounts in the AWS Organization.

    Args:
        session: The boto3 Session for the management account.

    Returns:
        A list of dictionaries containing account information (id, name, email).
        An empty list is returned if the account listing fails.
    """
    org_client = session.client('organizations')

    try:
        pages = org_client.get_paginator('list_accounts').paginate()
        # Keep only ACTIVE accounts and flatten the paginated responses.
        return [
            {
                'id': acct['Id'],
                'name': acct['Name'],
                'email': acct['Email'],
            }
            for page in pages
            for acct in page['Accounts']
            if acct['Status'] == 'ACTIVE'
        ]
    except Exception as e:
        print(f"Error retrieving organization accounts: {e}")
        return []

def assume_role(session, account_id, role_name):
    """Assume a role in the specified account.

    Args:
        session: The boto3 Session for the management account.
        account_id: The AWS account ID to assume the role in.
        role_name: The name of the IAM role to assume.

    Returns:
        A new boto3 Session with the assumed role credentials, or None if the role assumption fails.
    """
    sts = session.client('sts')

    try:
        credentials = sts.assume_role(
            RoleArn=f'arn:aws:iam::{account_id}:role/{role_name}',
            RoleSessionName='AWSAutoInventorySession',
            DurationSeconds=3600,
        )['Credentials']
        return boto3.Session(
            aws_access_key_id=credentials['AccessKeyId'],
            aws_secret_access_key=credentials['SecretAccessKey'],
            aws_session_token=credentials['SessionToken'],
        )
    except Exception as e:
        print(f"Failed to assume role in account {account_id}: {e}")
        return None

def scan_organization(org_role_name, scan_config, regions, output_dir, log_level, max_retries, retry_delay, concurrent_regions, concurrent_services):
    """Scan resources across all accounts in an organization.

    Args:
        org_role_name: The IAM role name to assume in each account.
        scan_config: The path to the JSON file or URL containing the AWS services to scan.
        regions: The AWS regions to scan.
        output_dir: The directory to store the results.
        log_level: The log level for the script.
        max_retries: The maximum number of retries for each service.
        retry_delay: The delay before each retry.
        concurrent_regions: The number of regions to process concurrently.
        concurrent_services: The number of services to process concurrently for each region.
    """
    # Session for the management account (from the default credential chain).
    management_session = boto3.Session()

    # Timestamped output directory so repeated runs never collide.
    stamp = datetime.now().isoformat(timespec="minutes").replace(":", "-")
    org_output_dir = os.path.join(output_dir, f"organization-{stamp}")
    os.makedirs(org_output_dir, exist_ok=True)

    # Discover all active member accounts.
    print("Discovering accounts in the organization...")
    accounts = get_organization_accounts(management_session)
    print(f"Found {len(accounts)} active accounts in the organization.")

    # Persist the account listing alongside the per-account results.
    with open(os.path.join(org_output_dir, "accounts.json"), "w") as f:
        json.dump(accounts, f, indent=2)

    for account in accounts:
        account_id = account['id']
        account_name = account['name']

        print(f"\nProcessing account: {account_name} ({account_id})")

        # Obtain credentials for the member account via STS.
        print(f"Assuming role {org_role_name} in account {account_id}...")
        account_session = assume_role(management_session, account_id, org_role_name)

        if not account_session:
            print(f"Skipping account {account_name} ({account_id}) due to role assumption failure")
            continue

        print(f"Successfully assumed role in account {account_id}")

        # Each account gets its own subdirectory keyed by account ID.
        account_output_dir = os.path.join(org_output_dir, account_id)
        os.makedirs(account_output_dir, exist_ok=True)

        with open(os.path.join(account_output_dir, "account_info.json"), "w") as f:
            json.dump(account, f, indent=2)

        # Delegate the actual resource scan to the single-account scanner.
        print(f"Starting scan for account {account_id}...")
        scan_account(
            scan_config,
            regions,
            account_output_dir,
            log_level,
            max_retries,
            retry_delay,
            concurrent_regions,
            concurrent_services,
            session=account_session
        )
        print(f"Completed scan for account {account_id}")

    print(f"\nOrganization scan complete. Results stored in {org_output_dir}")

================================================
FILE: requirements-and-versions.txt
================================================
boto3==1.28.18
botocore==1.31.18
cfgv==3.3.1
distlib==0.3.7
filelock==3.20.3
identify==2.5.26
jmespath==1.0.1
nodeenv==1.8.0
platformdirs==3.10.0
pre-commit==3.3.3
python-dateutil==2.8.2
PyYAML==6.0.1
s3transfer==0.6.1
six==1.16.0
urllib3==2.6.3
virtualenv==20.36.1
jq==1.10.0


================================================
FILE: requirements.txt
================================================
boto3>=1.20.0
pydantic>=1.8.0
jq>=1.6.0
pandas>=1.3.0
xlsxwriter>=3.0.0
pyyaml>=6.0

================================================
FILE: scan/sample/all_services.json
================================================
[
  {
    "function": "get_access_preview",
    "service": "accessanalyzer"
  },
  {
    "function": "get_analyzed_resource",
    "service": "accessanalyzer"
  },
  {
    "function": "get_analyzer",
    "service": "accessanalyzer"
  },
  {
    "function": "get_archive_rule",
    "service": "accessanalyzer"
  },
  {
    "function": "get_finding",
    "service": "accessanalyzer"
  },
  {
    "function": "get_generated_policy",
    "service": "accessanalyzer"
  },
  {
    "function": "get_paginator",
    "service": "accessanalyzer"
  },
  {
    "function": "get_waiter",
    "service": "accessanalyzer"
  },
  {
    "function": "list_access_preview_findings",
    "service": "accessanalyzer"
  },
  {
    "function": "list_access_previews",
    "service": "accessanalyzer"
  },
  {
    "function": "list_analyzed_resources",
    "service": "accessanalyzer"
  },
  {
    "function": "list_analyzers",
    "service": "accessanalyzer"
  },
  {
    "function": "list_archive_rules",
    "service": "accessanalyzer"
  },
  {
    "function": "list_findings",
    "service": "accessanalyzer"
  },
  {
    "function": "list_policy_generations",
    "service": "accessanalyzer"
  },
  {
    "function": "list_tags_for_resource",
    "service": "accessanalyzer"
  },
  {
    "function": "get_alternate_contact",
    "service": "account"
  },
  {
    "function": "get_contact_information",
    "service": "account"
  },
  {
    "function": "get_paginator",
    "service": "account"
  },
  {
    "function": "get_region_opt_status",
    "service": "account"
  },
  {
    "function": "get_waiter",
    "service": "account"
  },
  {
    "function": "list_regions",
    "service": "account"
  },
  {
    "function": "describe_certificate",
    "service": "acm"
  },
  {
    "function": "get_account_configuration",
    "service": "acm"
  },
  {
    "function": "get_certificate",
    "service": "acm"
  },
  {
    "function": "get_paginator",
    "service": "acm"
  },
  {
    "function": "get_waiter",
    "service": "acm"
  },
  {
    "function": "list_certificates",
    "service": "acm"
  },
  {
    "function": "list_tags_for_certificate",
    "service": "acm"
  },
  {
    "function": "describe_certificate_authority",
    "service": "acm-pca"
  },
  {
    "function": "describe_certificate_authority_audit_report",
    "service": "acm-pca"
  },
  {
    "function": "get_certificate",
    "service": "acm-pca"
  },
  {
    "function": "get_certificate_authority_certificate",
    "service": "acm-pca"
  },
  {
    "function": "get_certificate_authority_csr",
    "service": "acm-pca"
  },
  {
    "function": "get_paginator",
    "service": "acm-pca"
  },
  {
    "function": "get_policy",
    "service": "acm-pca"
  },
  {
    "function": "get_waiter",
    "service": "acm-pca"
  },
  {
    "function": "list_certificate_authorities",
    "service": "acm-pca"
  },
  {
    "function": "list_permissions",
    "service": "acm-pca"
  },
  {
    "function": "list_tags",
    "service": "acm-pca"
  },
  {
    "function": "get_address_book",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_conference_preference",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_conference_provider",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_contact",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_device",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_gateway",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_gateway_group",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_invitation_configuration",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_network_profile",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_paginator",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_profile",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_room",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_room_skill_parameter",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_skill_group",
    "service": "alexaforbusiness"
  },
  {
    "function": "get_waiter",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_business_report_schedules",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_conference_providers",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_device_events",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_gateway_groups",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_gateways",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_skills",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_skills_store_categories",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_skills_store_skills_by_category",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_smart_home_appliances",
    "service": "alexaforbusiness"
  },
  {
    "function": "list_tags",
    "service": "alexaforbusiness"
  },
  {
    "function": "describe_alert_manager_definition",
    "service": "amp"
  },
  {
    "function": "describe_logging_configuration",
    "service": "amp"
  },
  {
    "function": "describe_rule_groups_namespace",
    "service": "amp"
  },
  {
    "function": "describe_workspace",
    "service": "amp"
  },
  {
    "function": "get_paginator",
    "service": "amp"
  },
  {
    "function": "get_waiter",
    "service": "amp"
  },
  {
    "function": "list_rule_groups_namespaces",
    "service": "amp"
  },
  {
    "function": "list_tags_for_resource",
    "service": "amp"
  },
  {
    "function": "list_workspaces",
    "service": "amp"
  },
  {
    "function": "get_app",
    "service": "amplify"
  },
  {
    "function": "get_artifact_url",
    "service": "amplify"
  },
  {
    "function": "get_backend_environment",
    "service": "amplify"
  },
  {
    "function": "get_branch",
    "service": "amplify"
  },
  {
    "function": "get_domain_association",
    "service": "amplify"
  },
  {
    "function": "get_job",
    "service": "amplify"
  },
  {
    "function": "get_paginator",
    "service": "amplify"
  },
  {
    "function": "get_waiter",
    "service": "amplify"
  },
  {
    "function": "get_webhook",
    "service": "amplify"
  },
  {
    "function": "list_apps",
    "service": "amplify"
  },
  {
    "function": "list_artifacts",
    "service": "amplify"
  },
  {
    "function": "list_backend_environments",
    "service": "amplify"
  },
  {
    "function": "list_branches",
    "service": "amplify"
  },
  {
    "function": "list_domain_associations",
    "service": "amplify"
  },
  {
    "function": "list_jobs",
    "service": "amplify"
  },
  {
    "function": "list_tags_for_resource",
    "service": "amplify"
  },
  {
    "function": "list_webhooks",
    "service": "amplify"
  },
  {
    "function": "get_backend",
    "service": "amplifybackend"
  },
  {
    "function": "get_backend_api",
    "service": "amplifybackend"
  },
  {
    "function": "get_backend_api_models",
    "service": "amplifybackend"
  },
  {
    "function": "get_backend_auth",
    "service": "amplifybackend"
  },
  {
    "function": "get_backend_job",
    "service": "amplifybackend"
  },
  {
    "function": "get_backend_storage",
    "service": "amplifybackend"
  },
  {
    "function": "get_paginator",
    "service": "amplifybackend"
  },
  {
    "function": "get_token",
    "service": "amplifybackend"
  },
  {
    "function": "get_waiter",
    "service": "amplifybackend"
  },
  {
    "function": "list_backend_jobs",
    "service": "amplifybackend"
  },
  {
    "function": "list_s3_buckets",
    "service": "amplifybackend"
  },
  {
    "function": "get_codegen_job",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_component",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_form",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_metadata",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_paginator",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_theme",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_waiter",
    "service": "amplifyuibuilder"
  },
  {
    "function": "list_codegen_jobs",
    "service": "amplifyuibuilder"
  },
  {
    "function": "list_components",
    "service": "amplifyuibuilder"
  },
  {
    "function": "list_forms",
    "service": "amplifyuibuilder"
  },
  {
    "function": "list_themes",
    "service": "amplifyuibuilder"
  },
  {
    "function": "get_account",
    "service": "apigateway"
  },
  {
    "function": "get_api_key",
    "service": "apigateway"
  },
  {
    "function": "get_api_keys",
    "service": "apigateway"
  },
  {
    "function": "get_authorizer",
    "service": "apigateway"
  },
  {
    "function": "get_authorizers",
    "service": "apigateway"
  },
  {
    "function": "get_base_path_mapping",
    "service": "apigateway"
  },
  {
    "function": "get_base_path_mappings",
    "service": "apigateway"
  },
  {
    "function": "get_client_certificate",
    "service": "apigateway"
  },
  {
    "function": "get_client_certificates",
    "service": "apigateway"
  },
  {
    "function": "get_deployment",
    "service": "apigateway"
  },
  {
    "function": "get_deployments",
    "service": "apigateway"
  },
  {
    "function": "get_documentation_part",
    "service": "apigateway"
  },
  {
    "function": "get_documentation_parts",
    "service": "apigateway"
  },
  {
    "function": "get_documentation_version",
    "service": "apigateway"
  },
  {
    "function": "get_documentation_versions",
    "service": "apigateway"
  },
  {
    "function": "get_domain_name",
    "service": "apigateway"
  },
  {
    "function": "get_domain_names",
    "service": "apigateway"
  },
  {
    "function": "get_export",
    "service": "apigateway"
  },
  {
    "function": "get_gateway_response",
    "service": "apigateway"
  },
  {
    "function": "get_gateway_responses",
    "service": "apigateway"
  },
  {
    "function": "get_integration",
    "service": "apigateway"
  },
  {
    "function": "get_integration_response",
    "service": "apigateway"
  },
  {
    "function": "get_method",
    "service": "apigateway"
  },
  {
    "function": "get_method_response",
    "service": "apigateway"
  },
  {
    "function": "get_model",
    "service": "apigateway"
  },
  {
    "function": "get_model_template",
    "service": "apigateway"
  },
  {
    "function": "get_models",
    "service": "apigateway"
  },
  {
    "function": "get_paginator",
    "service": "apigateway"
  },
  {
    "function": "get_request_validator",
    "service": "apigateway"
  },
  {
    "function": "get_request_validators",
    "service": "apigateway"
  },
  {
    "function": "get_resource",
    "service": "apigateway"
  },
  {
    "function": "get_resources",
    "service": "apigateway"
  },
  {
    "function": "get_rest_api",
    "service": "apigateway"
  },
  {
    "function": "get_rest_apis",
    "service": "apigateway"
  },
  {
    "function": "get_sdk",
    "service": "apigateway"
  },
  {
    "function": "get_sdk_type",
    "service": "apigateway"
  },
  {
    "function": "get_sdk_types",
    "service": "apigateway"
  },
  {
    "function": "get_stage",
    "service": "apigateway"
  },
  {
    "function": "get_stages",
    "service": "apigateway"
  },
  {
    "function": "get_tags",
    "service": "apigateway"
  },
  {
    "function": "get_usage",
    "service": "apigateway"
  },
  {
    "function": "get_usage_plan",
    "service": "apigateway"
  },
  {
    "function": "get_usage_plan_key",
    "service": "apigateway"
  },
  {
    "function": "get_usage_plan_keys",
    "service": "apigateway"
  },
  {
    "function": "get_usage_plans",
    "service": "apigateway"
  },
  {
    "function": "get_vpc_link",
    "service": "apigateway"
  },
  {
    "function": "get_vpc_links",
    "service": "apigateway"
  },
  {
    "function": "get_waiter",
    "service": "apigateway"
  },
  {
    "function": "get_connection",
    "service": "apigatewaymanagementapi"
  },
  {
    "function": "get_paginator",
    "service": "apigatewaymanagementapi"
  },
  {
    "function": "get_waiter",
    "service": "apigatewaymanagementapi"
  },
  {
    "function": "get_api",
    "service": "apigatewayv2"
  },
  {
    "function": "get_api_mapping",
    "service": "apigatewayv2"
  },
  {
    "function": "get_api_mappings",
    "service": "apigatewayv2"
  },
  {
    "function": "get_apis",
    "service": "apigatewayv2"
  },
  {
    "function": "get_authorizer",
    "service": "apigatewayv2"
  },
  {
    "function": "get_authorizers",
    "service": "apigatewayv2"
  },
  {
    "function": "get_deployment",
    "service": "apigatewayv2"
  },
  {
    "function": "get_deployments",
    "service": "apigatewayv2"
  },
  {
    "function": "get_domain_name",
    "service": "apigatewayv2"
  },
  {
    "function": "get_domain_names",
    "service": "apigatewayv2"
  },
  {
    "function": "get_integration",
    "service": "apigatewayv2"
  },
  {
    "function": "get_integration_response",
    "service": "apigatewayv2"
  },
  {
    "function": "get_integration_responses",
    "service": "apigatewayv2"
  },
  {
    "function": "get_integrations",
    "service": "apigatewayv2"
  },
  {
    "function": "get_model",
    "service": "apigatewayv2"
  },
  {
    "function": "get_model_template",
    "service": "apigatewayv2"
  },
  {
    "function": "get_models",
    "service": "apigatewayv2"
  },
  {
    "function": "get_paginator",
    "service": "apigatewayv2"
  },
  {
    "function": "get_route",
    "service": "apigatewayv2"
  },
  {
    "function": "get_route_response",
    "service": "apigatewayv2"
  },
  {
    "function": "get_route_responses",
    "service": "apigatewayv2"
  },
  {
    "function": "get_routes",
    "service": "apigatewayv2"
  },
  {
    "function": "get_stage",
    "service": "apigatewayv2"
  },
  {
    "function": "get_stages",
    "service": "apigatewayv2"
  },
  {
    "function": "get_tags",
    "service": "apigatewayv2"
  },
  {
    "function": "get_vpc_link",
    "service": "apigatewayv2"
  },
  {
    "function": "get_vpc_links",
    "service": "apigatewayv2"
  },
  {
    "function": "get_waiter",
    "service": "apigatewayv2"
  },
  {
    "function": "get_application",
    "service": "appconfig"
  },
  {
    "function": "get_configuration",
    "service": "appconfig"
  },
  {
    "function": "get_configuration_profile",
    "service": "appconfig"
  },
  {
    "function": "get_deployment",
    "service": "appconfig"
  },
  {
    "function": "get_deployment_strategy",
    "service": "appconfig"
  },
  {
    "function": "get_environment",
    "service": "appconfig"
  },
  {
    "function": "get_extension",
    "service": "appconfig"
  },
  {
    "function": "get_extension_association",
    "service": "appconfig"
  },
  {
    "function": "get_hosted_configuration_version",
    "service": "appconfig"
  },
  {
    "function": "get_paginator",
    "service": "appconfig"
  },
  {
    "function": "get_waiter",
    "service": "appconfig"
  },
  {
    "function": "list_applications",
    "service": "appconfig"
  },
  {
    "function": "list_configuration_profiles",
    "service": "appconfig"
  },
  {
    "function": "list_deployment_strategies",
    "service": "appconfig"
  },
  {
    "function": "list_deployments",
    "service": "appconfig"
  },
  {
    "function": "list_environments",
    "service": "appconfig"
  },
  {
    "function": "list_extension_associations",
    "service": "appconfig"
  },
  {
    "function": "list_extensions",
    "service": "appconfig"
  },
  {
    "function": "list_hosted_configuration_versions",
    "service": "appconfig"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appconfig"
  },
  {
    "function": "get_latest_configuration",
    "service": "appconfigdata"
  },
  {
    "function": "get_paginator",
    "service": "appconfigdata"
  },
  {
    "function": "get_waiter",
    "service": "appconfigdata"
  },
  {
    "function": "get_app_authorization",
    "service": "appfabric"
  },
  {
    "function": "get_app_bundle",
    "service": "appfabric"
  },
  {
    "function": "get_ingestion",
    "service": "appfabric"
  },
  {
    "function": "get_ingestion_destination",
    "service": "appfabric"
  },
  {
    "function": "get_paginator",
    "service": "appfabric"
  },
  {
    "function": "get_waiter",
    "service": "appfabric"
  },
  {
    "function": "list_app_authorizations",
    "service": "appfabric"
  },
  {
    "function": "list_app_bundles",
    "service": "appfabric"
  },
  {
    "function": "list_ingestion_destinations",
    "service": "appfabric"
  },
  {
    "function": "list_ingestions",
    "service": "appfabric"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appfabric"
  },
  {
    "function": "describe_connector",
    "service": "appflow"
  },
  {
    "function": "describe_connector_entity",
    "service": "appflow"
  },
  {
    "function": "describe_connector_profiles",
    "service": "appflow"
  },
  {
    "function": "describe_connectors",
    "service": "appflow"
  },
  {
    "function": "describe_flow",
    "service": "appflow"
  },
  {
    "function": "describe_flow_execution_records",
    "service": "appflow"
  },
  {
    "function": "get_paginator",
    "service": "appflow"
  },
  {
    "function": "get_waiter",
    "service": "appflow"
  },
  {
    "function": "list_connector_entities",
    "service": "appflow"
  },
  {
    "function": "list_connectors",
    "service": "appflow"
  },
  {
    "function": "list_flows",
    "service": "appflow"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appflow"
  },
  {
    "function": "get_data_integration",
    "service": "appintegrations"
  },
  {
    "function": "get_event_integration",
    "service": "appintegrations"
  },
  {
    "function": "get_paginator",
    "service": "appintegrations"
  },
  {
    "function": "get_waiter",
    "service": "appintegrations"
  },
  {
    "function": "list_data_integration_associations",
    "service": "appintegrations"
  },
  {
    "function": "list_data_integrations",
    "service": "appintegrations"
  },
  {
    "function": "list_event_integration_associations",
    "service": "appintegrations"
  },
  {
    "function": "list_event_integrations",
    "service": "appintegrations"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appintegrations"
  },
  {
    "function": "describe_scalable_targets",
    "service": "application-autoscaling"
  },
  {
    "function": "describe_scaling_activities",
    "service": "application-autoscaling"
  },
  {
    "function": "describe_scaling_policies",
    "service": "application-autoscaling"
  },
  {
    "function": "describe_scheduled_actions",
    "service": "application-autoscaling"
  },
  {
    "function": "get_paginator",
    "service": "application-autoscaling"
  },
  {
    "function": "get_waiter",
    "service": "application-autoscaling"
  },
  {
    "function": "list_tags_for_resource",
    "service": "application-autoscaling"
  },
  {
    "function": "describe_application",
    "service": "application-insights"
  },
  {
    "function": "describe_component",
    "service": "application-insights"
  },
  {
    "function": "describe_component_configuration",
    "service": "application-insights"
  },
  {
    "function": "describe_component_configuration_recommendation",
    "service": "application-insights"
  },
  {
    "function": "describe_log_pattern",
    "service": "application-insights"
  },
  {
    "function": "describe_observation",
    "service": "application-insights"
  },
  {
    "function": "describe_problem",
    "service": "application-insights"
  },
  {
    "function": "describe_problem_observations",
    "service": "application-insights"
  },
  {
    "function": "describe_workload",
    "service": "application-insights"
  },
  {
    "function": "get_paginator",
    "service": "application-insights"
  },
  {
    "function": "get_waiter",
    "service": "application-insights"
  },
  {
    "function": "list_applications",
    "service": "application-insights"
  },
  {
    "function": "list_components",
    "service": "application-insights"
  },
  {
    "function": "list_configuration_history",
    "service": "application-insights"
  },
  {
    "function": "list_log_pattern_sets",
    "service": "application-insights"
  },
  {
    "function": "list_log_patterns",
    "service": "application-insights"
  },
  {
    "function": "list_problems",
    "service": "application-insights"
  },
  {
    "function": "list_tags_for_resource",
    "service": "application-insights"
  },
  {
    "function": "list_workloads",
    "service": "application-insights"
  },
  {
    "function": "get_paginator",
    "service": "applicationcostprofiler"
  },
  {
    "function": "get_report_definition",
    "service": "applicationcostprofiler"
  },
  {
    "function": "get_waiter",
    "service": "applicationcostprofiler"
  },
  {
    "function": "list_report_definitions",
    "service": "applicationcostprofiler"
  },
  {
    "function": "describe_gateway_route",
    "service": "appmesh"
  },
  {
    "function": "describe_mesh",
    "service": "appmesh"
  },
  {
    "function": "describe_route",
    "service": "appmesh"
  },
  {
    "function": "describe_virtual_gateway",
    "service": "appmesh"
  },
  {
    "function": "describe_virtual_node",
    "service": "appmesh"
  },
  {
    "function": "describe_virtual_router",
    "service": "appmesh"
  },
  {
    "function": "describe_virtual_service",
    "service": "appmesh"
  },
  {
    "function": "get_paginator",
    "service": "appmesh"
  },
  {
    "function": "get_waiter",
    "service": "appmesh"
  },
  {
    "function": "list_gateway_routes",
    "service": "appmesh"
  },
  {
    "function": "list_meshes",
    "service": "appmesh"
  },
  {
    "function": "list_routes",
    "service": "appmesh"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appmesh"
  },
  {
    "function": "list_virtual_gateways",
    "service": "appmesh"
  },
  {
    "function": "list_virtual_nodes",
    "service": "appmesh"
  },
  {
    "function": "list_virtual_routers",
    "service": "appmesh"
  },
  {
    "function": "list_virtual_services",
    "service": "appmesh"
  },
  {
    "function": "describe_auto_scaling_configuration",
    "service": "apprunner"
  },
  {
    "function": "describe_custom_domains",
    "service": "apprunner"
  },
  {
    "function": "describe_observability_configuration",
    "service": "apprunner"
  },
  {
    "function": "describe_service",
    "service": "apprunner"
  },
  {
    "function": "describe_vpc_connector",
    "service": "apprunner"
  },
  {
    "function": "describe_vpc_ingress_connection",
    "service": "apprunner"
  },
  {
    "function": "get_paginator",
    "service": "apprunner"
  },
  {
    "function": "get_waiter",
    "service": "apprunner"
  },
  {
    "function": "list_auto_scaling_configurations",
    "service": "apprunner"
  },
  {
    "function": "list_connections",
    "service": "apprunner"
  },
  {
    "function": "list_observability_configurations",
    "service": "apprunner"
  },
  {
    "function": "list_operations",
    "service": "apprunner"
  },
  {
    "function": "list_services",
    "service": "apprunner"
  },
  {
    "function": "list_tags_for_resource",
    "service": "apprunner"
  },
  {
    "function": "list_vpc_connectors",
    "service": "apprunner"
  },
  {
    "function": "list_vpc_ingress_connections",
    "service": "apprunner"
  },
  {
    "function": "describe_app_block_builder_app_block_associations",
    "service": "appstream"
  },
  {
    "function": "describe_app_block_builders",
    "service": "appstream"
  },
  {
    "function": "describe_app_blocks",
    "service": "appstream"
  },
  {
    "function": "describe_application_fleet_associations",
    "service": "appstream"
  },
  {
    "function": "describe_applications",
    "service": "appstream"
  },
  {
    "function": "describe_directory_configs",
    "service": "appstream"
  },
  {
    "function": "describe_entitlements",
    "service": "appstream"
  },
  {
    "function": "describe_fleets",
    "service": "appstream"
  },
  {
    "function": "describe_image_builders",
    "service": "appstream"
  },
  {
    "function": "describe_image_permissions",
    "service": "appstream"
  },
  {
    "function": "describe_images",
    "service": "appstream"
  },
  {
    "function": "describe_sessions",
    "service": "appstream"
  },
  {
    "function": "describe_stacks",
    "service": "appstream"
  },
  {
    "function": "describe_usage_report_subscriptions",
    "service": "appstream"
  },
  {
    "function": "describe_user_stack_associations",
    "service": "appstream"
  },
  {
    "function": "describe_users",
    "service": "appstream"
  },
  {
    "function": "get_paginator",
    "service": "appstream"
  },
  {
    "function": "get_waiter",
    "service": "appstream"
  },
  {
    "function": "list_associated_fleets",
    "service": "appstream"
  },
  {
    "function": "list_associated_stacks",
    "service": "appstream"
  },
  {
    "function": "list_entitled_applications",
    "service": "appstream"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appstream"
  },
  {
    "function": "get_api_association",
    "service": "appsync"
  },
  {
    "function": "get_api_cache",
    "service": "appsync"
  },
  {
    "function": "get_data_source",
    "service": "appsync"
  },
  {
    "function": "get_domain_name",
    "service": "appsync"
  },
  {
    "function": "get_function",
    "service": "appsync"
  },
  {
    "function": "get_graphql_api",
    "service": "appsync"
  },
  {
    "function": "get_introspection_schema",
    "service": "appsync"
  },
  {
    "function": "get_paginator",
    "service": "appsync"
  },
  {
    "function": "get_resolver",
    "service": "appsync"
  },
  {
    "function": "get_schema_creation_status",
    "service": "appsync"
  },
  {
    "function": "get_source_api_association",
    "service": "appsync"
  },
  {
    "function": "get_type",
    "service": "appsync"
  },
  {
    "function": "get_waiter",
    "service": "appsync"
  },
  {
    "function": "list_api_keys",
    "service": "appsync"
  },
  {
    "function": "list_data_sources",
    "service": "appsync"
  },
  {
    "function": "list_domain_names",
    "service": "appsync"
  },
  {
    "function": "list_functions",
    "service": "appsync"
  },
  {
    "function": "list_graphql_apis",
    "service": "appsync"
  },
  {
    "function": "list_resolvers",
    "service": "appsync"
  },
  {
    "function": "list_resolvers_by_function",
    "service": "appsync"
  },
  {
    "function": "list_source_api_associations",
    "service": "appsync"
  },
  {
    "function": "list_tags_for_resource",
    "service": "appsync"
  },
  {
    "function": "list_types",
    "service": "appsync"
  },
  {
    "function": "list_types_by_association",
    "service": "appsync"
  },
  {
    "function": "get_managed_resource",
    "service": "arc-zonal-shift"
  },
  {
    "function": "get_paginator",
    "service": "arc-zonal-shift"
  },
  {
    "function": "get_waiter",
    "service": "arc-zonal-shift"
  },
  {
    "function": "list_managed_resources",
    "service": "arc-zonal-shift"
  },
  {
    "function": "list_zonal_shifts",
    "service": "arc-zonal-shift"
  },
  {
    "function": "get_calculation_execution",
    "service": "athena"
  },
  {
    "function": "get_calculation_execution_code",
    "service": "athena"
  },
  {
    "function": "get_calculation_execution_status",
    "service": "athena"
  },
  {
    "function": "get_capacity_assignment_configuration",
    "service": "athena"
  },
  {
    "function": "get_capacity_reservation",
    "service": "athena"
  },
  {
    "function": "get_data_catalog",
    "service": "athena"
  },
  {
    "function": "get_database",
    "service": "athena"
  },
  {
    "function": "get_named_query",
    "service": "athena"
  },
  {
    "function": "get_notebook_metadata",
    "service": "athena"
  },
  {
    "function": "get_paginator",
    "service": "athena"
  },
  {
    "function": "get_prepared_statement",
    "service": "athena"
  },
  {
    "function": "get_query_execution",
    "service": "athena"
  },
  {
    "function": "get_query_results",
    "service": "athena"
  },
  {
    "function": "get_query_runtime_statistics",
    "service": "athena"
  },
  {
    "function": "get_session",
    "service": "athena"
  },
  {
    "function": "get_session_status",
    "service": "athena"
  },
  {
    "function": "get_table_metadata",
    "service": "athena"
  },
  {
    "function": "get_waiter",
    "service": "athena"
  },
  {
    "function": "get_work_group",
    "service": "athena"
  },
  {
    "function": "list_application_dpu_sizes",
    "service": "athena"
  },
  {
    "function": "list_calculation_executions",
    "service": "athena"
  },
  {
    "function": "list_capacity_reservations",
    "service": "athena"
  },
  {
    "function": "list_data_catalogs",
    "service": "athena"
  },
  {
    "function": "list_databases",
    "service": "athena"
  },
  {
    "function": "list_engine_versions",
    "service": "athena"
  },
  {
    "function": "list_executors",
    "service": "athena"
  },
  {
    "function": "list_named_queries",
    "service": "athena"
  },
  {
    "function": "list_notebook_metadata",
    "service": "athena"
  },
  {
    "function": "list_notebook_sessions",
    "service": "athena"
  },
  {
    "function": "list_prepared_statements",
    "service": "athena"
  },
  {
    "function": "list_query_executions",
    "service": "athena"
  },
  {
    "function": "list_sessions",
    "service": "athena"
  },
  {
    "function": "list_table_metadata",
    "service": "athena"
  },
  {
    "function": "list_tags_for_resource",
    "service": "athena"
  },
  {
    "function": "list_work_groups",
    "service": "athena"
  },
  {
    "function": "get_account_status",
    "service": "auditmanager"
  },
  {
    "function": "get_assessment",
    "service": "auditmanager"
  },
  {
    "function": "get_assessment_framework",
    "service": "auditmanager"
  },
  {
    "function": "get_assessment_report_url",
    "service": "auditmanager"
  },
  {
    "function": "get_change_logs",
    "service": "auditmanager"
  },
  {
    "function": "get_control",
    "service": "auditmanager"
  },
  {
    "function": "get_delegations",
    "service": "auditmanager"
  },
  {
    "function": "get_evidence",
    "service": "auditmanager"
  },
  {
    "function": "get_evidence_by_evidence_folder",
    "service": "auditmanager"
  },
  {
    "function": "get_evidence_file_upload_url",
    "service": "auditmanager"
  },
  {
    "function": "get_evidence_folder",
    "service": "auditmanager"
  },
  {
    "function": "get_evidence_folders_by_assessment",
    "service": "auditmanager"
  },
  {
    "function": "get_evidence_folders_by_assessment_control",
    "service": "auditmanager"
  },
  {
    "function": "get_insights",
    "service": "auditmanager"
  },
  {
    "function": "get_insights_by_assessment",
    "service": "auditmanager"
  },
  {
    "function": "get_organization_admin_account",
    "service": "auditmanager"
  },
  {
    "function": "get_paginator",
    "service": "auditmanager"
  },
  {
    "function": "get_services_in_scope",
    "service": "auditmanager"
  },
  {
    "function": "get_settings",
    "service": "auditmanager"
  },
  {
    "function": "get_waiter",
    "service": "auditmanager"
  },
  {
    "function": "list_assessment_control_insights_by_control_domain",
    "service": "auditmanager"
  },
  {
    "function": "list_assessment_framework_share_requests",
    "service": "auditmanager"
  },
  {
    "function": "list_assessment_frameworks",
    "service": "auditmanager"
  },
  {
    "function": "list_assessment_reports",
    "service": "auditmanager"
  },
  {
    "function": "list_assessments",
    "service": "auditmanager"
  },
  {
    "function": "list_control_domain_insights",
    "service": "auditmanager"
  },
  {
    "function": "list_control_domain_insights_by_assessment",
    "service": "auditmanager"
  },
  {
    "function": "list_control_insights_by_control_domain",
    "service": "auditmanager"
  },
  {
    "function": "list_controls",
    "service": "auditmanager"
  },
  {
    "function": "list_keywords_for_data_source",
    "service": "auditmanager"
  },
  {
    "function": "list_notifications",
    "service": "auditmanager"
  },
  {
    "function": "list_tags_for_resource",
    "service": "auditmanager"
  },
  {
    "function": "describe_account_limits",
    "service": "autoscaling"
  },
  {
    "function": "describe_adjustment_types",
    "service": "autoscaling"
  },
  {
    "function": "describe_auto_scaling_groups",
    "service": "autoscaling"
  },
  {
    "function": "describe_auto_scaling_instances",
    "service": "autoscaling"
  },
  {
    "function": "describe_auto_scaling_notification_types",
    "service": "autoscaling"
  },
  {
    "function": "describe_instance_refreshes",
    "service": "autoscaling"
  },
  {
    "function": "describe_launch_configurations",
    "service": "autoscaling"
  },
  {
    "function": "describe_lifecycle_hook_types",
    "service": "autoscaling"
  },
  {
    "function": "describe_lifecycle_hooks",
    "service": "autoscaling"
  },
  {
    "function": "describe_load_balancer_target_groups",
    "service": "autoscaling"
  },
  {
    "function": "describe_load_balancers",
    "service": "autoscaling"
  },
  {
    "function": "describe_metric_collection_types",
    "service": "autoscaling"
  },
  {
    "function": "describe_notification_configurations",
    "service": "autoscaling"
  },
  {
    "function": "describe_policies",
    "service": "autoscaling"
  },
  {
    "function": "describe_scaling_activities",
    "service": "autoscaling"
  },
  {
    "function": "describe_scaling_process_types",
    "service": "autoscaling"
  },
  {
    "function": "describe_scheduled_actions",
    "service": "autoscaling"
  },
  {
    "function": "describe_tags",
    "service": "autoscaling"
  },
  {
    "function": "describe_termination_policy_types",
    "service": "autoscaling"
  },
  {
    "function": "describe_traffic_sources",
    "service": "autoscaling"
  },
  {
    "function": "describe_warm_pool",
    "service": "autoscaling"
  },
  {
    "function": "get_paginator",
    "service": "autoscaling"
  },
  {
    "function": "get_predictive_scaling_forecast",
    "service": "autoscaling"
  },
  {
    "function": "get_waiter",
    "service": "autoscaling"
  },
  {
    "function": "describe_scaling_plan_resources",
    "service": "autoscaling-plans"
  },
  {
    "function": "describe_scaling_plans",
    "service": "autoscaling-plans"
  },
  {
    "function": "get_paginator",
    "service": "autoscaling-plans"
  },
  {
    "function": "get_scaling_plan_resource_forecast_data",
    "service": "autoscaling-plans"
  },
  {
    "function": "get_waiter",
    "service": "autoscaling-plans"
  },
  {
    "function": "describe_backup_job",
    "service": "backup"
  },
  {
    "function": "describe_backup_vault",
    "service": "backup"
  },
  {
    "function": "describe_copy_job",
    "service": "backup"
  },
  {
    "function": "describe_framework",
    "service": "backup"
  },
  {
    "function": "describe_global_settings",
    "service": "backup"
  },
  {
    "function": "describe_protected_resource",
    "service": "backup"
  },
  {
    "function": "describe_recovery_point",
    "service": "backup"
  },
  {
    "function": "describe_region_settings",
    "service": "backup"
  },
  {
    "function": "describe_report_job",
    "service": "backup"
  },
  {
    "function": "describe_report_plan",
    "service": "backup"
  },
  {
    "function": "describe_restore_job",
    "service": "backup"
  },
  {
    "function": "get_backup_plan",
    "service": "backup"
  },
  {
    "function": "get_backup_plan_from_json",
    "service": "backup"
  },
  {
    "function": "get_backup_plan_from_template",
    "service": "backup"
  },
  {
    "function": "get_backup_selection",
    "service": "backup"
  },
  {
    "function": "get_backup_vault_access_policy",
    "service": "backup"
  },
  {
    "function": "get_backup_vault_notifications",
    "service": "backup"
  },
  {
    "function": "get_legal_hold",
    "service": "backup"
  },
  {
    "function": "get_paginator",
    "service": "backup"
  },
  {
    "function": "get_recovery_point_restore_metadata",
    "service": "backup"
  },
  {
    "function": "get_supported_resource_types",
    "service": "backup"
  },
  {
    "function": "get_waiter",
    "service": "backup"
  },
  {
    "function": "list_backup_jobs",
    "service": "backup"
  },
  {
    "function": "list_backup_plan_templates",
    "service": "backup"
  },
  {
    "function": "list_backup_plan_versions",
    "service": "backup"
  },
  {
    "function": "list_backup_plans",
    "service": "backup"
  },
  {
    "function": "list_backup_selections",
    "service": "backup"
  },
  {
    "function": "list_backup_vaults",
    "service": "backup"
  },
  {
    "function": "list_copy_jobs",
    "service": "backup"
  },
  {
    "function": "list_frameworks",
    "service": "backup"
  },
  {
    "function": "list_legal_holds",
    "service": "backup"
  },
  {
    "function": "list_protected_resources",
    "service": "backup"
  },
  {
    "function": "list_recovery_points_by_backup_vault",
    "service": "backup"
  },
  {
    "function": "list_recovery_points_by_legal_hold",
    "service": "backup"
  },
  {
    "function": "list_recovery_points_by_resource",
    "service": "backup"
  },
  {
    "function": "list_report_jobs",
    "service": "backup"
  },
  {
    "function": "list_report_plans",
    "service": "backup"
  },
  {
    "function": "list_restore_jobs",
    "service": "backup"
  },
  {
    "function": "list_tags",
    "service": "backup"
  },
  {
    "function": "get_bandwidth_rate_limit_schedule",
    "service": "backup-gateway"
  },
  {
    "function": "get_gateway",
    "service": "backup-gateway"
  },
  {
    "function": "get_hypervisor",
    "service": "backup-gateway"
  },
  {
    "function": "get_hypervisor_property_mappings",
    "service": "backup-gateway"
  },
  {
    "function": "get_paginator",
    "service": "backup-gateway"
  },
  {
    "function": "get_virtual_machine",
    "service": "backup-gateway"
  },
  {
    "function": "get_waiter",
    "service": "backup-gateway"
  },
  {
    "function": "list_gateways",
    "service": "backup-gateway"
  },
  {
    "function": "list_hypervisors",
    "service": "backup-gateway"
  },
  {
    "function": "list_tags_for_resource",
    "service": "backup-gateway"
  },
  {
    "function": "list_virtual_machines",
    "service": "backup-gateway"
  },
  {
    "function": "get_chunk",
    "service": "backupstorage"
  },
  {
    "function": "get_object_metadata",
    "service": "backupstorage"
  },
  {
    "function": "get_paginator",
    "service": "backupstorage"
  },
  {
    "function": "get_waiter",
    "service": "backupstorage"
  },
  {
    "function": "list_chunks",
    "service": "backupstorage"
  },
  {
    "function": "list_objects",
    "service": "backupstorage"
  },
  {
    "function": "describe_compute_environments",
    "service": "batch"
  },
  {
    "function": "describe_job_definitions",
    "service": "batch"
  },
  {
    "function": "describe_job_queues",
    "service": "batch"
  },
  {
    "function": "describe_jobs",
    "service": "batch"
  },
  {
    "function": "describe_scheduling_policies",
    "service": "batch"
  },
  {
    "function": "get_paginator",
    "service": "batch"
  },
  {
    "function": "get_waiter",
    "service": "batch"
  },
  {
    "function": "list_jobs",
    "service": "batch"
  },
  {
    "function": "list_scheduling_policies",
    "service": "batch"
  },
  {
    "function": "list_tags_for_resource",
    "service": "batch"
  },
  {
    "function": "get_paginator",
    "service": "billingconductor"
  },
  {
    "function": "get_waiter",
    "service": "billingconductor"
  },
  {
    "function": "list_account_associations",
    "service": "billingconductor"
  },
  {
    "function": "list_billing_group_cost_reports",
    "service": "billingconductor"
  },
  {
    "function": "list_billing_groups",
    "service": "billingconductor"
  },
  {
    "function": "list_custom_line_item_versions",
    "service": "billingconductor"
  },
  {
    "function": "list_custom_line_items",
    "service": "billingconductor"
  },
  {
    "function": "list_pricing_plans",
    "service": "billingconductor"
  },
  {
    "function": "list_pricing_plans_associated_with_pricing_rule",
    "service": "billingconductor"
  },
  {
    "function": "list_pricing_rules",
    "service": "billingconductor"
  },
  {
    "function": "list_pricing_rules_associated_to_pricing_plan",
    "service": "billingconductor"
  },
  {
    "function": "list_resources_associated_to_custom_line_item",
    "service": "billingconductor"
  },
  {
    "function": "list_tags_for_resource",
    "service": "billingconductor"
  },
  {
    "function": "get_device",
    "service": "braket"
  },
  {
    "function": "get_job",
    "service": "braket"
  },
  {
    "function": "get_paginator",
    "service": "braket"
  },
  {
    "function": "get_quantum_task",
    "service": "braket"
  },
  {
    "function": "get_waiter",
    "service": "braket"
  },
  {
    "function": "list_tags_for_resource",
    "service": "braket"
  },
  {
    "function": "describe_budget",
    "service": "budgets"
  },
  {
    "function": "describe_budget_action",
    "service": "budgets"
  },
  {
    "function": "describe_budget_action_histories",
    "service": "budgets"
  },
  {
    "function": "describe_budget_actions_for_account",
    "service": "budgets"
  },
  {
    "function": "describe_budget_actions_for_budget",
    "service": "budgets"
  },
  {
    "function": "describe_budget_notifications_for_account",
    "service": "budgets"
  },
  {
    "function": "describe_budget_performance_history",
    "service": "budgets"
  },
  {
    "function": "describe_budgets",
    "service": "budgets"
  },
  {
    "function": "describe_notifications_for_budget",
    "service": "budgets"
  },
  {
    "function": "describe_subscribers_for_notification",
    "service": "budgets"
  },
  {
    "function": "get_paginator",
    "service": "budgets"
  },
  {
    "function": "get_waiter",
    "service": "budgets"
  },
  {
    "function": "describe_cost_category_definition",
    "service": "ce"
  },
  {
    "function": "get_anomalies",
    "service": "ce"
  },
  {
    "function": "get_anomaly_monitors",
    "service": "ce"
  },
  {
    "function": "get_anomaly_subscriptions",
    "service": "ce"
  },
  {
    "function": "get_cost_and_usage",
    "service": "ce"
  },
  {
    "function": "get_cost_and_usage_with_resources",
    "service": "ce"
  },
  {
    "function": "get_cost_categories",
    "service": "ce"
  },
  {
    "function": "get_cost_forecast",
    "service": "ce"
  },
  {
    "function": "get_dimension_values",
    "service": "ce"
  },
  {
    "function": "get_paginator",
    "service": "ce"
  },
  {
    "function": "get_reservation_coverage",
    "service": "ce"
  },
  {
    "function": "get_reservation_purchase_recommendation",
    "service": "ce"
  },
  {
    "function": "get_reservation_utilization",
    "service": "ce"
  },
  {
    "function": "get_rightsizing_recommendation",
    "service": "ce"
  },
  {
    "function": "get_savings_plan_purchase_recommendation_details",
    "service": "ce"
  },
  {
    "function": "get_savings_plans_coverage",
    "service": "ce"
  },
  {
    "function": "get_savings_plans_purchase_recommendation",
    "service": "ce"
  },
  {
    "function": "get_savings_plans_utilization",
    "service": "ce"
  },
  {
    "function": "get_savings_plans_utilization_details",
    "service": "ce"
  },
  {
    "function": "get_tags",
    "service": "ce"
  },
  {
    "function": "get_usage_forecast",
    "service": "ce"
  },
  {
    "function": "get_waiter",
    "service": "ce"
  },
  {
    "function": "list_cost_allocation_tags",
    "service": "ce"
  },
  {
    "function": "list_cost_category_definitions",
    "service": "ce"
  },
  {
    "function": "list_savings_plans_purchase_recommendation_generation",
    "service": "ce"
  },
  {
    "function": "list_tags_for_resource",
    "service": "ce"
  },
  {
    "function": "describe_app_instance",
    "service": "chime"
  },
  {
    "function": "describe_app_instance_admin",
    "service": "chime"
  },
  {
    "function": "describe_app_instance_user",
    "service": "chime"
  },
  {
    "function": "describe_channel",
    "service": "chime"
  },
  {
    "function": "describe_channel_ban",
    "service": "chime"
  },
  {
    "function": "describe_channel_membership",
    "service": "chime"
  },
  {
    "function": "describe_channel_membership_for_app_instance_user",
    "service": "chime"
  },
  {
    "function": "describe_channel_moderated_by_app_instance_user",
    "service": "chime"
  },
  {
    "function": "describe_channel_moderator",
    "service": "chime"
  },
  {
    "function": "get_account",
    "service": "chime"
  },
  {
    "function": "get_account_settings",
    "service": "chime"
  },
  {
    "function": "get_app_instance_retention_settings",
    "service": "chime"
  },
  {
    "function": "get_app_instance_streaming_configurations",
    "service": "chime"
  },
  {
    "function": "get_attendee",
    "service": "chime"
  },
  {
    "function": "get_bot",
    "service": "chime"
  },
  {
    "function": "get_channel_message",
    "service": "chime"
  },
  {
    "function": "get_events_configuration",
    "service": "chime"
  },
  {
    "function": "get_global_settings",
    "service": "chime"
  },
  {
    "function": "get_media_capture_pipeline",
    "service": "chime"
  },
  {
    "function": "get_meeting",
    "service": "chime"
  },
  {
    "function": "get_messaging_session_endpoint",
    "service": "chime"
  },
  {
    "function": "get_paginator",
    "service": "chime"
  },
  {
    "function": "get_phone_number",
    "service": "chime"
  },
  {
    "function": "get_phone_number_order",
    "service": "chime"
  },
  {
    "function": "get_phone_number_settings",
    "service": "chime"
  },
  {
    "function": "get_proxy_session",
    "service": "chime"
  },
  {
    "function": "get_retention_settings",
    "service": "chime"
  },
  {
    "function": "get_room",
    "service": "chime"
  },
  {
    "function": "get_sip_media_application",
    "service": "chime"
  },
  {
    "function": "get_sip_media_application_logging_configuration",
    "service": "chime"
  },
  {
    "function": "get_sip_rule",
    "service": "chime"
  },
  {
    "function": "get_user",
    "service": "chime"
  },
  {
    "function": "get_user_settings",
    "service": "chime"
  },
  {
    "function": "get_voice_connector",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_emergency_calling_configuration",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_group",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_logging_configuration",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_origination",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_proxy",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_streaming_configuration",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_termination",
    "service": "chime"
  },
  {
    "function": "get_voice_connector_termination_health",
    "service": "chime"
  },
  {
    "function": "get_waiter",
    "service": "chime"
  },
  {
    "function": "list_accounts",
    "service": "chime"
  },
  {
    "function": "list_app_instance_admins",
    "service": "chime"
  },
  {
    "function": "list_app_instance_users",
    "service": "chime"
  },
  {
    "function": "list_app_instances",
    "service": "chi
Download .txt
gitextract_nae5be8a/

├── .devcontainer/
│   ├── README.md
│   ├── config.yaml
│   ├── devcontainer.json
│   └── post-create.sh
├── .github/
│   ├── ISSUE_TEMPLATE/
│   │   ├── bug_report.md
│   │   ├── feature_request.md
│   │   └── question.md
│   ├── actions/
│   │   └── .keep
│   ├── pull_request_template.md
│   └── workflows/
│       ├── build.yml
│       ├── codeql-analysis.yml
│       ├── hygiene.yml
│       ├── publish.yml
│       └── release.yml
├── .gitignore
├── .gitmodules
├── .pre-commit-config.yaml
├── .vscode/
│   ├── launch.json
│   └── tasks.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── Makefile
├── NOTICE
├── README.md
├── SECURITY.md
├── aws-auto-inventory-unified-architecture.md
├── aws_auto_inventory/
│   ├── __init__.py
│   ├── cli.py
│   ├── config/
│   │   ├── __init__.py
│   │   ├── loader.py
│   │   ├── models.py
│   │   └── validator.py
│   ├── core/
│   │   ├── __init__.py
│   │   ├── aws_client.py
│   │   ├── organization.py
│   │   ├── region.py
│   │   ├── scan_engine.py
│   │   └── service.py
│   └── utils/
│       ├── __init__.py
│       ├── logging.py
│       └── threading.py
├── doc/
│   └── habits.yaml
├── examples/
│   ├── config_example.json
│   ├── config_example.yaml
│   └── config_organization_example.yaml
├── organization_scanner.py
├── requirements-and-versions.txt
├── requirements.txt
├── scan/
│   └── sample/
│       ├── all_services.json
│       ├── list_emr_clusters_id_name.json
│       ├── list_of_bucket_names.json
│       ├── running_ec2.json
│       ├── running_ec2_names.json
│       ├── s3_buckets.json
│       ├── services/
│       │   ├── accessanalyzer.json
│       │   ├── account.json
│       │   ├── acm-pca.json
│       │   ├── acm.json
│       │   ├── alexaforbusiness.json
│       │   ├── amp.json
│       │   ├── amplify.json
│       │   ├── amplifybackend.json
│       │   ├── amplifyuibuilder.json
│       │   ├── apigateway.json
│       │   ├── apigatewaymanagementapi.json
│       │   ├── apigatewayv2.json
│       │   ├── appconfig.json
│       │   ├── appconfigdata.json
│       │   ├── appfabric.json
│       │   ├── appflow.json
│       │   ├── appintegrations.json
│       │   ├── application-autoscaling.json
│       │   ├── application-insights.json
│       │   ├── applicationcostprofiler.json
│       │   ├── appmesh.json
│       │   ├── apprunner.json
│       │   ├── appstream.json
│       │   ├── appsync.json
│       │   ├── arc-zonal-shift.json
│       │   ├── athena.json
│       │   ├── auditmanager.json
│       │   ├── autoscaling-plans.json
│       │   ├── autoscaling.json
│       │   ├── backup-gateway.json
│       │   ├── backup.json
│       │   ├── backupstorage.json
│       │   ├── batch.json
│       │   ├── billingconductor.json
│       │   ├── braket.json
│       │   ├── budgets.json
│       │   ├── ce.json
│       │   ├── chime-sdk-identity.json
│       │   ├── chime-sdk-media-pipelines.json
│       │   ├── chime-sdk-meetings.json
│       │   ├── chime-sdk-messaging.json
│       │   ├── chime-sdk-voice.json
│       │   ├── chime.json
│       │   ├── cleanrooms.json
│       │   ├── cloud9.json
│       │   ├── cloudcontrol.json
│       │   ├── clouddirectory.json
│       │   ├── cloudformation.json
│       │   ├── cloudfront.json
│       │   ├── cloudhsm.json
│       │   ├── cloudhsmv2.json
│       │   ├── cloudsearch.json
│       │   ├── cloudsearchdomain.json
│       │   ├── cloudtrail-data.json
│       │   ├── cloudtrail.json
│       │   ├── cloudwatch.json
│       │   ├── codeartifact.json
│       │   ├── codebuild.json
│       │   ├── codecatalyst.json
│       │   ├── codecommit.json
│       │   ├── codedeploy.json
│       │   ├── codeguru-reviewer.json
│       │   ├── codeguru-security.json
│       │   ├── codeguruprofiler.json
│       │   ├── codepipeline.json
│       │   ├── codestar-connections.json
│       │   ├── codestar-notifications.json
│       │   ├── codestar.json
│       │   ├── cognito-identity.json
│       │   ├── cognito-idp.json
│       │   ├── cognito-sync.json
│       │   ├── comprehend.json
│       │   ├── comprehendmedical.json
│       │   ├── compute-optimizer.json
│       │   ├── config.json
│       │   ├── connect-contact-lens.json
│       │   ├── connect.json
│       │   ├── connectcampaigns.json
│       │   ├── connectcases.json
│       │   ├── connectparticipant.json
│       │   ├── controltower.json
│       │   ├── cur.json
│       │   ├── customer-profiles.json
│       │   ├── databrew.json
│       │   ├── dataexchange.json
│       │   ├── datapipeline.json
│       │   ├── datasync.json
│       │   ├── dax.json
│       │   ├── detective.json
│       │   ├── devicefarm.json
│       │   ├── devops-guru.json
│       │   ├── directconnect.json
│       │   ├── discovery.json
│       │   ├── dlm.json
│       │   ├── dms.json
│       │   ├── docdb-elastic.json
│       │   ├── docdb.json
│       │   ├── drs.json
│       │   ├── ds.json
│       │   ├── dynamodb.json
│       │   ├── dynamodbstreams.json
│       │   ├── ebs.json
│       │   ├── ec2-instance-connect.json
│       │   ├── ec2.json
│       │   ├── ecr-public.json
│       │   ├── ecr.json
│       │   ├── ecs.json
│       │   ├── efs.json
│       │   ├── eks.json
│       │   ├── elastic-inference.json
│       │   ├── elasticache.json
│       │   ├── elasticbeanstalk.json
│       │   ├── elastictranscoder.json
│       │   ├── elb.json
│       │   ├── elbv2.json
│       │   ├── emr-containers.json
│       │   ├── emr-serverless.json
│       │   ├── emr.json
│       │   ├── entityresolution.json
│       │   ├── es.json
│       │   ├── events.json
│       │   ├── evidently.json
│       │   ├── finspace-data.json
│       │   ├── finspace.json
│       │   ├── firehose.json
│       │   ├── fis.json
│       │   ├── fms.json
│       │   ├── forecast.json
│       │   ├── forecastquery.json
│       │   ├── frauddetector.json
│       │   ├── fsx.json
│       │   ├── gamelift.json
│       │   ├── gamesparks.json
│       │   ├── glacier.json
│       │   ├── globalaccelerator.json
│       │   ├── glue.json
│       │   ├── grafana.json
│       │   ├── greengrass.json
│       │   ├── greengrassv2.json
│       │   ├── groundstation.json
│       │   ├── guardduty.json
│       │   ├── health.json
│       │   ├── healthlake.json
│       │   ├── honeycode.json
│       │   ├── iam.json
│       │   ├── identitystore.json
│       │   ├── imagebuilder.json
│       │   ├── importexport.json
│       │   ├── inspector.json
│       │   ├── inspector2.json
│       │   ├── internetmonitor.json
│       │   ├── iot-data.json
│       │   ├── iot-jobs-data.json
│       │   ├── iot-roborunner.json
│       │   ├── iot.json
│       │   ├── iot1click-devices.json
│       │   ├── iot1click-projects.json
│       │   ├── iotanalytics.json
│       │   ├── iotdeviceadvisor.json
│       │   ├── iotevents-data.json
│       │   ├── iotevents.json
│       │   ├── iotfleethub.json
│       │   ├── iotfleetwise.json
│       │   ├── iotsecuretunneling.json
│       │   ├── iotsitewise.json
│       │   ├── iotthingsgraph.json
│       │   ├── iottwinmaker.json
│       │   ├── iotwireless.json
│       │   ├── ivs-realtime.json
│       │   ├── ivs.json
│       │   ├── ivschat.json
│       │   ├── kafka.json
│       │   ├── kafkaconnect.json
│       │   ├── kendra-ranking.json
│       │   ├── kendra.json
│       │   ├── keyspaces.json
│       │   ├── kinesis-video-archived-media.json
│       │   ├── kinesis-video-media.json
│       │   ├── kinesis-video-signaling.json
│       │   ├── kinesis-video-webrtc-storage.json
│       │   ├── kinesis.json
│       │   ├── kinesisanalytics.json
│       │   ├── kinesisanalyticsv2.json
│       │   ├── kinesisvideo.json
│       │   ├── kms.json
│       │   ├── lakeformation.json
│       │   ├── lambda.json
│       │   ├── lex-models.json
│       │   ├── lex-runtime.json
│       │   ├── lexv2-models.json
│       │   ├── lexv2-runtime.json
│       │   ├── license-manager-linux-subscriptions.json
│       │   ├── license-manager-user-subscriptions.json
│       │   ├── license-manager.json
│       │   ├── lightsail.json
│       │   ├── location.json
│       │   ├── logs.json
│       │   ├── lookoutequipment.json
│       │   ├── lookoutmetrics.json
│       │   ├── lookoutvision.json
│       │   ├── m2.json
│       │   ├── machinelearning.json
│       │   ├── macie.json
│       │   ├── macie2.json
│       │   ├── managedblockchain-query.json
│       │   ├── managedblockchain.json
│       │   ├── marketplace-catalog.json
│       │   ├── marketplace-entitlement.json
│       │   ├── marketplacecommerceanalytics.json
│       │   ├── mediaconnect.json
│       │   ├── mediaconvert.json
│       │   ├── medialive.json
│       │   ├── mediapackage-vod.json
│       │   ├── mediapackage.json
│       │   ├── mediapackagev2.json
│       │   ├── mediastore-data.json
│       │   ├── mediastore.json
│       │   ├── mediatailor.json
│       │   ├── medical-imaging.json
│       │   ├── memorydb.json
│       │   ├── meteringmarketplace.json
│       │   ├── mgh.json
│       │   ├── mgn.json
│       │   ├── migration-hub-refactor-spaces.json
│       │   ├── migrationhub-config.json
│       │   ├── migrationhuborchestrator.json
│       │   ├── migrationhubstrategy.json
│       │   ├── mobile.json
│       │   ├── mq.json
│       │   ├── mturk.json
│       │   ├── mwaa.json
│       │   ├── neptune.json
│       │   ├── network-firewall.json
│       │   ├── networkmanager.json
│       │   ├── nimble.json
│       │   ├── oam.json
│       │   ├── omics.json
│       │   ├── opensearch.json
│       │   ├── opensearchserverless.json
│       │   ├── opsworks.json
│       │   ├── opsworkscm.json
│       │   ├── organizations.json
│       │   ├── osis.json
│       │   ├── outposts.json
│       │   ├── panorama.json
│       │   ├── payment-cryptography-data.json
│       │   ├── payment-cryptography.json
│       │   ├── personalize-events.json
│       │   ├── personalize-runtime.json
│       │   ├── personalize.json
│       │   ├── pi.json
│       │   ├── pinpoint-email.json
│       │   ├── pinpoint-sms-voice-v2.json
│       │   ├── pinpoint-sms-voice.json
│       │   ├── pinpoint.json
│       │   ├── pipes.json
│       │   ├── polly.json
│       │   ├── pricing.json
│       │   ├── privatenetworks.json
│       │   ├── proton.json
│       │   ├── qldb-session.json
│       │   ├── qldb.json
│       │   ├── quicksight.json
│       │   ├── ram.json
│       │   ├── rbin.json
│       │   ├── rds-data.json
│       │   ├── rds.json
│       │   ├── redshift-data.json
│       │   ├── redshift-serverless.json
│       │   ├── redshift.json
│       │   ├── rekognition.json
│       │   ├── resiliencehub.json
│       │   ├── resource-explorer-2.json
│       │   ├── resource-groups.json
│       │   ├── resourcegroupstaggingapi.json
│       │   ├── robomaker.json
│       │   ├── rolesanywhere.json
│       │   ├── route53-recovery-cluster.json
│       │   ├── route53-recovery-control-config.json
│       │   ├── route53-recovery-readiness.json
│       │   ├── route53.json
│       │   ├── route53domains.json
│       │   ├── route53resolver.json
│       │   ├── rum.json
│       │   ├── s3.json
│       │   ├── s3control.json
│       │   ├── s3outposts.json
│       │   ├── sagemaker-a2i-runtime.json
│       │   ├── sagemaker-edge.json
│       │   ├── sagemaker-featurestore-runtime.json
│       │   ├── sagemaker-geospatial.json
│       │   ├── sagemaker-metrics.json
│       │   ├── sagemaker-runtime.json
│       │   ├── sagemaker.json
│       │   ├── savingsplans.json
│       │   ├── scheduler.json
│       │   ├── schemas.json
│       │   ├── sdb.json
│       │   ├── secretsmanager.json
│       │   ├── securityhub.json
│       │   ├── securitylake.json
│       │   ├── serverlessrepo.json
│       │   ├── service-quotas.json
│       │   ├── servicecatalog-appregistry.json
│       │   ├── servicecatalog.json
│       │   ├── servicediscovery.json
│       │   ├── ses.json
│       │   ├── sesv2.json
│       │   ├── shield.json
│       │   ├── signer.json
│       │   ├── simspaceweaver.json
│       │   ├── sms-voice.json
│       │   ├── sms.json
│       │   ├── snow-device-management.json
│       │   ├── snowball.json
│       │   ├── sns.json
│       │   ├── sqs.json
│       │   ├── ssm-contacts.json
│       │   ├── ssm-incidents.json
│       │   ├── ssm-sap.json
│       │   ├── ssm.json
│       │   ├── sso-admin.json
│       │   ├── sso-oidc.json
│       │   ├── sso.json
│       │   ├── stepfunctions.json
│       │   ├── storagegateway.json
│       │   ├── sts.json
│       │   ├── support-app.json
│       │   ├── support.json
│       │   ├── swf.json
│       │   ├── synthetics.json
│       │   ├── textract.json
│       │   ├── timestream-query.json
│       │   ├── timestream-write.json
│       │   ├── tnb.json
│       │   ├── transcribe.json
│       │   ├── transfer.json
│       │   ├── translate.json
│       │   ├── verifiedpermissions.json
│       │   ├── voice-id.json
│       │   ├── vpc-lattice.json
│       │   ├── waf-regional.json
│       │   ├── waf.json
│       │   ├── wafv2.json
│       │   ├── wellarchitected.json
│       │   ├── wisdom.json
│       │   ├── workdocs.json
│       │   ├── worklink.json
│       │   ├── workmail.json
│       │   ├── workmailmessageflow.json
│       │   ├── workspaces-web.json
│       │   ├── workspaces.json
│       │   └── xray.json
│       └── tagged_emrs.json
├── scan.py
├── scan_builder.py
├── setup.py
├── test_requirements.txt
└── tests/
    ├── __init__.py
    ├── conftest.py
    ├── test_api_calls.py
    ├── test_config/
    │   ├── __init__.py
    │   └── test_loader.py
    ├── test_core/
    │   └── __init__.py
    ├── test_organization.py
    ├── test_organization_scanner.py
    ├── test_output/
    │   └── __init__.py
    ├── test_role_assumption.py
    └── test_service_scanning.py
Download .txt
SYMBOL INDEX (110 symbols across 21 files)

FILE: aws_auto_inventory/cli.py
  function check_aws_credentials (line 19) | def check_aws_credentials(profile_name: Optional[str] = None) -> bool:
  function parse_args (line 40) | def parse_args() -> argparse.Namespace:
  function main (line 99) | def main() -> int:

FILE: aws_auto_inventory/config/loader.py
  class ConfigLoader (line 12) | class ConfigLoader:
    method load_config (line 17) | def load_config(self, path: str) -> Config:
    method _detect_format (line 50) | def _detect_format(self, path: str) -> str:
    method _is_legacy_format (line 69) | def _is_legacy_format(self, config_data: Union[Dict[str, Any], list]) ...
    method _convert_legacy_format (line 90) | def _convert_legacy_format(self, config_data: Union[Dict[str, Any], li...

FILE: aws_auto_inventory/config/models.py
  class ExcelConfig (line 8) | class ExcelConfig(BaseModel):
  class AWSConfig (line 14) | class AWSConfig(BaseModel):
  class Sheet (line 22) | class Sheet(BaseModel):
  class Inventory (line 31) | class Inventory(BaseModel):
  class Config (line 39) | class Config(BaseModel):
    method to_json (line 43) | def to_json(self):
    method to_yaml (line 47) | def to_yaml(self):
    method from_dict (line 53) | def from_dict(cls, data):

FILE: aws_auto_inventory/config/validator.py
  class ConfigValidator (line 10) | class ConfigValidator:
    method validate (line 15) | def validate(self, config: Config) -> List[str]:
    method _validate_inventory (line 39) | def _validate_inventory(self, inventory: Inventory) -> List[str]:
    method _validate_aws_config (line 67) | def _validate_aws_config(self, inventory: Inventory) -> List[str]:
    method _validate_sheet (line 95) | def _validate_sheet(self, sheet: Sheet) -> List[str]:

FILE: aws_auto_inventory/core/aws_client.py
  class AWSClientError (line 17) | class AWSClientError(Exception):
  class ThrottlingError (line 22) | class ThrottlingError(AWSClientError):
    method __init__ (line 24) | def __init__(self, service: str, function: str, retry_after: Optional[...
  class AWSClient (line 31) | class AWSClient:
    method __init__ (line 36) | def __init__(self, session: boto3.Session, max_retries: int = 3, retry...
    method call_api (line 49) | def call_api(

FILE: aws_auto_inventory/core/organization.py
  class AccountResult (line 16) | class AccountResult:
    method __init__ (line 21) | def __init__(
    method to_dict (line 45) | def to_dict(self) -> Dict[str, Any]:
  class OrganizationScanner (line 61) | class OrganizationScanner:
    method __init__ (line 66) | def __init__(self):
    method get_organization_accounts (line 72) | def get_organization_accounts(self, session: boto3.Session) -> List[Di...
    method assume_role (line 106) | def assume_role(
    method scan_organization (line 149) | def scan_organization(

FILE: aws_auto_inventory/core/region.py
  class RegionResult (line 17) | class RegionResult:
    method __init__ (line 22) | def __init__(self, region: str, services: List[ServiceResult]):
    method to_dict (line 33) | def to_dict(self) -> Dict[str, Any]:
  class RegionScanner (line 46) | class RegionScanner:
    method __init__ (line 51) | def __init__(
    method scan_region (line 70) | def scan_region(

FILE: aws_auto_inventory/core/scan_engine.py
  class ScanResult (line 18) | class ScanResult:
    method __init__ (line 23) | def __init__(
    method to_dict (line 42) | def to_dict(self) -> Dict[str, Any]:
  class ScanEngine (line 65) | class ScanEngine:
    method __init__ (line 70) | def __init__(
    method scan (line 98) | def scan(self, config: Config) -> List[ScanResult]:
    method _scan_organization (line 125) | def _scan_organization(self, inventory: Inventory) -> ScanResult:
    method _scan_account (line 149) | def _scan_account(self, inventory: Inventory) -> ScanResult:

FILE: aws_auto_inventory/core/service.py
  class ServiceResult (line 16) | class ServiceResult:
    method __init__ (line 21) | def __init__(
    method to_dict (line 48) | def to_dict(self) -> Dict[str, Any]:
  class ServiceScanner (line 65) | class ServiceScanner:
    method __init__ (line 70) | def __init__(self, max_retries: int = 3, retry_delay: int = 2):
    method scan_service (line 81) | def scan_service(
  class ResourceFilter (line 153) | class ResourceFilter:
    method apply_filters (line 158) | def apply_filters(self, results: Any, filters: Dict[str, Any]) -> Any:

FILE: aws_auto_inventory/utils/logging.py
  function setup_logging (line 10) | def setup_logging(
  function get_logger (line 57) | def get_logger(name: Optional[str] = None) -> logging.Logger:

FILE: aws_auto_inventory/utils/threading.py
  class ThreadingManager (line 17) | class ThreadingManager(Generic[T, R]):
    method __init__ (line 22) | def __init__(self, max_workers: Optional[int] = None):
    method execute (line 32) | def execute(
    method execute_with_progress (line 75) | def execute_with_progress(

FILE: organization_scanner.py
  function get_organization_accounts (line 8) | def get_organization_accounts(session):
  function assume_role (line 36) | def assume_role(session, account_id, role_name):
  function scan_organization (line 68) | def scan_organization(org_role_name, scan_config, regions, output_dir, l...

FILE: scan.py
  function get_json_from_url (line 22) | def get_json_from_url(url):
  class DateTimeEncoder (line 38) | class DateTimeEncoder(json.JSONEncoder):
    method default (line 41) | def default(self, o):
  function setup_logging (line 47) | def setup_logging(log_dir, log_level):
  function api_call_with_retry (line 67) | def api_call_with_retry(client, function_name, parameters, max_retries, ...
  function _get_service_data (line 103) | def _get_service_data(session, region_name, service, log, max_retries, r...
  function process_region (line 178) | def process_region(
  function display_time (line 232) | def display_time(seconds):
  function check_aws_credentials (line 239) | def check_aws_credentials(session):
  function main (line 252) | def main(

FILE: scan_builder.py
  function build_service_sheet (line 7) | def build_service_sheet():

FILE: tests/conftest.py
  function aws_credentials (line 8) | def aws_credentials():
  function mock_boto (line 17) | def mock_boto():
  function sample_scan_config (line 23) | def sample_scan_config():
  function mock_organization (line 38) | def mock_organization(mock_boto):
  class MockLog (line 56) | class MockLog:
    method __init__ (line 58) | def __init__(self):
    method info (line 64) | def info(self, message, *args, **kwargs):
    method error (line 67) | def error(self, message, *args, **kwargs):
    method debug (line 70) | def debug(self, message, *args, **kwargs):
    method warning (line 73) | def warning(self, message, *args, **kwargs):
  function mock_log (line 77) | def mock_log():

FILE: tests/test_api_calls.py
  function test_api_call_success (line 5) | def test_api_call_success(mocker):
  function test_api_call_with_parameters (line 16) | def test_api_call_with_parameters(mocker):
  function test_api_call_throttling_retry (line 28) | def test_api_call_throttling_retry(mocker):
  function test_api_call_max_retries_exceeded (line 45) | def test_api_call_max_retries_exceeded(mocker):
  function test_api_call_non_throttling_error (line 62) | def test_api_call_non_throttling_error(mocker):

FILE: tests/test_config/test_loader.py
  function yaml_config_file (line 11) | def yaml_config_file():
  function json_config_file (line 30) | def json_config_file():
  function legacy_json_config_file (line 57) | def legacy_json_config_file():
  function test_load_yaml_config (line 78) | def test_load_yaml_config(yaml_config_file):
  function test_load_json_config (line 90) | def test_load_json_config(json_config_file):
  function test_load_legacy_json_config (line 102) | def test_load_legacy_json_config(legacy_json_config_file):
  function test_file_not_found (line 120) | def test_file_not_found():

FILE: tests/test_organization.py
  function test_get_organization_accounts (line 4) | def test_get_organization_accounts(mock_boto, mock_organization):
  function test_get_organization_accounts_empty (line 16) | def test_get_organization_accounts_empty(mock_boto):
  function test_get_organization_accounts_pagination (line 27) | def test_get_organization_accounts_pagination(mocker):

FILE: tests/test_organization_scanner.py
  function test_scan_organization (line 5) | def test_scan_organization(mocker, tmp_path):
  function test_scan_organization_role_assumption_failure (line 70) | def test_scan_organization_role_assumption_failure(mocker, tmp_path):

FILE: tests/test_role_assumption.py
  function test_assume_role_success (line 5) | def test_assume_role_success(mocker):
  function test_assume_role_failure (line 41) | def test_assume_role_failure(mocker):

FILE: tests/test_service_scanning.py
  function setup_aws_resources (line 9) | def setup_aws_resources(mock_boto):
  function test_process_region_s3 (line 22) | def test_process_region_s3(setup_aws_resources, aws_credentials, mock_log):
  function test_process_region_ec2 (line 44) | def test_process_region_ec2(setup_aws_resources, aws_credentials, mock_l...
Condensed preview — 425 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (1,350K chars).
[
  {
    "path": ".devcontainer/README.md",
    "chars": 987,
    "preview": "```json\n// For format details, see https://aka.ms/devcontainer.json. For config options, see the\n// README at: https://g"
  },
  {
    "path": ".devcontainer/config.yaml",
    "chars": 857,
    "preview": "inventories:\n  - name: learning\n    aws:\n      profile: your-aws-profile\n      region:\n        - us-east-1\n    excel:\n  "
  },
  {
    "path": ".devcontainer/devcontainer.json",
    "chars": 376,
    "preview": "{\n  \"customizations\": {\n    \"vscode\": {\n      \"extensions\": [\n        \"ms-python.python\",\n        \"ms-python.vscode-pyla"
  },
  {
    "path": ".devcontainer/post-create.sh",
    "chars": 267,
    "preview": "#!/usr/bin/env bash\n\nset -eu pipefail\n\nexport DEBIAN_FRONTEND=noninteractive\n\nsudo apt-get update\nsudo apt-get -y instal"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "chars": 309,
    "preview": "---\nname: 🐛 Bug Report\nabout: Create a new ticket for a bug.\nlabels: bug\n---\n\n<!-- Please search existing issues to avoi"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "chars": 127,
    "preview": "---\nname: 💡 Feature Request\nabout: Create a new ticket for a new feature request\nlabels: enhancement\n---\n\nType your idea"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/question.md",
    "chars": 102,
    "preview": "---\nname: ❔ Question\nabout: What is your question about?\nlabels: question\n---\n\nAdd more context here.\n"
  },
  {
    "path": ".github/actions/.keep",
    "chars": 0,
    "preview": ""
  },
  {
    "path": ".github/pull_request_template.md",
    "chars": 1257,
    "preview": "## 🧠 Pull Request\n\n### Changes\n\n<!-- What changes are being made? Is this a change a bugfix or new functionality?  Have "
  },
  {
    "path": ".github/workflows/build.yml",
    "chars": 532,
    "preview": "name: Build\non:\n  push:\n    branches:\n    - 'feat/**'\n    - 'fix/**'\n\npermissions:\n  contents: read\n\njobs:\n  build:\n    "
  },
  {
    "path": ".github/workflows/codeql-analysis.yml",
    "chars": 2431,
    "preview": "# For most projects, this workflow file will not need changing; you simply need\n# to commit it to your repository.\n#\n# Y"
  },
  {
    "path": ".github/workflows/hygiene.yml",
    "chars": 740,
    "preview": "name: Hygiene\n\non:\n  push:\n    branches:\n      - 'feat/**'\n      - 'fix/**'\n\npermissions:\n  contents: read\n\njobs:\n  code"
  },
  {
    "path": ".github/workflows/publish.yml",
    "chars": 2013,
    "preview": "name: Publish\non:\n  push:\n    tags:\n      - \"v*.*.*\"\n\npermissions:\n  contents: write\n\njobs:\n  build:\n    runs-on: ${{ ma"
  },
  {
    "path": ".github/workflows/release.yml",
    "chars": 358,
    "preview": "on:\n  push:\n    branches:\n      - main\n\nname: Release\npermissions:\n  contents: write\n  pull-requests: write\n\njobs:\n  rel"
  },
  {
    "path": ".gitignore",
    "chars": 7061,
    "preview": "# Created by https://www.toptal.com/developers/gitignore/api/macos,windows,linux,visualstudiocode,python,node\n# Edit at "
  },
  {
    "path": ".gitmodules",
    "chars": 105,
    "preview": "[submodule \"habits\"]\n\tpath = habits\n\turl = https://github.com/awslabs/aws-code-habits.git\n\tbranch = main\n"
  },
  {
    "path": ".pre-commit-config.yaml",
    "chars": 3180,
    "preview": "default_language_version:\n  python: python3\nfail_fast: true\nminimum_pre_commit_version: 2.13.0\nrepos:\n- repo: https://gi"
  },
  {
    "path": ".vscode/launch.json",
    "chars": 512,
    "preview": "{\n  \"configurations\": [\n    {\n      \"args\": [\n        \"--name\",\n        \"your-inventory-name\"\n      ],\n      \"console\": "
  },
  {
    "path": ".vscode/tasks.json",
    "chars": 237,
    "preview": "{\n  \"tasks\": [\n    {\n      \"command\": \"make build\",\n      \"group\": {\n        \"isDefault\": true,\n        \"kind\": \"build\"\n"
  },
  {
    "path": "CHANGELOG.md",
    "chars": 6324,
    "preview": "# Changelog\n\n## [1.1.1](https://github.com/aws-samples/aws-auto-inventory/compare/v1.1.0...v1.1.1) (2023-08-28)\n\n\n### Bu"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "chars": 309,
    "preview": "## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-condu"
  },
  {
    "path": "CONTRIBUTING.md",
    "chars": 3158,
    "preview": "# Contributing Guidelines\n\nThank you for your interest in contributing to our project. Whether it's a bug report, new fe"
  },
  {
    "path": "LICENSE",
    "chars": 10142,
    "preview": "\n                                 Apache License\n                           Version 2.0, January 2004\n                  "
  },
  {
    "path": "Makefile",
    "chars": 248,
    "preview": "export WORKSPACE=$(shell pwd)\nexport HABITS = $(WORKSPACE)/habits\n\ninclude $(HABITS)/lib/make/Makefile\ninclude $(HABITS)"
  },
  {
    "path": "NOTICE",
    "chars": 67,
    "preview": "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n"
  },
  {
    "path": "README.md",
    "chars": 5979,
    "preview": "# AWS Auto Inventory\n\nA tool for scanning AWS services across regions and accounts to collect resource information.\n\n## "
  },
  {
    "path": "SECURITY.md",
    "chars": 288,
    "preview": "## Security\n\nWe take the security of our software products and services seriously, which includes all source code reposi"
  },
  {
    "path": "aws-auto-inventory-unified-architecture.md",
    "chars": 29899,
    "preview": "# AWS Auto Inventory: Unified Architecture Plan\n\n## Table of Contents\n1. [Current State Analysis](#current-state-analysi"
  },
  {
    "path": "aws_auto_inventory/__init__.py",
    "chars": 189,
    "preview": "\"\"\"\nAWS Auto Inventory - Scan AWS resources and generate inventory.\n\nA tool for scanning AWS services across regions and"
  },
  {
    "path": "aws_auto_inventory/cli.py",
    "chars": 5694,
    "preview": "\"\"\"\nCommand-line interface for AWS Auto Inventory.\n\"\"\"\nimport os\nimport sys\nimport argparse\nimport logging\nfrom typing i"
  },
  {
    "path": "aws_auto_inventory/config/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "aws_auto_inventory/config/loader.py",
    "chars": 4506,
    "preview": "\"\"\"\nConfiguration loader for AWS Auto Inventory.\n\"\"\"\nimport os\nimport json\nimport yaml\nfrom typing import Union, Dict, A"
  },
  {
    "path": "aws_auto_inventory/config/models.py",
    "chars": 1458,
    "preview": "\"\"\"\nConfiguration models for AWS Auto Inventory.\n\"\"\"\nfrom typing import List, Dict, Optional, Any, Union\nfrom pydantic i"
  },
  {
    "path": "aws_auto_inventory/config/validator.py",
    "chars": 4299,
    "preview": "\"\"\"\nConfiguration validator for AWS Auto Inventory.\n\"\"\"\nimport boto3\nfrom typing import List, Optional, Dict, Any\n\nfrom "
  },
  {
    "path": "aws_auto_inventory/core/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "aws_auto_inventory/core/aws_client.py",
    "chars": 5115,
    "preview": "\"\"\"\nAWS client with retry logic for AWS Auto Inventory.\n\"\"\"\nimport time\nimport json\nimport logging\nfrom typing import Op"
  },
  {
    "path": "aws_auto_inventory/core/organization.py",
    "chars": 7264,
    "preview": "\"\"\"\nOrganization scanner for AWS Auto Inventory.\n\"\"\"\nimport logging\nfrom typing import Dict, Any, List, Optional\n\nimport"
  },
  {
    "path": "aws_auto_inventory/core/region.py",
    "chars": 4347,
    "preview": "\"\"\"\nRegion scanner for AWS Auto Inventory.\n\"\"\"\nimport logging\nimport concurrent.futures\nfrom typing import Dict, Any, Li"
  },
  {
    "path": "aws_auto_inventory/core/scan_engine.py",
    "chars": 6191,
    "preview": "\"\"\"\nMain scanning engine for AWS Auto Inventory.\n\"\"\"\nimport logging\nimport concurrent.futures\nfrom typing import Dict, A"
  },
  {
    "path": "aws_auto_inventory/core/service.py",
    "chars": 4744,
    "preview": "\"\"\"\nService scanner for AWS Auto Inventory.\n\"\"\"\nimport logging\nfrom typing import Dict, Any, List, Optional\n\nimport boto"
  },
  {
    "path": "aws_auto_inventory/utils/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "aws_auto_inventory/utils/logging.py",
    "chars": 1939,
    "preview": "\"\"\"\nLogging utilities for AWS Auto Inventory.\n\"\"\"\nimport os\nimport logging\nfrom datetime import datetime\nfrom typing imp"
  },
  {
    "path": "aws_auto_inventory/utils/threading.py",
    "chars": 4365,
    "preview": "\"\"\"\nThreading utilities for AWS Auto Inventory.\n\"\"\"\nimport os\nimport logging\nimport concurrent.futures\nfrom typing impor"
  },
  {
    "path": "doc/habits.yaml",
    "chars": 6847,
    "preview": "url: https://github.com/aws-samples/aws-auto-inventory\nlogo: doc/logo.png\n\n# optional\n# badges:\n#   # optional\n#   custo"
  },
  {
    "path": "examples/config_example.json",
    "chars": 1395,
    "preview": "{\n  \"inventories\": [\n    {\n      \"name\": \"my-aws-inventory\",\n      \"aws\": {\n        \"profile\": \"default\",\n        \"regio"
  },
  {
    "path": "examples/config_example.yaml",
    "chars": 1543,
    "preview": "# AWS Auto Inventory - Example YAML Configuration\n\ninventories:\n  - name: my-aws-inventory\n    aws:\n      # AWS profile "
  },
  {
    "path": "examples/config_organization_example.yaml",
    "chars": 1685,
    "preview": "# AWS Auto Inventory - Example Organization-wide Scanning Configuration\n\n# Define common sheets to reuse across inventor"
  },
  {
    "path": "organization_scanner.py",
    "chars": 5319,
    "preview": "# -*- coding: utf-8 -*-\nimport boto3\nimport os\nimport json\nfrom scan import main as scan_account\nfrom datetime import da"
  },
  {
    "path": "requirements-and-versions.txt",
    "chars": 277,
    "preview": "boto3==1.28.18\nbotocore==1.31.18\ncfgv==3.3.1\ndistlib==0.3.7\nfilelock==3.20.3\nidentify==2.5.26\njmespath==1.0.1\nnodeenv==1"
  },
  {
    "path": "requirements.txt",
    "chars": 83,
    "preview": "boto3>=1.20.0\npydantic>=1.8.0\njq>=1.6.0\npandas>=1.3.0\nxlsxwriter>=3.0.0\npyyaml>=6.0"
  },
  {
    "path": "scan/sample/all_services.json",
    "chars": 488703,
    "preview": "[\n  {\n    \"function\": \"get_access_preview\",\n    \"service\": \"accessanalyzer\"\n  },\n  {\n    \"function\": \"get_analyzed_resou"
  },
  {
    "path": "scan/sample/list_emr_clusters_id_name.json",
    "chars": 198,
    "preview": "[\n  {\n    \"function\": \"list_clusters\",\n    \"service\": \"emr\",\n    \"parameters\": {\n      \"ClusterStates\": [\n            \"W"
  },
  {
    "path": "scan/sample/list_of_bucket_names.json",
    "chars": 102,
    "preview": "[\n  {\n    \"function\": \"list_buckets\",\n    \"result_key\": \".Buckets[]|.Name\",\n    \"service\": \"s3\"\n  }\n]\n"
  },
  {
    "path": "scan/sample/running_ec2.json",
    "chars": 276,
    "preview": "[\n  {\n    \"function\": \"describe_instances\",\n    \"parameters\": {\n      \"Filters\": [\n        {\n          \"Name\": \"instance"
  },
  {
    "path": "scan/sample/running_ec2_names.json",
    "chars": 738,
    "preview": "[\n  {\n    \"function\": \"describe_instances\",\n    \"parameters\": {\n      \"Filters\": [\n        {\n          \"Name\": \"instance"
  },
  {
    "path": "scan/sample/s3_buckets.json",
    "chars": 93,
    "preview": "[\n  {\n    \"function\": \"list_buckets\",\n    \"result_key\": \"Buckets\",\n    \"service\": \"s3\"\n  }\n]\n"
  },
  {
    "path": "scan/sample/services/accessanalyzer.json",
    "chars": 1261,
    "preview": "[\n  {\n    \"function\": \"get_access_preview\",\n    \"service\": \"accessanalyzer\"\n  },\n  {\n    \"function\": \"get_analyzed_resou"
  },
  {
    "path": "scan/sample/services/account.json",
    "chars": 427,
    "preview": "[\n  {\n    \"function\": \"get_alternate_contact\",\n    \"service\": \"account\"\n  },\n  {\n    \"function\": \"get_contact_informatio"
  },
  {
    "path": "scan/sample/services/acm-pca.json",
    "chars": 837,
    "preview": "[\n  {\n    \"function\": \"describe_certificate_authority\",\n    \"service\": \"acm-pca\"\n  },\n  {\n    \"function\": \"describe_cert"
  },
  {
    "path": "scan/sample/services/acm.json",
    "chars": 478,
    "preview": "[\n  {\n    \"function\": \"describe_certificate\",\n    \"service\": \"acm\"\n  },\n  {\n    \"function\": \"get_account_configuration\","
  },
  {
    "path": "scan/sample/services/alexaforbusiness.json",
    "chars": 2034,
    "preview": "[\n  {\n    \"function\": \"get_address_book\",\n    \"service\": \"alexaforbusiness\"\n  },\n  {\n    \"function\": \"get_conference_pre"
  },
  {
    "path": "scan/sample/services/amp.json",
    "chars": 651,
    "preview": "[\n  {\n    \"function\": \"describe_alert_manager_definition\",\n    \"service\": \"amp\"\n  },\n  {\n    \"function\": \"describe_loggi"
  },
  {
    "path": "scan/sample/services/amplify.json",
    "chars": 1169,
    "preview": "[\n  {\n    \"function\": \"get_app\",\n    \"service\": \"amplify\"\n  },\n  {\n    \"function\": \"get_artifact_url\",\n    \"service\": \"a"
  },
  {
    "path": "scan/sample/services/amplifybackend.json",
    "chars": 836,
    "preview": "[\n  {\n    \"function\": \"get_backend\",\n    \"service\": \"amplifybackend\"\n  },\n  {\n    \"function\": \"get_backend_api\",\n    \"se"
  },
  {
    "path": "scan/sample/services/amplifyuibuilder.json",
    "chars": 829,
    "preview": "[\n  {\n    \"function\": \"get_codegen_job\",\n    \"service\": \"amplifyuibuilder\"\n  },\n  {\n    \"function\": \"get_component\",\n   "
  },
  {
    "path": "scan/sample/services/apigateway.json",
    "chars": 3477,
    "preview": "[\n  {\n    \"function\": \"get_account\",\n    \"service\": \"apigateway\"\n  },\n  {\n    \"function\": \"get_api_key\",\n    \"service\": "
  },
  {
    "path": "scan/sample/services/apigatewaymanagementapi.json",
    "chars": 250,
    "preview": "[\n  {\n    \"function\": \"get_connection\",\n    \"service\": \"apigatewaymanagementapi\"\n  },\n  {\n    \"function\": \"get_paginator"
  },
  {
    "path": "scan/sample/services/apigatewayv2.json",
    "chars": 2038,
    "preview": "[\n  {\n    \"function\": \"get_api\",\n    \"service\": \"apigatewayv2\"\n  },\n  {\n    \"function\": \"get_api_mapping\",\n    \"service\""
  },
  {
    "path": "scan/sample/services/appconfig.json",
    "chars": 1526,
    "preview": "[\n  {\n    \"function\": \"get_application\",\n    \"service\": \"appconfig\"\n  },\n  {\n    \"function\": \"get_configuration\",\n    \"s"
  },
  {
    "path": "scan/sample/services/appconfigdata.json",
    "chars": 230,
    "preview": "[\n  {\n    \"function\": \"get_latest_configuration\",\n    \"service\": \"appconfigdata\"\n  },\n  {\n    \"function\": \"get_paginator"
  },
  {
    "path": "scan/sample/services/appfabric.json",
    "chars": 818,
    "preview": "[\n  {\n    \"function\": \"get_app_authorization\",\n    \"service\": \"appfabric\"\n  },\n  {\n    \"function\": \"get_app_bundle\",\n   "
  },
  {
    "path": "scan/sample/services/appflow.json",
    "chars": 877,
    "preview": "[\n  {\n    \"function\": \"describe_connector\",\n    \"service\": \"appflow\"\n  },\n  {\n    \"function\": \"describe_connector_entity"
  },
  {
    "path": "scan/sample/services/appintegrations.json",
    "chars": 761,
    "preview": "[\n  {\n    \"function\": \"get_data_integration\",\n    \"service\": \"appintegrations\"\n  },\n  {\n    \"function\": \"get_event_integ"
  },
  {
    "path": "scan/sample/services/application-autoscaling.json",
    "chars": 641,
    "preview": "[\n  {\n    \"function\": \"describe_scalable_targets\",\n    \"service\": \"application-autoscaling\"\n  },\n  {\n    \"function\": \"de"
  },
  {
    "path": "scan/sample/services/application-insights.json",
    "chars": 1663,
    "preview": "[\n  {\n    \"function\": \"describe_application\",\n    \"service\": \"application-insights\"\n  },\n  {\n    \"function\": \"describe_c"
  },
  {
    "path": "scan/sample/services/applicationcostprofiler.json",
    "chars": 350,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"applicationcostprofiler\"\n  },\n  {\n    \"function\": \"get_report_def"
  },
  {
    "path": "scan/sample/services/appmesh.json",
    "chars": 1228,
    "preview": "[\n  {\n    \"function\": \"describe_gateway_route\",\n    \"service\": \"appmesh\"\n  },\n  {\n    \"function\": \"describe_mesh\",\n    \""
  },
  {
    "path": "scan/sample/services/apprunner.json",
    "chars": 1263,
    "preview": "[\n  {\n    \"function\": \"describe_auto_scaling_configuration\",\n    \"service\": \"apprunner\"\n  },\n  {\n    \"function\": \"descri"
  },
  {
    "path": "scan/sample/services/appstream.json",
    "chars": 1744,
    "preview": "[\n  {\n    \"function\": \"describe_app_block_builder_app_block_associations\",\n    \"service\": \"appstream\"\n  },\n  {\n    \"func"
  },
  {
    "path": "scan/sample/services/appsync.json",
    "chars": 1710,
    "preview": "[\n  {\n    \"function\": \"get_api_association\",\n    \"service\": \"appsync\"\n  },\n  {\n    \"function\": \"get_api_cache\",\n    \"ser"
  },
  {
    "path": "scan/sample/services/arc-zonal-shift.json",
    "chars": 395,
    "preview": "[\n  {\n    \"function\": \"get_managed_resource\",\n    \"service\": \"arc-zonal-shift\"\n  },\n  {\n    \"function\": \"get_paginator\","
  },
  {
    "path": "scan/sample/services/athena.json",
    "chars": 2562,
    "preview": "[\n  {\n    \"function\": \"get_calculation_execution\",\n    \"service\": \"athena\"\n  },\n  {\n    \"function\": \"get_calculation_exe"
  },
  {
    "path": "scan/sample/services/auditmanager.json",
    "chars": 2649,
    "preview": "[\n  {\n    \"function\": \"get_account_status\",\n    \"service\": \"auditmanager\"\n  },\n  {\n    \"function\": \"get_assessment\",\n   "
  },
  {
    "path": "scan/sample/services/autoscaling-plans.json",
    "chars": 438,
    "preview": "[\n  {\n    \"function\": \"describe_scaling_plan_resources\",\n    \"service\": \"autoscaling-plans\"\n  },\n  {\n    \"function\": \"de"
  },
  {
    "path": "scan/sample/services/autoscaling.json",
    "chars": 2021,
    "preview": "[\n  {\n    \"function\": \"describe_account_limits\",\n    \"service\": \"autoscaling\"\n  },\n  {\n    \"function\": \"describe_adjustm"
  },
  {
    "path": "scan/sample/services/backup-gateway.json",
    "chars": 878,
    "preview": "[\n  {\n    \"function\": \"get_bandwidth_rate_limit_schedule\",\n    \"service\": \"backup-gateway\"\n  },\n  {\n    \"function\": \"get"
  },
  {
    "path": "scan/sample/services/backup.json",
    "chars": 2905,
    "preview": "[\n  {\n    \"function\": \"describe_backup_job\",\n    \"service\": \"backup\"\n  },\n  {\n    \"function\": \"describe_backup_vault\",\n "
  },
  {
    "path": "scan/sample/services/backupstorage.json",
    "chars": 437,
    "preview": "[\n  {\n    \"function\": \"get_chunk\",\n    \"service\": \"backupstorage\"\n  },\n  {\n    \"function\": \"get_object_metadata\",\n    \"s"
  },
  {
    "path": "scan/sample/services/batch.json",
    "chars": 714,
    "preview": "[\n  {\n    \"function\": \"describe_compute_environments\",\n    \"service\": \"batch\"\n  },\n  {\n    \"function\": \"describe_job_def"
  },
  {
    "path": "scan/sample/services/billingconductor.json",
    "chars": 1167,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"billingconductor\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    \"ser"
  },
  {
    "path": "scan/sample/services/braket.json",
    "chars": 399,
    "preview": "[\n  {\n    \"function\": \"get_device\",\n    \"service\": \"braket\"\n  },\n  {\n    \"function\": \"get_job\",\n    \"service\": \"braket\"\n"
  },
  {
    "path": "scan/sample/services/budgets.json",
    "chars": 974,
    "preview": "[\n  {\n    \"function\": \"describe_budget\",\n    \"service\": \"budgets\"\n  },\n  {\n    \"function\": \"describe_budget_action\",\n   "
  },
  {
    "path": "scan/sample/services/ce.json",
    "chars": 1955,
    "preview": "[\n  {\n    \"function\": \"describe_cost_category_definition\",\n    \"service\": \"ce\"\n  },\n  {\n    \"function\": \"get_anomalies\","
  },
  {
    "path": "scan/sample/services/chime-sdk-identity.json",
    "chars": 1246,
    "preview": "[\n  {\n    \"function\": \"describe_app_instance\",\n    \"service\": \"chime-sdk-identity\"\n  },\n  {\n    \"function\": \"describe_ap"
  },
  {
    "path": "scan/sample/services/chime-sdk-media-pipelines.json",
    "chars": 872,
    "preview": "[\n  {\n    \"function\": \"get_media_capture_pipeline\",\n    \"service\": \"chime-sdk-media-pipelines\"\n  },\n  {\n    \"function\": "
  },
  {
    "path": "scan/sample/services/chime-sdk-meetings.json",
    "chars": 475,
    "preview": "[\n  {\n    \"function\": \"get_attendee\",\n    \"service\": \"chime-sdk-meetings\"\n  },\n  {\n    \"function\": \"get_meeting\",\n    \"s"
  },
  {
    "path": "scan/sample/services/chime-sdk-messaging.json",
    "chars": 2316,
    "preview": "[\n  {\n    \"function\": \"describe_channel\",\n    \"service\": \"chime-sdk-messaging\"\n  },\n  {\n    \"function\": \"describe_channe"
  },
  {
    "path": "scan/sample/services/chime-sdk-voice.json",
    "chars": 3286,
    "preview": "[\n  {\n    \"function\": \"get_global_settings\",\n    \"service\": \"chime-sdk-voice\"\n  },\n  {\n    \"function\": \"get_paginator\",\n"
  },
  {
    "path": "scan/sample/services/chime.json",
    "chars": 5518,
    "preview": "[\n  {\n    \"function\": \"describe_app_instance\",\n    \"service\": \"chime\"\n  },\n  {\n    \"function\": \"describe_app_instance_ad"
  },
  {
    "path": "scan/sample/services/cleanrooms.json",
    "chars": 1725,
    "preview": "[\n  {\n    \"function\": \"get_analysis_template\",\n    \"service\": \"cleanrooms\"\n  },\n  {\n    \"function\": \"get_collaboration\","
  },
  {
    "path": "scan/sample/services/cloud9.json",
    "chars": 516,
    "preview": "[\n  {\n    \"function\": \"describe_environment_memberships\",\n    \"service\": \"cloud9\"\n  },\n  {\n    \"function\": \"describe_env"
  },
  {
    "path": "scan/sample/services/cloudcontrol.json",
    "chars": 455,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"cloudcontrol\"\n  },\n  {\n    \"function\": \"get_resource\",\n    \"servi"
  },
  {
    "path": "scan/sample/services/clouddirectory.json",
    "chars": 2461,
    "preview": "[\n  {\n    \"function\": \"get_applied_schema_version\",\n    \"service\": \"clouddirectory\"\n  },\n  {\n    \"function\": \"get_direct"
  },
  {
    "path": "scan/sample/services/cloudformation.json",
    "chars": 2769,
    "preview": "[\n  {\n    \"function\": \"describe_account_limits\",\n    \"service\": \"cloudformation\"\n  },\n  {\n    \"function\": \"describe_chan"
  },
  {
    "path": "scan/sample/services/cloudfront.json",
    "chars": 4533,
    "preview": "[\n  {\n    \"function\": \"describe_function\",\n    \"service\": \"cloudfront\"\n  },\n  {\n    \"function\": \"get_cache_policy\",\n    "
  },
  {
    "path": "scan/sample/services/cloudhsm.json",
    "chars": 764,
    "preview": "[\n  {\n    \"function\": \"describe_hapg\",\n    \"service\": \"cloudhsm\"\n  },\n  {\n    \"function\": \"describe_hsm\",\n    \"service\":"
  },
  {
    "path": "scan/sample/services/cloudhsmv2.json",
    "chars": 353,
    "preview": "[\n  {\n    \"function\": \"describe_backups\",\n    \"service\": \"cloudhsmv2\"\n  },\n  {\n    \"function\": \"describe_clusters\",\n    "
  },
  {
    "path": "scan/sample/services/cloudsearch.json",
    "chars": 960,
    "preview": "[\n  {\n    \"function\": \"describe_analysis_schemes\",\n    \"service\": \"cloudsearch\"\n  },\n  {\n    \"function\": \"describe_avail"
  },
  {
    "path": "scan/sample/services/cloudsearchdomain.json",
    "chars": 154,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"cloudsearchdomain\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    \"se"
  },
  {
    "path": "scan/sample/services/cloudtrail-data.json",
    "chars": 150,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"cloudtrail-data\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    \"serv"
  },
  {
    "path": "scan/sample/services/cloudtrail.json",
    "chars": 1509,
    "preview": "[\n  {\n    \"function\": \"describe_query\",\n    \"service\": \"cloudtrail\"\n  },\n  {\n    \"function\": \"describe_trails\",\n    \"ser"
  },
  {
    "path": "scan/sample/services/cloudwatch.json",
    "chars": 1369,
    "preview": "[\n  {\n    \"function\": \"describe_alarm_history\",\n    \"service\": \"cloudwatch\"\n  },\n  {\n    \"function\": \"describe_alarms\",\n"
  },
  {
    "path": "scan/sample/services/codeartifact.json",
    "chars": 1611,
    "preview": "[\n  {\n    \"function\": \"describe_domain\",\n    \"service\": \"codeartifact\"\n  },\n  {\n    \"function\": \"describe_package\",\n    "
  },
  {
    "path": "scan/sample/services/codebuild.json",
    "chars": 1370,
    "preview": "[\n  {\n    \"function\": \"describe_code_coverages\",\n    \"service\": \"codebuild\"\n  },\n  {\n    \"function\": \"describe_test_case"
  },
  {
    "path": "scan/sample/services/codecatalyst.json",
    "chars": 1315,
    "preview": "[\n  {\n    \"function\": \"get_dev_environment\",\n    \"service\": \"codecatalyst\"\n  },\n  {\n    \"function\": \"get_paginator\",\n   "
  },
  {
    "path": "scan/sample/services/codecommit.json",
    "chars": 2332,
    "preview": "[\n  {\n    \"function\": \"describe_merge_conflicts\",\n    \"service\": \"codecommit\"\n  },\n  {\n    \"function\": \"describe_pull_re"
  },
  {
    "path": "scan/sample/services/codedeploy.json",
    "chars": 1560,
    "preview": "[\n  {\n    \"function\": \"get_application\",\n    \"service\": \"codedeploy\"\n  },\n  {\n    \"function\": \"get_application_revision\""
  },
  {
    "path": "scan/sample/services/codeguru-reviewer.json",
    "chars": 864,
    "preview": "[\n  {\n    \"function\": \"describe_code_review\",\n    \"service\": \"codeguru-reviewer\"\n  },\n  {\n    \"function\": \"describe_reco"
  },
  {
    "path": "scan/sample/services/codeguru-security.json",
    "chars": 719,
    "preview": "[\n  {\n    \"function\": \"get_account_configuration\",\n    \"service\": \"codeguru-security\"\n  },\n  {\n    \"function\": \"get_find"
  },
  {
    "path": "scan/sample/services/codeguruprofiler.json",
    "chars": 993,
    "preview": "[\n  {\n    \"function\": \"describe_profiling_group\",\n    \"service\": \"codeguruprofiler\"\n  },\n  {\n    \"function\": \"get_findin"
  },
  {
    "path": "scan/sample/services/codepipeline.json",
    "chars": 1073,
    "preview": "[\n  {\n    \"function\": \"get_action_type\",\n    \"service\": \"codepipeline\"\n  },\n  {\n    \"function\": \"get_job_details\",\n    \""
  },
  {
    "path": "scan/sample/services/codestar-connections.json",
    "chars": 565,
    "preview": "[\n  {\n    \"function\": \"get_connection\",\n    \"service\": \"codestar-connections\"\n  },\n  {\n    \"function\": \"get_host\",\n    \""
  },
  {
    "path": "scan/sample/services/codestar-notifications.json",
    "chars": 608,
    "preview": "[\n  {\n    \"function\": \"describe_notification_rule\",\n    \"service\": \"codestar-notifications\"\n  },\n  {\n    \"function\": \"ge"
  },
  {
    "path": "scan/sample/services/codestar.json",
    "chars": 641,
    "preview": "[\n  {\n    \"function\": \"describe_project\",\n    \"service\": \"codestar\"\n  },\n  {\n    \"function\": \"describe_user_profile\",\n  "
  },
  {
    "path": "scan/sample/services/cognito-identity.json",
    "chars": 1085,
    "preview": "[\n  {\n    \"function\": \"describe_identity\",\n    \"service\": \"cognito-identity\"\n  },\n  {\n    \"function\": \"describe_identity"
  },
  {
    "path": "scan/sample/services/cognito-idp.json",
    "chars": 2174,
    "preview": "[\n  {\n    \"function\": \"describe_identity_provider\",\n    \"service\": \"cognito-idp\"\n  },\n  {\n    \"function\": \"describe_reso"
  },
  {
    "path": "scan/sample/services/cognito-sync.json",
    "chars": 864,
    "preview": "[\n  {\n    \"function\": \"describe_dataset\",\n    \"service\": \"cognito-sync\"\n  },\n  {\n    \"function\": \"describe_identity_pool"
  },
  {
    "path": "scan/sample/services/comprehend.json",
    "chars": 3028,
    "preview": "[\n  {\n    \"function\": \"describe_dataset\",\n    \"service\": \"comprehend\"\n  },\n  {\n    \"function\": \"describe_document_classi"
  },
  {
    "path": "scan/sample/services/comprehendmedical.json",
    "chars": 1083,
    "preview": "[\n  {\n    \"function\": \"describe_entities_detection_v2_job\",\n    \"service\": \"comprehendmedical\"\n  },\n  {\n    \"function\": "
  },
  {
    "path": "scan/sample/services/compute-optimizer.json",
    "chars": 1434,
    "preview": "[\n  {\n    \"function\": \"describe_recommendation_export_jobs\",\n    \"service\": \"compute-optimizer\"\n  },\n  {\n    \"function\":"
  },
  {
    "path": "scan/sample/services/config.json",
    "chars": 4528,
    "preview": "[\n  {\n    \"function\": \"describe_aggregate_compliance_by_config_rules\",\n    \"service\": \"config\"\n  },\n  {\n    \"function\": "
  },
  {
    "path": "scan/sample/services/connect-contact-lens.json",
    "chars": 266,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"connect-contact-lens\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    "
  },
  {
    "path": "scan/sample/services/connect.json",
    "chars": 5104,
    "preview": "[\n  {\n    \"function\": \"describe_agent_status\",\n    \"service\": \"connect\"\n  },\n  {\n    \"function\": \"describe_contact\",\n   "
  },
  {
    "path": "scan/sample/services/connectcampaigns.json",
    "chars": 749,
    "preview": "[\n  {\n    \"function\": \"describe_campaign\",\n    \"service\": \"connectcampaigns\"\n  },\n  {\n    \"function\": \"get_campaign_stat"
  },
  {
    "path": "scan/sample/services/connectcases.json",
    "chars": 1031,
    "preview": "[\n  {\n    \"function\": \"get_case\",\n    \"service\": \"connectcases\"\n  },\n  {\n    \"function\": \"get_case_event_configuration\","
  },
  {
    "path": "scan/sample/services/connectparticipant.json",
    "chars": 314,
    "preview": "[\n  {\n    \"function\": \"get_attachment\",\n    \"service\": \"connectparticipant\"\n  },\n  {\n    \"function\": \"get_paginator\",\n  "
  },
  {
    "path": "scan/sample/services/controltower.json",
    "chars": 304,
    "preview": "[\n  {\n    \"function\": \"get_control_operation\",\n    \"service\": \"controltower\"\n  },\n  {\n    \"function\": \"get_paginator\",\n "
  },
  {
    "path": "scan/sample/services/cur.json",
    "chars": 203,
    "preview": "[\n  {\n    \"function\": \"describe_report_definitions\",\n    \"service\": \"cur\"\n  },\n  {\n    \"function\": \"get_paginator\",\n    "
  },
  {
    "path": "scan/sample/services/customer-profiles.json",
    "chars": 2411,
    "preview": "[\n  {\n    \"function\": \"get_auto_merging_preview\",\n    \"service\": \"customer-profiles\"\n  },\n  {\n    \"function\": \"get_calcu"
  },
  {
    "path": "scan/sample/services/databrew.json",
    "chars": 1253,
    "preview": "[\n  {\n    \"function\": \"describe_dataset\",\n    \"service\": \"databrew\"\n  },\n  {\n    \"function\": \"describe_job\",\n    \"servic"
  },
  {
    "path": "scan/sample/services/dataexchange.json",
    "chars": 955,
    "preview": "[\n  {\n    \"function\": \"get_asset\",\n    \"service\": \"dataexchange\"\n  },\n  {\n    \"function\": \"get_data_set\",\n    \"service\":"
  },
  {
    "path": "scan/sample/services/datapipeline.json",
    "chars": 451,
    "preview": "[\n  {\n    \"function\": \"describe_objects\",\n    \"service\": \"datapipeline\"\n  },\n  {\n    \"function\": \"describe_pipelines\",\n "
  },
  {
    "path": "scan/sample/services/datasync.json",
    "chars": 2074,
    "preview": "[\n  {\n    \"function\": \"describe_agent\",\n    \"service\": \"datasync\"\n  },\n  {\n    \"function\": \"describe_discovery_job\",\n   "
  },
  {
    "path": "scan/sample/services/dax.json",
    "chars": 610,
    "preview": "[\n  {\n    \"function\": \"describe_clusters\",\n    \"service\": \"dax\"\n  },\n  {\n    \"function\": \"describe_default_parameters\",\n"
  },
  {
    "path": "scan/sample/services/detective.json",
    "chars": 749,
    "preview": "[\n  {\n    \"function\": \"describe_organization_configuration\",\n    \"service\": \"detective\"\n  },\n  {\n    \"function\": \"get_me"
  },
  {
    "path": "scan/sample/services/devicefarm.json",
    "chars": 3274,
    "preview": "[\n  {\n    \"function\": \"get_account_settings\",\n    \"service\": \"devicefarm\"\n  },\n  {\n    \"function\": \"get_device\",\n    \"se"
  },
  {
    "path": "scan/sample/services/devops-guru.json",
    "chars": 1868,
    "preview": "[\n  {\n    \"function\": \"describe_account_health\",\n    \"service\": \"devops-guru\"\n  },\n  {\n    \"function\": \"describe_account"
  },
  {
    "path": "scan/sample/services/directconnect.json",
    "chars": 1809,
    "preview": "[\n  {\n    \"function\": \"describe_connection_loa\",\n    \"service\": \"directconnect\"\n  },\n  {\n    \"function\": \"describe_conne"
  },
  {
    "path": "scan/sample/services/discovery.json",
    "chars": 909,
    "preview": "[\n  {\n    \"function\": \"describe_agents\",\n    \"service\": \"discovery\"\n  },\n  {\n    \"function\": \"describe_configurations\",\n"
  },
  {
    "path": "scan/sample/services/dlm.json",
    "chars": 340,
    "preview": "[\n  {\n    \"function\": \"get_lifecycle_policies\",\n    \"service\": \"dlm\"\n  },\n  {\n    \"function\": \"get_lifecycle_policy\",\n  "
  },
  {
    "path": "scan/sample/services/dms.json",
    "chars": 2846,
    "preview": "[\n  {\n    \"function\": \"describe_account_attributes\",\n    \"service\": \"dms\"\n  },\n  {\n    \"function\": \"describe_applicable_"
  },
  {
    "path": "scan/sample/services/docdb-elastic.json",
    "chars": 534,
    "preview": "[\n  {\n    \"function\": \"get_cluster\",\n    \"service\": \"docdb-elastic\"\n  },\n  {\n    \"function\": \"get_cluster_snapshot\",\n   "
  },
  {
    "path": "scan/sample/services/docdb.json",
    "chars": 1492,
    "preview": "[\n  {\n    \"function\": \"describe_certificates\",\n    \"service\": \"docdb\"\n  },\n  {\n    \"function\": \"describe_db_cluster_para"
  },
  {
    "path": "scan/sample/services/drs.json",
    "chars": 1209,
    "preview": "[\n  {\n    \"function\": \"describe_job_log_items\",\n    \"service\": \"drs\"\n  },\n  {\n    \"function\": \"describe_jobs\",\n    \"serv"
  },
  {
    "path": "scan/sample/services/ds.json",
    "chars": 1539,
    "preview": "[\n  {\n    \"function\": \"describe_certificate\",\n    \"service\": \"ds\"\n  },\n  {\n    \"function\": \"describe_client_authenticati"
  },
  {
    "path": "scan/sample/services/dynamodb.json",
    "chars": 1703,
    "preview": "[\n  {\n    \"function\": \"describe_backup\",\n    \"service\": \"dynamodb\"\n  },\n  {\n    \"function\": \"describe_continuous_backups"
  },
  {
    "path": "scan/sample/services/dynamodbstreams.json",
    "chars": 454,
    "preview": "[\n  {\n    \"function\": \"describe_stream\",\n    \"service\": \"dynamodbstreams\"\n  },\n  {\n    \"function\": \"get_paginator\",\n    "
  },
  {
    "path": "scan/sample/services/ebs.json",
    "chars": 333,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"ebs\"\n  },\n  {\n    \"function\": \"get_snapshot_block\",\n    \"service\""
  },
  {
    "path": "scan/sample/services/ec2-instance-connect.json",
    "chars": 160,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"ec2-instance-connect\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    "
  },
  {
    "path": "scan/sample/services/ec2.json",
    "chars": 15415,
    "preview": "[\n  {\n    \"function\": \"describe_account_attributes\",\n    \"service\": \"ec2\"\n  },\n  {\n    \"function\": \"describe_address_tra"
  },
  {
    "path": "scan/sample/services/ecr-public.json",
    "chars": 845,
    "preview": "[\n  {\n    \"function\": \"describe_image_tags\",\n    \"service\": \"ecr-public\"\n  },\n  {\n    \"function\": \"describe_images\",\n   "
  },
  {
    "path": "scan/sample/services/ecr.json",
    "chars": 1228,
    "preview": "[\n  {\n    \"function\": \"describe_image_replication_status\",\n    \"service\": \"ecr\"\n  },\n  {\n    \"function\": \"describe_image"
  },
  {
    "path": "scan/sample/services/ecs.json",
    "chars": 1384,
    "preview": "[\n  {\n    \"function\": \"describe_capacity_providers\",\n    \"service\": \"ecs\"\n  },\n  {\n    \"function\": \"describe_clusters\",\n"
  },
  {
    "path": "scan/sample/services/efs.json",
    "chars": 957,
    "preview": "[\n  {\n    \"function\": \"describe_access_points\",\n    \"service\": \"efs\"\n  },\n  {\n    \"function\": \"describe_account_preferen"
  },
  {
    "path": "scan/sample/services/eks.json",
    "chars": 1171,
    "preview": "[\n  {\n    \"function\": \"describe_addon\",\n    \"service\": \"eks\"\n  },\n  {\n    \"function\": \"describe_addon_configuration\",\n  "
  },
  {
    "path": "scan/sample/services/elastic-inference.json",
    "chars": 509,
    "preview": "[\n  {\n    \"function\": \"describe_accelerator_offerings\",\n    \"service\": \"elastic-inference\"\n  },\n  {\n    \"function\": \"des"
  },
  {
    "path": "scan/sample/services/elasticache.json",
    "chars": 1746,
    "preview": "[\n  {\n    \"function\": \"describe_cache_clusters\",\n    \"service\": \"elasticache\"\n  },\n  {\n    \"function\": \"describe_cache_e"
  },
  {
    "path": "scan/sample/services/elasticbeanstalk.json",
    "chars": 1679,
    "preview": "[\n  {\n    \"function\": \"describe_account_attributes\",\n    \"service\": \"elasticbeanstalk\"\n  },\n  {\n    \"function\": \"describ"
  },
  {
    "path": "scan/sample/services/elastictranscoder.json",
    "chars": 476,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"elastictranscoder\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    \"se"
  },
  {
    "path": "scan/sample/services/elb.json",
    "chars": 658,
    "preview": "[\n  {\n    \"function\": \"describe_account_limits\",\n    \"service\": \"elb\"\n  },\n  {\n    \"function\": \"describe_instance_health"
  },
  {
    "path": "scan/sample/services/elbv2.json",
    "chars": 953,
    "preview": "[\n  {\n    \"function\": \"describe_account_limits\",\n    \"service\": \"elbv2\"\n  },\n  {\n    \"function\": \"describe_listener_cert"
  },
  {
    "path": "scan/sample/services/emr-containers.json",
    "chars": 980,
    "preview": "[\n  {\n    \"function\": \"describe_job_run\",\n    \"service\": \"emr-containers\"\n  },\n  {\n    \"function\": \"describe_job_templat"
  },
  {
    "path": "scan/sample/services/emr-serverless.json",
    "chars": 617,
    "preview": "[\n  {\n    \"function\": \"get_application\",\n    \"service\": \"emr-serverless\"\n  },\n  {\n    \"function\": \"get_dashboard_for_job"
  },
  {
    "path": "scan/sample/services/emr.json",
    "chars": 1854,
    "preview": "[\n  {\n    \"function\": \"describe_cluster\",\n    \"service\": \"emr\"\n  },\n  {\n    \"function\": \"describe_job_flows\",\n    \"servi"
  },
  {
    "path": "scan/sample/services/entityresolution.json",
    "chars": 806,
    "preview": "[\n  {\n    \"function\": \"get_match_id\",\n    \"service\": \"entityresolution\"\n  },\n  {\n    \"function\": \"get_matching_job\",\n   "
  },
  {
    "path": "scan/sample/services/es.json",
    "chars": 2079,
    "preview": "[\n  {\n    \"function\": \"describe_domain_auto_tunes\",\n    \"service\": \"es\"\n  },\n  {\n    \"function\": \"describe_domain_change"
  },
  {
    "path": "scan/sample/services/events.json",
    "chars": 1717,
    "preview": "[\n  {\n    \"function\": \"describe_api_destination\",\n    \"service\": \"events\"\n  },\n  {\n    \"function\": \"describe_archive\",\n "
  },
  {
    "path": "scan/sample/services/evidently.json",
    "chars": 1058,
    "preview": "[\n  {\n    \"function\": \"get_experiment\",\n    \"service\": \"evidently\"\n  },\n  {\n    \"function\": \"get_experiment_results\",\n  "
  },
  {
    "path": "scan/sample/services/finspace-data.json",
    "chars": 1338,
    "preview": "[\n  {\n    \"function\": \"get_changeset\",\n    \"service\": \"finspace-data\"\n  },\n  {\n    \"function\": \"get_data_view\",\n    \"ser"
  },
  {
    "path": "scan/sample/services/finspace.json",
    "chars": 1218,
    "preview": "[\n  {\n    \"function\": \"get_environment\",\n    \"service\": \"finspace\"\n  },\n  {\n    \"function\": \"get_kx_changeset\",\n    \"ser"
  },
  {
    "path": "scan/sample/services/firehose.json",
    "chars": 375,
    "preview": "[\n  {\n    \"function\": \"describe_delivery_stream\",\n    \"service\": \"firehose\"\n  },\n  {\n    \"function\": \"get_paginator\",\n  "
  },
  {
    "path": "scan/sample/services/fis.json",
    "chars": 748,
    "preview": "[\n  {\n    \"function\": \"get_action\",\n    \"service\": \"fis\"\n  },\n  {\n    \"function\": \"get_experiment\",\n    \"service\": \"fis\""
  },
  {
    "path": "scan/sample/services/fms.json",
    "chars": 1784,
    "preview": "[\n  {\n    \"function\": \"get_admin_account\",\n    \"service\": \"fms\"\n  },\n  {\n    \"function\": \"get_admin_scope\",\n    \"service"
  },
  {
    "path": "scan/sample/services/forecast.json",
    "chars": 2470,
    "preview": "[\n  {\n    \"function\": \"describe_auto_predictor\",\n    \"service\": \"forecast\"\n  },\n  {\n    \"function\": \"describe_dataset\",\n"
  },
  {
    "path": "scan/sample/services/forecastquery.json",
    "chars": 146,
    "preview": "[\n  {\n    \"function\": \"get_paginator\",\n    \"service\": \"forecastquery\"\n  },\n  {\n    \"function\": \"get_waiter\",\n    \"servic"
  },
  {
    "path": "scan/sample/services/frauddetector.json",
    "chars": 2023,
    "preview": "[\n  {\n    \"function\": \"describe_detector\",\n    \"service\": \"frauddetector\"\n  },\n  {\n    \"function\": \"describe_model_versi"
  },
  {
    "path": "scan/sample/services/fsx.json",
    "chars": 867,
    "preview": "[\n  {\n    \"function\": \"describe_backups\",\n    \"service\": \"fsx\"\n  },\n  {\n    \"function\": \"describe_data_repository_associ"
  },
  {
    "path": "scan/sample/services/gamelift.json",
    "chars": 3409,
    "preview": "[\n  {\n    \"function\": \"describe_alias\",\n    \"service\": \"gamelift\"\n  },\n  {\n    \"function\": \"describe_build\",\n    \"servic"
  },
  {
    "path": "scan/sample/services/gamesparks.json",
    "chars": 1405,
    "preview": "[\n  {\n    \"function\": \"get_extension\",\n    \"service\": \"gamesparks\"\n  },\n  {\n    \"function\": \"get_extension_version\",\n   "
  },
  {
    "path": "scan/sample/services/glacier.json",
    "chars": 1057,
    "preview": "[\n  {\n    \"function\": \"describe_job\",\n    \"service\": \"glacier\"\n  },\n  {\n    \"function\": \"describe_vault\",\n    \"service\":"
  },
  {
    "path": "scan/sample/services/globalaccelerator.json",
    "chars": 1814,
    "preview": "[\n  {\n    \"function\": \"describe_accelerator\",\n    \"service\": \"globalaccelerator\"\n  },\n  {\n    \"function\": \"describe_acce"
  },
  {
    "path": "scan/sample/services/glue.json",
    "chars": 5898,
    "preview": "[\n  {\n    \"function\": \"get_blueprint\",\n    \"service\": \"glue\"\n  },\n  {\n    \"function\": \"get_blueprint_run\",\n    \"service\""
  },
  {
    "path": "scan/sample/services/grafana.json",
    "chars": 661,
    "preview": "[\n  {\n    \"function\": \"describe_workspace\",\n    \"service\": \"grafana\"\n  },\n  {\n    \"function\": \"describe_workspace_authen"
  },
  {
    "path": "scan/sample/services/greengrass.json",
    "chars": 3875,
    "preview": "[\n  {\n    \"function\": \"get_associated_role\",\n    \"service\": \"greengrass\"\n  },\n  {\n    \"function\": \"get_bulk_deployment_s"
  },
  {
    "path": "scan/sample/services/greengrassv2.json",
    "chars": 1359,
    "preview": "[\n  {\n    \"function\": \"describe_component\",\n    \"service\": \"greengrassv2\"\n  },\n  {\n    \"function\": \"get_component\",\n    "
  },
  {
    "path": "scan/sample/services/groundstation.json",
    "chars": 1396,
    "preview": "[\n  {\n    \"function\": \"describe_contact\",\n    \"service\": \"groundstation\"\n  },\n  {\n    \"function\": \"describe_ephemeris\",\n"
  },
  {
    "path": "scan/sample/services/guardduty.json",
    "chars": 2325,
    "preview": "[\n  {\n    \"function\": \"describe_malware_scans\",\n    \"service\": \"guardduty\"\n  },\n  {\n    \"function\": \"describe_organizati"
  },
  {
    "path": "scan/sample/services/health.json",
    "chars": 1053,
    "preview": "[\n  {\n    \"function\": \"describe_affected_accounts_for_organization\",\n    \"service\": \"health\"\n  },\n  {\n    \"function\": \"d"
  },
  {
    "path": "scan/sample/services/healthlake.json",
    "chars": 694,
    "preview": "[\n  {\n    \"function\": \"describe_fhir_datastore\",\n    \"service\": \"healthlake\"\n  },\n  {\n    \"function\": \"describe_fhir_exp"
  },
  {
    "path": "scan/sample/services/honeycode.json",
    "chars": 585,
    "preview": "[\n  {\n    \"function\": \"describe_table_data_import_job\",\n    \"service\": \"honeycode\"\n  },\n  {\n    \"function\": \"get_paginat"
  }
]

// ... and 225 more files (download for full content)

About this extraction

This page contains the full source code of the aws-samples/aws-auto-inventory GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 425 files (1.1 MB), approximately 333.7k tokens, and a symbol index with 110 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!