Repository: humanitec-architecture/reference-architecture-aws Branch: main Commit: 1eef11e65809 Files: 33 Total size: 65.8 KB Directory structure: gitextract_c_xm77iv/ ├── .github/ │ ├── CODEOWNERS │ └── workflows/ │ └── ci.yaml ├── .gitignore ├── .tflint.hcl ├── LICENSE ├── Makefile ├── README.md ├── docs/ │ ├── .terraform-docs-example.yaml │ └── .terraform-docs.yaml ├── main.tf ├── modules/ │ ├── base/ │ │ ├── .terraform-docs.yaml │ │ ├── README.md │ │ ├── humanitec.tf │ │ ├── main.tf │ │ ├── meta.tf │ │ ├── outputs.tf │ │ ├── providers.tf │ │ ├── terraform.tfvars.example │ │ └── variables.tf │ ├── github/ │ │ ├── README.md │ │ ├── aws.tf │ │ ├── main.tf │ │ ├── providers.tf │ │ ├── terraform.tfvars.example │ │ └── variables.tf │ └── portal-backstage/ │ ├── README.md │ ├── main.tf │ ├── providers.tf │ ├── terraform.tfvars.example │ └── variables.tf ├── providers.tf ├── terraform.tfvars.example └── variables.tf ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/CODEOWNERS ================================================ * @humanitec-architecture/humanitec ================================================ FILE: .github/workflows/ci.yaml ================================================ name: CI on: push: branches: [main] pull_request: branches: [main] jobs: test: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: hashicorp/setup-terraform@v2 with: terraform_version: ~1.5 - name: Terraform Version run: terraform -version - name: Install terraform-docs run: | WORK_DIR=$(mktemp -d) curl -Lo ${WORK_DIR}/terraform-docs.tar.gz https://github.com/terraform-docs/terraform-docs/releases/download/v0.18.0/terraform-docs-v0.18.0-$(uname)-amd64.tar.gz cd ${WORK_DIR} tar -xzf terraform-docs.tar.gz chmod +x terraform-docs mv terraform-docs /usr/local/bin/terraform-docs - name: Generate docs run: make docs - name: Check git diff is clean (all 
files generated should be committed) run: git diff --exit-code - uses: terraform-linters/setup-tflint@v4 with: tflint_version: v0.51.1 - name: Terraform Lint run: make lint - name: Terraform Validate run: make validate ================================================ FILE: .gitignore ================================================ .idea .terraform .terraform.lock.hcl *.tfstate *.tfstate.* *.tfplan *.terraformrc *.tfvars terraform.rc github-app-credentials.json **/.DS_Store ================================================ FILE: .tflint.hcl ================================================ plugin "terraform" { enabled = true preset = "recommended" } plugin "aws" { enabled = true version = "0.31.0" source = "github.com/terraform-linters/tflint-ruleset-aws" } ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: Makefile ================================================ TF_DIRS = $(patsubst %/main.tf, %, $(shell find . -type d -name .terraform -prune -o -name 'main.tf' -print)) VALIDATE_TF_DIRS = $(addprefix validate-,$(TF_DIRS)) LINT_TF_DIRS = $(addprefix lint-,$(TF_DIRS)) DOCS_TF_DIRS = $(addprefix docs-,$(TF_DIRS)) # Generate docs for a terraform directories $(DOCS_TF_DIRS): docs-%: @echo "Docs $*" terraform-docs --config docs/.terraform-docs.yaml $* terraform-docs --config docs/.terraform-docs-example.yaml $* # Generate docs .PHONY: docs docs: $(DOCS_TF_DIRS) @echo "All docs generated" # Format all terraform files fmt: terraform fmt -recursive # Check if all terraform files are formatted fmt-check: terraform fmt -recursive -check # Validate a terraform directories $(VALIDATE_TF_DIRS): validate-%: @echo "Validate $*" terraform -chdir="$*" init -upgrade terraform -chdir="$*" validate # Validate all terraform directories validate: $(VALIDATE_TF_DIRS) @echo "All validated" # Lint a terraform directories $(LINT_TF_DIRS): lint-%: @echo "Lint $*" tflint --config "$(PWD)/.tflint.hcl" --chdir="$*" # Initialize tflint lint-init: tflint --init # Lint all terraform directories lint: lint-init $(LINT_TF_DIRS) fmt-check @echo "All linted" ================================================ FILE: README.md ================================================ # Deprecation notice ⚠️ The 
present reference architecture implementation for AWS has been deprecated. It will continue to be available at this location for some time, but not receive further updates. The deprecation only affects the reference implementation contained in this repository, not the underlying conceptual architectures and supplementary materials shown on the [Humanitec website](https://humanitec.com/reference-architectures). # Humanitec AWS Reference Architecture > TL;DR > > Skip the theory? Go [here](README.md#how-to-spin-up-your-humanitec-aws-reference-architecture) to spin up your Humanitec AWS Reference Architecture Implementation. > > [Follow this learning path to master your Internal Developer Platform](https://developer.humanitec.com/training/master-your-internal-developer-platform/introduction/). > Building an Internal Developer Platform (IDP) can come with many challenges. To give you a head start, we’ve created a set of [reference architectures](https://humanitec.com/reference-architectures) based on hundreds of real-world setups. These architectures described in code provide a starting point to build your own IDP within minutes, along with customization capabilities to ensure your platform meets the unique needs of your users (developers). The initial version of this reference architecture has been presented by Mike Gatto, Sr. DevOps Engineer, McKinsey and Stephan Schneider, Digital Expert Associate Partner, McKinsey at [PlatformCon 2023](https://www.youtube.com/watch?v=AimSwK8Mw-U). ## What is an Internal Developer Platform (IDP)? An [Internal Developer Platform (IDP)](https://humanitec.com/blog/what-is-an-internal-developer-platform) is the sum of all the tech and tools that a platform engineering team binds together to pave golden paths for developers. IDPs lower cognitive load across the engineering organization and enable developer self-service, without abstracting away context from developers or making the underlying tech inaccessible. 
Well-designed IDPs follow a Platform as a Product approach, where a platform team builds, maintains, and continuously improves the IDP, following product management principles and best practices. ## Understanding the different planes of the IDP reference architecture When McKinsey originally [published the reference architecture](https://www.youtube.com/watch?v=AimSwK8Mw-U) they proposed five planes that describe the different parts of a modern Internal Developer Platform (IDP). ![AWS reference architecture Humanitec](docs/images/AWS-reference-architecture-Humanitec.png) ### Developer Control Plane This plane is the primary configuration layer and interaction point for the platform users. It harbors the following components: * A **Version Control System**. GitHub is a prominent example, but this can be any system that contains two types of repositories: * Application Source Code * Platform Source Code, e.g. using Terraform * **Workload specifications**. The reference architecture uses [Score](https://developer.humanitec.com/score/overview/). * A **portal** for developers to interact with. It can be the Humanitec Portal, but you might also use [Backstage](https://backstage.io/) or any other portal on the market. ### Integration and Delivery Plane This plane is about building and storing the image, creating app and infra configs from the abstractions provided by the developers, and deploying the final state. It’s where the domains of developers and platform engineers meet. This plane usually contains four different tools: * A **CI pipeline**. It can be Github Actions or any CI tooling on the market. * The **image registry** holding your container images. Again, this can be any registry on the market. * An **orchestrator** which in our example, is the Humanitec Platform Orchestrator. 
* The **CD system**, which can be the Platform Orchestrator’s deployment pipeline capabilities — an external system triggered by the Orchestrator using a webhook, or a setup in tandem with GitOps operators like ArgoCD. ### Monitoring and Logging Plane The integration of monitoring and logging systems varies greatly depending on the system. This plane however is not a focus of the reference architecture. ### Security Plane The security plane of the reference architecture is focused on the secrets management system. The secrets manager stores configuration information such as database passwords, API keys, or TLS certificates needed by an Application at runtime. It allows the Platform Orchestrator to reference the secrets and inject them into the Workloads dynamically. You can learn more about secrets management and integration with other secrets management [here](https://developer.humanitec.com/platform-orchestrator/security/overview). The reference architecture sample implementations use the secrets store attached to the Humanitec SaaS system. ### Resource Plane This plane is where the actual infrastructure exists including clusters, databases, storage, or DNS services. The configuration of the Resources is managed by the Platform Orchestrator which dynamically creates app and infrastructure configurations with every deployment and creates, updates, or deletes dependent Resources as required. ## How to spin up your Humanitec AWS Reference Architecture This repo contains an implementation of part of the Humanitec Reference Architecture for an Internal Developer Platform, including Backstage as optional Portal solution. This repo covers the base layer of the implementation for AWS. By default, the following will be provisioned: * VPC * EKS Cluster * IAM User to access the cluster * Ingress NGINX in the cluster * Resource Definitions in Humanitec for: * Kubernetes Cluster ### Prerequisites * A Humanitec account with the `Administrator` role in an Organization. 
Get a [free trial](https://app.humanitec.io/auth/signup) if you are just starting. * An AWS account * [AWS CLI](https://aws.amazon.com/cli/) installed locally * [terraform](https://www.terraform.io/) installed locally * (Optional) [Humanitec CLI](https://developer.humanitec.com/platform-orchestrator/cli/) installed locally * (Optional) [GitHub CLI](https://cli.github.com/) installed locally ### Usage **Note: Using this Reference Architecture Implementation will incur costs for your AWS project.** It is recommended that you fully review the code before you run it to ensure you understand the impact of provisioning this infrastructure. Humanitec does not take responsibility for any costs incurred or damage caused when using the Reference Architecture Implementation. This reference architecture implementation uses Terraform. You will need to do the following: 1. [Fork this GitHub repo](https://github.com/humanitec-architecture/reference-architecture-aws/fork), clone it to your local machine and navigate to the root of the repository. 2. Set the required input variables. (see [Required input variables](#required-input-variables)) 3. Ensure you are logged in with `aws`. (Follow the [quickstart](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-quickstart.html) if you aren't) 4. 1. Login to Humanitec (your user needs to have the `Administrator` role on the Humanitec Organization) ```shell humctl login ``` 2. Alternatively: Set the `HUMANITEC_TOKEN` environment variable to an appropriate [Humanitec API token](https://developer.humanitec.com/platform-orchestrator/security/service-users/#generate-an-api-token-from-a-service-user) with the `Administrator` role on the Humanitec Organization. For example: ```shell export HUMANITEC_TOKEN="my-humanitec-api-token" ``` 5. Run terraform: ```shell terraform init terraform plan terraform apply ``` #### Required input variables Terraform reads variables by default from a file called `terraform.tfvars`. 
You can create your own file by renaming the `terraform.tfvars.example` file in the root of the repo and then filling in the missing values. You can find details about each of those variables and additional supported variables under [Inputs](#inputs). ### Verify your result Check for the existence of key elements of the reference architecture. This is a subset of all elements only. For a complete list of what was installed, review the Terraform code. 1. Set the `HUMANITEC_ORG` environment variable to the ID of your Humanitec Organization (must be all lowercase): ```shell export HUMANITEC_ORG="my-humanitec-org" ``` 2. Verify the existence of the Resource Definition for the EKS cluster in your Humanitec Organization: ```shell curl -s https://api.humanitec.io/orgs/${HUMANITEC_ORG}/resources/defs/ref-arch \ --header "Authorization: Bearer ${HUMANITEC_TOKEN}" \ | jq .id,.type ``` This should output: ```shell "ref-arch" "k8s-cluster" ``` 3. Verify the existence of the newly created EKS cluster: ```shell aws eks list-clusters --region ``` This should output: ```json { "clusters": [ "ref-arch", "[more previously existing clusters here]" ] } ``` ### Enable a portal (optional) #### Portal Prerequisites Backstage requires a GitHub connection, which in turn needs: * A GitHub organization and permission to create new repositories in it. Go to <https://github.com/account/organizations/new> to create a new org (the "Free" option is fine). Note: it has to be an organization, a free account is not sufficient. * Configured GitHub access * Either using the GitHub CLI: ```shell gh auth login --scopes repo,workflow,admin:org,delete_repo ``` * Or using a token: * Create a classic github personal access token with `repo`, `workflow`, `delete_repo` and `admin:org` scope [here](https://github.com/settings/tokens). * Set the `GITHUB_TOKEN` environment variable to your token. ```shell export GITHUB_TOKEN="my-github-token" ``` * Set the `GITHUB_ORG_ID` environment variable to your GitHub organization ID. 
```shell export GITHUB_ORG_ID="my-github-org-id" ``` * Install the GitHub App for Backstage into your GitHub organization * Run `docker run --rm -it -e GITHUB_ORG_ID -v $(pwd):/pwd -p 127.0.0.1:3000:3000 ghcr.io/humanitec-architecture/create-gh-app` ([image source](https://github.com/humanitec-architecture/create-gh-app/)) and follow the instructions: * “All repositories” ~> Install * “Okay, […] was installed on the […] account.” ~> You can close the window and server. #### Portal Usage * Enable `with_backstage` inside your `terraform.tfvars` and configure the additional variables that are required for Backstage. * Perform another `terraform apply` #### Verify portal setup * [Fetch the DNS entry](https://developer.humanitec.com/score/getting-started/get-dns/) of the Humanitec Application `backstage`, Environment `development`. * Open the host in your browser. * Click the "Create" button and scaffold your first application. ### Cleaning up Once you are finished with the reference architecture, you can remove all provisioned infrastructure and the resource definitions created in Humanitec with the following: 1. Delete all Humanitec Applications scaffolded using the Portal, if you used one, but not the `backstage` app itself. 2. Ensure you are (still) logged in with `aws`. 3. Ensure you still have the `HUMANITEC_TOKEN` environment variable set to an appropriate Humanitec API token with the `Administrator` role on the Humanitec Organization. 4. 
Run terraform: ```shell terraform destroy ``` ## Terraform docs ### Requirements | Name | Version | |------|---------| | terraform | >= 1.3.0 | | aws | ~> 5.17 | | github | ~> 5.38 | | helm | ~> 2.12 | | humanitec | ~> 1.0 | | kubernetes | ~> 2.25 | | random | ~> 3.5 | ### Providers | Name | Version | |------|---------| | humanitec | ~> 1.0 | ### Modules | Name | Source | Version | |------|--------|---------| | base | ./modules/base | n/a | | github | ./modules/github | n/a | | github\_app | github.com/humanitec-architecture/shared-terraform-modules//modules/github-app | v2024-06-12 | | portal\_backstage | ./modules/portal-backstage | n/a | ### Resources | Name | Type | |------|------| | [humanitec_service_user_token.deployer](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/service_user_token) | resource | | [humanitec_user.deployer](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/user) | resource | ### Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| | aws\_account\_id | AWS Account (ID) to use | `string` | n/a | yes | | aws\_region | AWS region | `string` | n/a | yes | | disk\_size | Disk size in GB to use for EKS nodes | `number` | `20` | no | | github\_org\_id | GitHub org id (required for Backstage) | `string` | `null` | no | | humanitec\_org\_id | Humanitec Organization ID (required for Backstage) | `string` | `null` | no | | instance\_types | List of EC2 instances types to use for EKS nodes | `list(string)` |
[
"t3.large"
]
| no | | with\_backstage | Deploy Backstage | `bool` | `false` | no | ## Learn more Expand your knowledge by heading over to our learning path, and discover how to: * Deploy the Humanitec reference architecture using a cloud provider of your choice * Deploy and manage Applications using the Humanitec Platform Orchestrator and Score * Provision additional Resources and connect to them * Achieve standardization by design * Deal with special scenarios [Master your Internal Developer Platform](https://developer.humanitec.com/training/master-your-internal-developer-platform/introduction/) * [Introduction](https://developer.humanitec.com/training/master-your-internal-developer-platform/introduction/) * [Design principles](https://developer.humanitec.com/training/master-your-internal-developer-platform/design-principles/) * [Structure and integration points](https://developer.humanitec.com/training/master-your-internal-developer-platform/structure-and-integration-points/) * [Dynamic Configuration Management](https://developer.humanitec.com/training/master-your-internal-developer-platform/dynamic-config-management/) * [Tutorial: Set up the reference architecture in your cloud](https://developer.humanitec.com/training/master-your-internal-developer-platform/setup-ref-arch-in-your-cloud/) * [Theory on developer workflows](https://developer.humanitec.com/training/master-your-internal-developer-platform/theory-on-dev-workflows/) * [Tutorial: Scaffold a new Workload and create staging and prod Environments](https://developer.humanitec.com/training/master-your-internal-developer-platform/scaffolding-a-new-workload/) * [Tutorial: Deploy an Amazon S3 Resource to production](https://developer.humanitec.com/training/master-your-internal-developer-platform/deploy-a-resource/) * [Tutorial: Perform daily developer activities (debug, rollback, diffs, logs)](https://developer.humanitec.com/training/master-your-internal-developer-platform/daily-activities/) * [Tutorial: Deploy ephemeral 
Environments](https://developer.humanitec.com/training/master-your-internal-developer-platform/deploy-ephemeral-environments/) * [Theory on platform engineering workflows](https://developer.humanitec.com/training/master-your-internal-developer-platform/theory-on-pe-workflows/) * [Resource management theory](https://developer.humanitec.com/training/master-your-internal-developer-platform/resource-management-theory/) * [Tutorial: Provision a Redis cluster on AWS using Terraform](https://developer.humanitec.com/training/master-your-internal-developer-platform/provision-redis-aws/) * [Tutorial: Update Resource Definitions for related Applications](https://developer.humanitec.com/training/master-your-internal-developer-platform/update-resource-definitions-for-related-applications/) ================================================ FILE: docs/.terraform-docs-example.yaml ================================================ formatter: "tfvars hcl" output: file: "./terraform.tfvars.example" mode: replace template: "{{ .Content }}" settings: description: true ================================================ FILE: docs/.terraform-docs.yaml ================================================ formatter: "markdown table" output: file: "./README.md" sort: enabled: true by: required settings: anchor: false indent: 3 hide-empty: true lockfile: false ================================================ FILE: main.tf ================================================ # AWS reference architecture module "base" { source = "./modules/base" region = var.aws_region instance_types = var.instance_types disk_size = var.disk_size } # User used for scaffolding and deploying apps resource "humanitec_user" "deployer" { count = var.with_backstage ? 1 : 0 name = "deployer" role = "administrator" type = "service" } resource "humanitec_service_user_token" "deployer" { count = var.with_backstage ? 
1 : 0 id = "deployer" user_id = humanitec_user.deployer[0].id description = "Used by scaffolding and deploying" } module "github" { count = var.with_backstage ? 1 : 0 source = "./modules/github" humanitec_org_id = var.humanitec_org_id humanitec_ci_service_user_token = humanitec_service_user_token.deployer[0].token aws_region = var.aws_region github_org_id = var.github_org_id depends_on = [module.base] } # Configure GitHub variables & secrets for Backstage itself and for all scaffolded apps locals { github_app_credentials_file = "github-app-credentials.json" } module "github_app" { count = var.with_backstage ? 1 : 0 source = "github.com/humanitec-architecture/shared-terraform-modules//modules/github-app?ref=v2024-06-12" credentials_file = "${path.module}/${local.github_app_credentials_file}" } # Deploy Backstage as Portal module "portal_backstage" { count = var.with_backstage ? 1 : 0 source = "./modules/portal-backstage" humanitec_org_id = var.humanitec_org_id humanitec_ci_service_user_token = humanitec_service_user_token.deployer[0].token github_org_id = var.github_org_id github_app_client_id = module.github_app[0].client_id github_app_client_secret = module.github_app[0].client_secret github_app_id = module.github_app[0].app_id github_app_private_key = module.github_app[0].private_key github_webhook_secret = module.github_app[0].webhook_secret depends_on = [module.github] } ================================================ FILE: modules/base/.terraform-docs.yaml ================================================ formatter: "markdown table" output: file: "./README.md" sort: enabled: true by: required settings: hide-empty: true ================================================ FILE: modules/base/README.md ================================================ # base Module that provides the reference architecture. 
## Terraform docs ### Requirements | Name | Version | |------|---------| | terraform | >= 1.3.0 | | aws | >= 4.50 | | helm | ~> 2.12 | | humanitec | ~> 1.0 | | kubernetes | >= 2.0.3 | | random | ~> 3.5 | ### Providers | Name | Version | |------|---------| | aws | >= 4.50 | | helm | ~> 2.12 | | humanitec | ~> 1.0 | | kubernetes | >= 2.0.3 | | random | ~> 3.5 | ### Modules | Name | Source | Version | |------|--------|---------| | aws\_eks | terraform-aws-modules/eks/aws | ~> 20.2 | | aws\_vpc | terraform-aws-modules/vpc/aws | ~> 5.1 | | default\_mysql | github.com/humanitec-architecture/resource-packs-in-cluster//humanitec-resource-defs/mysql/basic | v2024-06-05 | | default\_postgres | github.com/humanitec-architecture/resource-packs-in-cluster//humanitec-resource-defs/postgres/basic | v2024-06-05 | | ebs\_csi\_irsa\_role | terraform-aws-modules/iam/aws//modules/iam-role-for-service-accounts-eks | ~> 5.30 | ### Resources | Name | Type | |------|------| | [aws_iam_role.humanitec_svc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | | [aws_iam_role_policy_attachment.humanitec_svc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | | [helm_release.ingress_nginx](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | resource | | [humanitec_resource_account.cluster_account](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_account) | resource | | [humanitec_resource_definition.emptydir_volume](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition) | resource | | [humanitec_resource_definition.k8s_cluster_driver](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition) | resource | | 
[humanitec_resource_definition.k8s_namespace](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition) | resource | | [humanitec_resource_definition_criteria.default_mysql](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition_criteria) | resource | | [humanitec_resource_definition_criteria.default_postgres](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition_criteria) | resource | | [humanitec_resource_definition_criteria.emptydir_volume](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition_criteria) | resource | | [humanitec_resource_definition_criteria.k8s_cluster_driver](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition_criteria) | resource | | [humanitec_resource_definition_criteria.k8s_namespace](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition_criteria) | resource | | [random_password.external_id](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/password) | resource | | [aws_elb_hosted_zone_id.main](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/elb_hosted_zone_id) | data source | | [aws_iam_policy_document.instance_assume_role_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) | data source | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source | | [kubernetes_service.ingress_nginx_controller](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/data-sources/service) | data source | ### Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| | additional\_k8s\_access\_entries | Additional access entries add 
to the k8s aws-auth configmap |
list(object({
id = string
principal_arn = string
groups = list(string)
}))
| `[]` | no | | capacity\_type | Defines whether to use ON\_DEMAND or SPOT EC2 instances for EKS nodes | `string` | `"ON_DEMAND"` | no | | cluster\_name | Name for the EKS cluster | `string` | `"ref-arch"` | no | | cluster\_version | Version of the EKS cluster to deploy | `string` | `null` | no | | disk\_size | Disk size in GB to use for EKS nodes | `number` | `20` | no | | eks\_public\_access\_cidrs | List of CIDRs that can access the EKS cluster's public endpoint | `list(string)` |
[
"0.0.0.0/0"
]
| no | | environment | Name of the environment to be deployed into | `string` | `"development"` | no | | iam\_role\_name | Name of the IAM role to create for Humanitec EKS access | `string` | `"svc-humanitec"` | no | | ingress\_nginx\_min\_unavailable | Number of allowed unavailable replicas for the ingress-nginx controller | `number` | `1` | no | | ingress\_nginx\_replica\_count | Number of replicas for the ingress-nginx controller | `number` | `2` | no | | instance\_types | List of EC2 instance types to use for EKS nodes | `list(string)` |
[
"t3.large"
]
| no | | node\_group\_desired\_size | Desired number of nodes for the EKS node group | `number` | `3` | no | | node\_group\_max\_size | Maximum number of nodes for the EKS node group | `number` | `3` | no | | node\_group\_min\_size | Minimum number of nodes for the EKS node group | `number` | `2` | no | | region | AWS Region to deploy into | `string` | `"us-east-1"` | no | | vpc\_name | AWS VPC name | `string` | `"ref-arch"` | no | ### Outputs | Name | Description | |------|-------------| | eks\_cluster\_certificate\_authority\_data | Base64 encoded certificate data required to communicate with the cluster | | eks\_cluster\_endpoint | Endpoint for your Kubernetes API server | | eks\_cluster\_name | The name of the EKS cluster | | eks\_oidc\_provider | The OpenID Connect identity provider (issuer URL without leading `https://`) | | eks\_oidc\_provider\_arn | The ARN of the OIDC Provider | | environment | Name of the environment to be deployed into | | humanitec\_resource\_account\_id | Humanitec resource account id for the cluster | | ingress\_nginx\_external\_dns | External DNS entry for the Nginx ingress controller | | vpc\_id | VPC id | ================================================ FILE: modules/base/humanitec.tf ================================================ locals { res_def_prefix = "default-" } # Configure default resources for example apps locals { ingress_address = data.kubernetes_service.ingress_nginx_controller.status[0].load_balancer[0].ingress[0].hostname } data "aws_elb_hosted_zone_id" "main" {} # k8s-cluster resource "humanitec_resource_account" "cluster_account" { id = var.cluster_name name = var.cluster_name type = "aws-role" credentials = jsonencode({ aws_role = aws_iam_role.humanitec_svc.arn external_id = random_password.external_id.result }) depends_on = [aws_iam_role_policy_attachment.humanitec_svc] } resource "humanitec_resource_definition" "k8s_cluster_driver" { driver_type = "humanitec/k8s-cluster-eks" id = var.cluster_name name = 
var.cluster_name type = "k8s-cluster" driver_account = humanitec_resource_account.cluster_account.id driver_inputs = { values_string = jsonencode({ "name" = module.aws_eks.cluster_name "loadbalancer" = local.ingress_address "loadbalancer_hosted_zone" = data.aws_elb_hosted_zone_id.main.id "region" = var.region }) } } resource "humanitec_resource_definition_criteria" "k8s_cluster_driver" { resource_definition_id = humanitec_resource_definition.k8s_cluster_driver.id env_type = var.environment } # k8s-namespace resource "humanitec_resource_definition" "k8s_namespace" { driver_type = "humanitec/echo" id = "default-namespace" name = "default-namespace" type = "k8s-namespace" driver_inputs = { values_string = jsonencode({ "namespace" = "$${context.app.id}-$${context.env.id}" }) } } resource "humanitec_resource_definition_criteria" "k8s_namespace" { resource_definition_id = humanitec_resource_definition.k8s_namespace.id } # in-cluster postgres module "default_postgres" { source = "github.com/humanitec-architecture/resource-packs-in-cluster//humanitec-resource-defs/postgres/basic?ref=v2024-06-05" prefix = local.res_def_prefix } resource "humanitec_resource_definition_criteria" "default_postgres" { resource_definition_id = module.default_postgres.id env_type = var.environment } module "default_mysql" { source = "github.com/humanitec-architecture/resource-packs-in-cluster//humanitec-resource-defs/mysql/basic?ref=v2024-06-05" prefix = local.res_def_prefix } resource "humanitec_resource_definition_criteria" "default_mysql" { resource_definition_id = module.default_mysql.id env_type = var.environment } resource "humanitec_resource_definition" "emptydir_volume" { driver_type = "humanitec/template" id = "volume-emptydir" name = "volume-emptydir" type = "volume" driver_inputs = { values_string = jsonencode({ "templates" = { "manifests" = { "emptydir.yaml" = { "location" = "volumes" "data" = < { kubernetes_groups = [] principal_arn = s.principal_arn policy_associations = { 
cluster_admin = { policy_arn = "arn:aws:eks::aws:cluster-access-policy/AmazonEKSClusterAdminPolicy" access_scope = { namespaces = [] type = "cluster" } } } } } # required for ingress-nginx see https://github.com/terraform-aws-modules/terraform-aws-eks/issues/2513 node_security_group_additional_rules = { ingress_self_all = { description = "Node to node all ports/protocols" protocol = "-1" from_port = 0 to_port = 0 type = "ingress" self = true } } tags = local.tags } # Ingress controller resource "helm_release" "ingress_nginx" { name = "ingress-nginx" namespace = "ingress-nginx" create_namespace = true repository = "https://kubernetes.github.io/ingress-nginx" chart = "ingress-nginx" version = "4.12.1" wait = true timeout = 600 set { type = "string" name = "controller.replicaCount" value = var.ingress_nginx_replica_count } set { type = "string" name = "controller.minAvailable" value = var.ingress_nginx_min_unavailable } set { name = "controller.containerSecurityContext.runAsUser" value = 101 } set { name = "controller.containerSecurityContext.runAsGroup" value = 101 } set { name = "controller.containerSecurityContext.allowPrivilegeEscalation" value = false } set { name = "controller.containerSecurityContext.readOnlyRootFilesystem" value = false } set { name = "controller.containerSecurityContext.runAsNonRoot" value = true } set_list { name = "controller.containerSecurityContext.capabilities.drop" value = ["ALL"] } set_list { name = "controller.containerSecurityContext.capabilities.add" value = ["NET_BIND_SERVICE"] } depends_on = [module.aws_eks.eks_managed_node_groups] } ================================================ FILE: modules/base/meta.tf ================================================ data "kubernetes_service" "ingress_nginx_controller" { metadata { name = "ingress-nginx-controller" namespace = "ingress-nginx" } depends_on = [helm_release.ingress_nginx] } ================================================ FILE: modules/base/outputs.tf 
================================================ # General outputs output "environment" { description = "Name of the environment to be deployed into" value = var.environment } # VPC outputs output "vpc_id" { description = "VPC id" value = module.aws_vpc.vpc_id } # EKS outputs output "eks_oidc_provider" { description = "The OpenID Connect identity provider (issuer URL without leading `https://`)" value = module.aws_eks.oidc_provider } output "eks_oidc_provider_arn" { description = "The ARN of the OIDC Provider" value = module.aws_eks.oidc_provider_arn } output "eks_cluster_endpoint" { description = "Endpoint for your Kubernetes API server" value = module.aws_eks.cluster_endpoint } output "eks_cluster_certificate_authority_data" { description = "Base64 encoded certificate data required to communicate with the cluster" value = module.aws_eks.cluster_certificate_authority_data } output "eks_cluster_name" { description = "The name of the EKS cluster" value = module.aws_eks.cluster_name } # Ingress outputs output "ingress_nginx_external_dns" { description = "External DNS entry for the Nginx ingress controller" value = local.ingress_address } # Humanitec output "humanitec_resource_account_id" { description = "Humanitec resource account id for the cluster" value = humanitec_resource_account.cluster_account.id } ================================================ FILE: modules/base/providers.tf ================================================ terraform { required_providers { aws = { source = "hashicorp/aws" version = ">= 4.50" } kubernetes = { source = "hashicorp/kubernetes" version = ">= 2.0.3" } helm = { source = "hashicorp/helm" version = "~> 2.12" } humanitec = { source = "humanitec/humanitec" version = "~> 1.0" } random = { source = "hashicorp/random" version = "~> 3.5" } } required_version = ">= 1.3.0" } ================================================ FILE: modules/base/terraform.tfvars.example ================================================ # Additional access entries 
added to the k8s aws-auth configmap additional_k8s_access_entries = [] # Defines whether to use ON_DEMAND or SPOT EC2 instances for EKS nodes capacity_type = "ON_DEMAND" # Name for the EKS cluster cluster_name = "ref-arch" # Version of the EKS cluster to deploy cluster_version = "" # Disk size in GB to use for EKS nodes disk_size = 20 # List of CIDRs that can access the EKS cluster's public endpoint eks_public_access_cidrs = [ "0.0.0.0/0" ] # Name of the environment to be deployed into environment = "development" # Name of the IAM role to create for Humanitec EKS access iam_role_name = "svc-humanitec" # Number of allowed unavailable replicas for the ingress-nginx controller ingress_nginx_min_unavailable = 1 # Number of replicas for the ingress-nginx controller ingress_nginx_replica_count = 2 # List of EC2 instance types to use for EKS nodes instance_types = [ "t3.large" ] # Desired number of nodes for the EKS node group node_group_desired_size = 3 # Maximum number of nodes for the EKS node group node_group_max_size = 3 # Minimum number of nodes for the EKS node group node_group_min_size = 2 # AWS Region to deploy into region = "us-east-1" # AWS VPC name vpc_name = "ref-arch" ================================================ FILE: modules/base/variables.tf ================================================ variable "eks_public_access_cidrs" { description = "List of CIDRs that can access the EKS cluster's public endpoint" type = list(string) default = ["0.0.0.0/0"] } variable "region" { description = "AWS Region to deploy into" type = string default = "us-east-1" } variable "vpc_name" { description = "AWS VPC name" type = string default = "ref-arch" } variable "environment" { description = "Name of the environment to be deployed into" type = string default = "development" } variable "cluster_name" { description = "Name for the EKS cluster" type = string default = "ref-arch" } variable "cluster_version" { description = "Version of the EKS cluster to deploy" type = string 
default = null } variable "node_group_min_size" { description = "Minimum number of nodes for the EKS node group" type = number default = 2 } variable "node_group_max_size" { description = "Maximum number of nodes for the EKS node group" type = number default = 3 } variable "node_group_desired_size" { description = "Desired number of nodes for the EKS node group" type = number default = 3 } variable "instance_types" { description = "List of EC2 instance types to use for EKS nodes" type = list(string) default = [ "t3.large" ] } variable "capacity_type" { description = "Defines whether to use ON_DEMAND or SPOT EC2 instances for EKS nodes" type = string default = "ON_DEMAND" } variable "iam_role_name" { description = "Name of the IAM role to create for Humanitec EKS access" type = string default = "svc-humanitec" } variable "additional_k8s_access_entries" { description = "Additional access entries added to the k8s aws-auth configmap" type = list(object({ id = string principal_arn = string groups = list(string) })) default = [] } variable "ingress_nginx_replica_count" { description = "Number of replicas for the ingress-nginx controller" type = number default = 2 } variable "ingress_nginx_min_unavailable" { description = "Number of allowed unavailable replicas for the ingress-nginx controller" type = number default = 1 } variable "disk_size" { description = "Disk size in GB to use for EKS nodes" type = number default = 20 } ================================================ FILE: modules/github/README.md ================================================ # GitHub This module prepares a GitHub Organization to be used for scaffolding using a Portal. 
## Terraform docs ### Requirements | Name | Version | |------|---------| | terraform | >= 1.3.0 | | aws | ~> 5.17 | | github | ~> 5.38 | ### Providers | Name | Version | |------|---------| | aws | ~> 5.17 | | github | ~> 5.38 | ### Modules | Name | Source | Version | |------|--------|---------| | iam\_github\_oidc\_provider | terraform-aws-modules/iam/aws//modules/iam-github-oidc-provider | ~> 5.30 | | iam\_github\_oidc\_role | terraform-aws-modules/iam/aws//modules/iam-github-oidc-role | ~> 5.30 | ### Resources | Name | Type | |------|------| | [aws_iam_policy.ecr_push_policy](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | | [github_actions_organization_secret.backstage_humanitec_token](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_organization_secret) | resource | | [github_actions_organization_variable.backstage_aws_region](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_organization_variable) | resource | | [github_actions_organization_variable.backstage_aws_role_arn](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_organization_variable) | resource | | [github_actions_organization_variable.backstage_cloud_provider](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_organization_variable) | resource | | [github_actions_organization_variable.backstage_humanitec_org_id](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/actions_organization_variable) | resource | ### Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| | aws\_region | AWS region | `string` | n/a | yes | | github\_org\_id | GitHub org id | `string` | n/a | yes | | humanitec\_ci\_service\_user\_token | Humanitec CI Service User Token | `string` | n/a | yes | | humanitec\_org\_id | Humanitec 
Organization ID | `string` | n/a | yes | ================================================ FILE: modules/github/aws.tf ================================================ locals { name = "gha-ecr-push" } # Create a role for GitHub Actions to push to ECR using OpenID Connect (OIDC) so we don't need to store AWS credentials in GitHub # Reference https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services # Source https://github.com/terraform-aws-modules/terraform-aws-iam module "iam_github_oidc_provider" { source = "terraform-aws-modules/iam/aws//modules/iam-github-oidc-provider" version = "~> 5.30" } module "iam_github_oidc_role" { source = "terraform-aws-modules/iam/aws//modules/iam-github-oidc-role" version = "~> 5.30" name = local.name subjects = [ "${var.github_org_id}/*", ] policies = { ecr_push_policy = aws_iam_policy.ecr_push_policy.arn } } # Reference https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-push.html#image-push-iam resource "aws_iam_policy" "ecr_push_policy" { name = local.name description = "GitHub Actions ECR Push Policy" policy = jsonencode({ Version = "2012-10-17" Statement = [ { Action = [ "ecr:CompleteLayerUpload", "ecr:GetAuthorizationToken", "ecr:UploadLayerPart", "ecr:InitiateLayerUpload", "ecr:BatchCheckLayerAvailability", "ecr:PutImage", "ecr:DescribeRepositories", "ecr:CreateRepository" ] Effect = "Allow" Resource = "*" }, ] }) } ================================================ FILE: modules/github/main.tf ================================================ locals { cloud_provider = "aws" } # Configure GitHub variables & secrets for all scaffolded apps resource "github_actions_organization_variable" "backstage_cloud_provider" { variable_name = "CLOUD_PROVIDER" visibility = "all" value = local.cloud_provider } resource "github_actions_organization_variable" "backstage_aws_region" { variable_name = "AWS_REGION" visibility = "all" value = var.aws_region } resource 
"github_actions_organization_variable" "backstage_aws_role_arn" { variable_name = "AWS_ROLE_ARN" visibility = "all" value = module.iam_github_oidc_role.arn } resource "github_actions_organization_variable" "backstage_humanitec_org_id" { variable_name = "HUMANITEC_ORG_ID" visibility = "all" value = var.humanitec_org_id } resource "github_actions_organization_secret" "backstage_humanitec_token" { secret_name = "HUMANITEC_TOKEN" visibility = "all" plaintext_value = var.humanitec_ci_service_user_token } ================================================ FILE: modules/github/providers.tf ================================================ terraform { required_providers { aws = { source = "hashicorp/aws" version = "~> 5.17" } github = { source = "integrations/github" version = "~> 5.38" } } required_version = ">= 1.3.0" } ================================================ FILE: modules/github/terraform.tfvars.example ================================================ # AWS region aws_region = "" # GitHub org id github_org_id = "" # Humanitec CI Service User Token humanitec_ci_service_user_token = "" # Humanitec Organization ID humanitec_org_id = "" ================================================ FILE: modules/github/variables.tf ================================================ variable "aws_region" { description = "AWS region" type = string } variable "humanitec_org_id" { description = "Humanitec Organization ID" type = string validation { condition = var.humanitec_org_id != null error_message = "Humanitec Organization ID must not be empty" } } variable "humanitec_ci_service_user_token" { description = "Humanitec CI Service User Token" type = string sensitive = true validation { condition = var.humanitec_ci_service_user_token != null error_message = "Humanitec CI Service User Token must not be empty" } } variable "github_org_id" { description = "GitHub org id" type = string validation { condition = var.github_org_id != null error_message = "GitHub org id must not be empty" } } 
================================================ FILE: modules/portal-backstage/README.md ================================================ # Portal: Backstage This module deploys the [Humanitec Reference Architecture Backstage](https://github.com/humanitec-architecture/backstage) as Application into a specific Humanitec Organization. ## Terraform docs ### Requirements | Name | Version | |------|---------| | terraform | >= 1.3.0 | | humanitec | ~> 1.0 | ### Providers | Name | Version | |------|---------| | humanitec | ~> 1.0 | ### Modules | Name | Source | Version | |------|--------|---------| | backstage\_postgres | github.com/humanitec-architecture/resource-packs-in-cluster//humanitec-resource-defs/postgres/basic | v2024-06-05 | | portal\_backstage | github.com/humanitec-architecture/shared-terraform-modules//modules/portal-backstage | v2024-06-12 | ### Resources | Name | Type | |------|------| | [humanitec_application.backstage](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/application) | resource | | [humanitec_resource_definition_criteria.backstage_postgres](https://registry.terraform.io/providers/humanitec/humanitec/latest/docs/resources/resource_definition_criteria) | resource | ### Inputs | Name | Description | Type | Default | Required | |------|-------------|------|---------|:--------:| | github\_app\_client\_id | GitHub App Client ID | `string` | n/a | yes | | github\_app\_client\_secret | GitHub App Client Secret | `string` | n/a | yes | | github\_app\_id | GitHub App ID | `string` | n/a | yes | | github\_app\_private\_key | GitHub App Private Key | `string` | n/a | yes | | github\_org\_id | GitHub org id | `string` | n/a | yes | | github\_webhook\_secret | GitHub Webhook Secret | `string` | n/a | yes | | humanitec\_ci\_service\_user\_token | Humanitec CI Service User Token | `string` | n/a | yes | | humanitec\_org\_id | Humanitec Organization ID | `string` | n/a | yes | ================================================ 
FILE: modules/portal-backstage/main.tf ================================================ resource "humanitec_application" "backstage" { id = "backstage" name = "backstage" } locals { secrets = { humanitec-token = var.humanitec_ci_service_user_token github-app-client-id = var.github_app_client_id github-app-client-secret = var.github_app_client_secret github-app-private-key = indent(2, var.github_app_private_key) github-webhook-secret = var.github_webhook_secret } secret_refs = { for key, value in local.secrets : key => { value = value } } } module "portal_backstage" { source = "github.com/humanitec-architecture/shared-terraform-modules//modules/portal-backstage?ref=v2024-06-12" cloud_provider = "aws" humanitec_org_id = var.humanitec_org_id humanitec_app_id = humanitec_application.backstage.id humanitec_ci_service_user_token_ref = local.secret_refs["humanitec-token"] github_org_id = var.github_org_id github_app_client_id_ref = local.secret_refs["github-app-client-id"] github_app_client_secret_ref = local.secret_refs["github-app-client-secret"] github_app_id = var.github_app_id github_app_private_key_ref = local.secret_refs["github-app-private-key"] github_webhook_secret_ref = local.secret_refs["github-webhook-secret"] } # Configure required resources for backstage locals { res_def_prefix = "backstage-" } # in-cluster postgres module "backstage_postgres" { source = "github.com/humanitec-architecture/resource-packs-in-cluster//humanitec-resource-defs/postgres/basic?ref=v2024-06-05" prefix = local.res_def_prefix } resource "humanitec_resource_definition_criteria" "backstage_postgres" { resource_definition_id = module.backstage_postgres.id app_id = humanitec_application.backstage.id force_delete = true } ================================================ FILE: modules/portal-backstage/providers.tf ================================================ terraform { required_providers { humanitec = { source = "humanitec/humanitec" version = "~> 1.0" } } required_version = ">= 
1.3.0" } ================================================ FILE: modules/portal-backstage/terraform.tfvars.example ================================================ # GitHub App Client ID github_app_client_id = "" # GitHub App Client Secret github_app_client_secret = "" # GitHub App ID github_app_id = "" # GitHub App Private Key github_app_private_key = "" # GitHub org id github_org_id = "" # GitHub Webhook Secret github_webhook_secret = "" # Humanitec CI Service User Token humanitec_ci_service_user_token = "" # Humanitec Organization ID humanitec_org_id = "" ================================================ FILE: modules/portal-backstage/variables.tf ================================================ variable "humanitec_org_id" { description = "Humanitec Organization ID" type = string } variable "humanitec_ci_service_user_token" { description = "Humanitec CI Service User Token" type = string sensitive = true } variable "github_org_id" { description = "GitHub org id" type = string } variable "github_app_client_id" { description = "GitHub App Client ID" type = string } variable "github_app_client_secret" { description = "GitHub App Client Secret" type = string } variable "github_app_id" { description = "GitHub App ID" type = string } variable "github_webhook_secret" { description = "GitHub Webhook Secret" type = string } variable "github_app_private_key" { description = "GitHub App Private Key" type = string } ================================================ FILE: providers.tf ================================================ terraform { required_providers { aws = { source = "hashicorp/aws" version = "~> 5.17" } github = { source = "integrations/github" version = "~> 5.38" } helm = { source = "hashicorp/helm" version = "~> 2.12" } humanitec = { source = "humanitec/humanitec" version = "~> 1.0" } kubernetes = { source = "hashicorp/kubernetes" version = "~> 2.25" } random = { source = "hashicorp/random" version = "~> 3.5" } } required_version = ">= 1.3.0" } provider 
"humanitec" { org_id = var.humanitec_org_id } provider "github" { owner = var.github_org_id } provider "aws" { region = var.aws_region allowed_account_ids = [var.aws_account_id] } provider "kubernetes" { host = module.base.eks_cluster_endpoint cluster_ca_certificate = base64decode(module.base.eks_cluster_certificate_authority_data) exec { api_version = "client.authentication.k8s.io/v1beta1" command = "aws" # This requires the awscli to be installed locally where Terraform is executed args = ["eks", "get-token", "--cluster-name", module.base.eks_cluster_name] } } provider "helm" { kubernetes { host = module.base.eks_cluster_endpoint cluster_ca_certificate = base64decode(module.base.eks_cluster_certificate_authority_data) exec { api_version = "client.authentication.k8s.io/v1beta1" command = "aws" # This requires the awscli to be installed locally where Terraform is executed args = ["eks", "get-token", "--cluster-name", module.base.eks_cluster_name] } } } ================================================ FILE: terraform.tfvars.example ================================================ # AWS Account (ID) to use aws_account_id = "" # AWS region aws_region = "" # Disk size in GB to use for EKS nodes disk_size = 20 # GitHub org id (required for Backstage) github_org_id = "" # Humanitec Organization ID (required for Backstage) humanitec_org_id = "" # List of EC2 instance types to use for EKS nodes instance_types = [ "t3.large" ] # Deploy Backstage with_backstage = false ================================================ FILE: variables.tf ================================================ variable "aws_account_id" { description = "AWS Account (ID) to use" type = string } variable "aws_region" { description = "AWS region" type = string } variable "instance_types" { description = "List of EC2 instance types to use for EKS nodes" type = list(string) default = [ "t3.large" ] } variable "disk_size" { description = "Disk size in GB to use for EKS nodes" type = number default = 20 } 
variable "with_backstage" { description = "Deploy Backstage" type = bool default = false } variable "github_org_id" { description = "GitHub org id (required for Backstage)" type = string default = null } variable "humanitec_org_id" { description = "Humanitec Organization ID (required for Backstage)" type = string default = null }