[
  {
    "path": ".devcontainer/Dockerfile",
    "content": "FROM --platform=linux/arm64 mcr.microsoft.com/vscode/devcontainers/go:dev-1.24\nRUN apt-get update -y && \\\n    # Docker\n    curl -fsSL https://get.docker.com | sh && \\\n    # https://developer.hashicorp.com/terraform/downloads\n    wget -O- https://apt.releases.hashicorp.com/gpg | sudo gpg --dearmor -o /usr/share/keyrings/hashicorp-archive-keyring.gpg && \\\n    echo \"deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main\" | tee /etc/apt/sources.list.d/hashicorp.list && \\\n    apt update -y && apt install -y terraform && \\\n    # go-jsonnet\n    go install github.com/google/go-jsonnet/cmd/jsonnet@latest && \\\n    go install github.com/google/go-jsonnet/cmd/jsonnetfmt@latest && \\\n    go install github.com/google/go-jsonnet/cmd/jsonnet-lint@latest && \\\n    # AWS CLI\n    apt-get install -y awscli && \\\n    # python\n    apt-get install -y python3 python3-boto3 black && \\\n    # GCP CLI\n    curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo gpg --dearmor -o /usr/share/keyrings/cloud.google.gpg && \\\n    echo \"deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main\" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \\\n    apt-get update -y && apt-get install -y google-cloud-cli\n"
  },
  {
    "path": ".devcontainer/devcontainer.json",
    "content": "{\n\t\"name\": \"Go\",\n\t\"build\": {\n\t\t\"dockerfile\": \"Dockerfile\"\n\t},\n\t\"remoteUser\": \"root\",\n\t\"runArgs\": [\n\t\t\"--memory=2g\",\n\t\t\"--cpus=2\"\n\t],\n\t\"mounts\": [\n\t\t\"source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind\"\n\t],\n\t\"postStartCommand\": \"sh .devcontainer/post_start.sh\",\n\t\"customizations\": {\n\t\t\"vscode\": {\n\t\t\t\"extensions\": [\n\t\t\t\t\"ms-vscode.go\",\n\t\t\t\t\"hashicorp.terraform\",\n\t\t\t\t\"grafana.vscode-jsonnet\"\n\t\t\t]\n\t\t}\n\t}\n}\n"
  },
  {
    "path": ".devcontainer/post_start.sh",
    "content": "git config --global --add safe.directory /workspaces/substation\nrm -f ~/.docker/config.json\n"
  },
  {
    "path": ".dockerignore",
    "content": ".devcontainer\n.git\n.github\n.vscode\nbuild/container\nbuild/scripts\n!build/scripts/aws/lambda/extension.zip\nbuild/terraform\n**/examples\n.dockerignore\n.gitignore\n.golangci.yml\nCODEOWNERS\n**/*.md\n"
  },
  {
    "path": ".git/HEAD",
    "content": "ref: refs/heads/main\n"
  },
  {
    "path": ".git/config",
    "content": "[core]\n\trepositoryformatversion = 1\n\tfilemode = true\n\tbare = false\n\tlogallrefupdates = true\n[remote \"origin\"]\n\turl = https://github.com/brexhq/substation\n\ttagOpt = --no-tags\n\tfetch = +refs/heads/main:refs/remotes/origin/main\n\tpromisor = true\n\tpartialclonefilter = blob:limit=1048576\n[branch \"main\"]\n\tremote = origin\n\tmerge = refs/heads/main\n"
  },
  {
    "path": ".git/description",
    "content": "Unnamed repository; edit this file 'description' to name the repository.\n"
  },
  {
    "path": ".git/hooks/applypatch-msg.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to check the commit log message taken by\n# applypatch from an e-mail message.\n#\n# The hook should exit with non-zero status after issuing an\n# appropriate message if it wants to stop the commit.  The hook is\n# allowed to edit the commit message file.\n#\n# To enable this hook, rename this file to \"applypatch-msg\".\n\n. git-sh-setup\ncommitmsg=\"$(git rev-parse --git-path hooks/commit-msg)\"\ntest -x \"$commitmsg\" && exec \"$commitmsg\" ${1+\"$@\"}\n:\n"
  },
  {
    "path": ".git/hooks/commit-msg.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to check the commit log message.\n# Called by \"git commit\" with one argument, the name of the file\n# that has the commit message.  The hook should exit with non-zero\n# status after issuing an appropriate message if it wants to stop the\n# commit.  The hook is allowed to edit the commit message file.\n#\n# To enable this hook, rename this file to \"commit-msg\".\n\n# Uncomment the below to add a Signed-off-by line to the message.\n# Doing this in a hook is a bad idea in general, but the prepare-commit-msg\n# hook is more suited to it.\n#\n# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\\(.*>\\).*$/Signed-off-by: \\1/p')\n# grep -qs \"^$SOB\" \"$1\" || echo \"$SOB\" >> \"$1\"\n\n# This example catches duplicate Signed-off-by lines.\n\ntest \"\" = \"$(grep '^Signed-off-by: ' \"$1\" |\n\t sort | uniq -c | sed -e '/^[ \t]*1[ \t]/d')\" || {\n\techo >&2 Duplicate Signed-off-by lines.\n\texit 1\n}\n"
  },
  {
    "path": ".git/hooks/fsmonitor-watchman.sample",
    "content": "#!/usr/bin/perl\n\nuse strict;\nuse warnings;\nuse IPC::Open2;\n\n# An example hook script to integrate Watchman\n# (https://facebook.github.io/watchman/) with git to speed up detecting\n# new and modified files.\n#\n# The hook is passed a version (currently 2) and last update token\n# formatted as a string and outputs to stdout a new update token and\n# all files that have been modified since the update token. Paths must\n# be relative to the root of the working tree and separated by a single NUL.\n#\n# To enable this hook, rename this file to \"query-watchman\" and set\n# 'git config core.fsmonitor .git/hooks/query-watchman'\n#\nmy ($version, $last_update_token) = @ARGV;\n\n# Uncomment for debugging\n# print STDERR \"$0 $version $last_update_token\\n\";\n\n# Check the hook interface version\nif ($version ne 2) {\n\tdie \"Unsupported query-fsmonitor hook version '$version'.\\n\" .\n\t    \"Falling back to scanning...\\n\";\n}\n\nmy $git_work_tree = get_working_dir();\n\nmy $retry = 1;\n\nmy $json_pkg;\neval {\n\trequire JSON::XS;\n\t$json_pkg = \"JSON::XS\";\n\t1;\n} or do {\n\trequire JSON::PP;\n\t$json_pkg = \"JSON::PP\";\n};\n\nlaunch_watchman();\n\nsub launch_watchman {\n\tmy $o = watchman_query();\n\tif (is_work_tree_watched($o)) {\n\t\toutput_result($o->{clock}, @{$o->{files}});\n\t}\n}\n\nsub output_result {\n\tmy ($clockid, @files) = @_;\n\n\t# Uncomment for debugging watchman output\n\t# open (my $fh, \">\", \".git/watchman-output.out\");\n\t# binmode $fh, \":utf8\";\n\t# print $fh \"$clockid\\n@files\\n\";\n\t# close $fh;\n\n\tbinmode STDOUT, \":utf8\";\n\tprint $clockid;\n\tprint \"\\0\";\n\tlocal $, = \"\\0\";\n\tprint @files;\n}\n\nsub watchman_clock {\n\tmy $response = qx/watchman clock \"$git_work_tree\"/;\n\tdie \"Failed to get clock id on '$git_work_tree'.\\n\" .\n\t\t\"Falling back to scanning...\\n\" if $? 
!= 0;\n\n\treturn $json_pkg->new->utf8->decode($response);\n}\n\nsub watchman_query {\n\tmy $pid = open2(\\*CHLD_OUT, \\*CHLD_IN, 'watchman -j --no-pretty')\n\tor die \"open2() failed: $!\\n\" .\n\t\"Falling back to scanning...\\n\";\n\n\t# In the query expression below we're asking for names of files that\n\t# changed since $last_update_token but not from the .git folder.\n\t#\n\t# To accomplish this, we're using the \"since\" generator to use the\n\t# recency index to select candidate nodes and \"fields\" to limit the\n\t# output to file names only. Then we're using the \"expression\" term to\n\t# further constrain the results.\n\tmy $last_update_line = \"\";\n\tif (substr($last_update_token, 0, 1) eq \"c\") {\n\t\t$last_update_token = \"\\\"$last_update_token\\\"\";\n\t\t$last_update_line = qq[\\n\"since\": $last_update_token,];\n\t}\n\tmy $query = <<\"\tEND\";\n\t\t[\"query\", \"$git_work_tree\", {$last_update_line\n\t\t\t\"fields\": [\"name\"],\n\t\t\t\"expression\": [\"not\", [\"dirname\", \".git\"]]\n\t\t}]\n\tEND\n\n\t# Uncomment for debugging the watchman query\n\t# open (my $fh, \">\", \".git/watchman-query.json\");\n\t# print $fh $query;\n\t# close $fh;\n\n\tprint CHLD_IN $query;\n\tclose CHLD_IN;\n\tmy $response = do {local $/; <CHLD_OUT>};\n\n\t# Uncomment for debugging the watch response\n\t# open ($fh, \">\", \".git/watchman-response.json\");\n\t# print $fh $response;\n\t# close $fh;\n\n\tdie \"Watchman: command returned no output.\\n\" .\n\t\"Falling back to scanning...\\n\" if $response eq \"\";\n\tdie \"Watchman: command returned invalid output: $response\\n\" .\n\t\"Falling back to scanning...\\n\" unless $response =~ /^\\{/;\n\n\treturn $json_pkg->new->utf8->decode($response);\n}\n\nsub is_work_tree_watched {\n\tmy ($output) = @_;\n\tmy $error = $output->{error};\n\tif ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {\n\t\t$retry--;\n\t\tmy $response = qx/watchman watch 
\"$git_work_tree\"/;\n\t\tdie \"Failed to make watchman watch '$git_work_tree'.\\n\" .\n\t\t    \"Falling back to scanning...\\n\" if $? != 0;\n\t\t$output = $json_pkg->new->utf8->decode($response);\n\t\t$error = $output->{error};\n\t\tdie \"Watchman: $error.\\n\" .\n\t\t\"Falling back to scanning...\\n\" if $error;\n\n\t\t# Uncomment for debugging watchman output\n\t\t# open (my $fh, \">\", \".git/watchman-output.out\");\n\t\t# close $fh;\n\n\t\t# Watchman will always return all files on the first query so\n\t\t# return the fast \"everything is dirty\" flag to git and do the\n\t\t# Watchman query just to get it over with now so we won't pay\n\t\t# the cost in git to look up each individual file.\n\t\tmy $o = watchman_clock();\n\t\t$error = $output->{error};\n\n\t\tdie \"Watchman: $error.\\n\" .\n\t\t\"Falling back to scanning...\\n\" if $error;\n\n\t\toutput_result($o->{clock}, (\"/\"));\n\t\t$last_update_token = $o->{clock};\n\n\t\teval { launch_watchman() };\n\t\treturn 0;\n\t}\n\n\tdie \"Watchman: $error.\\n\" .\n\t\"Falling back to scanning...\\n\" if $error;\n\n\treturn 1;\n}\n\nsub get_working_dir {\n\tmy $working_dir;\n\tif ($^O =~ 'msys' || $^O =~ 'cygwin') {\n\t\t$working_dir = Win32::GetCwd();\n\t\t$working_dir =~ tr/\\\\/\\//;\n\t} else {\n\t\trequire Cwd;\n\t\t$working_dir = Cwd::cwd();\n\t}\n\n\treturn $working_dir;\n}\n"
  },
  {
    "path": ".git/hooks/post-update.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to prepare a packed repository for use over\n# dumb transports.\n#\n# To enable this hook, rename this file to \"post-update\".\n\nexec git update-server-info\n"
  },
  {
    "path": ".git/hooks/pre-applypatch.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to verify what is about to be committed\n# by applypatch from an e-mail message.\n#\n# The hook should exit with non-zero status after issuing an\n# appropriate message if it wants to stop the commit.\n#\n# To enable this hook, rename this file to \"pre-applypatch\".\n\n. git-sh-setup\nprecommit=\"$(git rev-parse --git-path hooks/pre-commit)\"\ntest -x \"$precommit\" && exec \"$precommit\" ${1+\"$@\"}\n:\n"
  },
  {
    "path": ".git/hooks/pre-commit.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to verify what is about to be committed.\n# Called by \"git commit\" with no arguments.  The hook should\n# exit with non-zero status after issuing an appropriate message if\n# it wants to stop the commit.\n#\n# To enable this hook, rename this file to \"pre-commit\".\n\nif git rev-parse --verify HEAD >/dev/null 2>&1\nthen\n\tagainst=HEAD\nelse\n\t# Initial commit: diff against an empty tree object\n\tagainst=$(git hash-object -t tree /dev/null)\nfi\n\n# If you want to allow non-ASCII filenames set this variable to true.\nallownonascii=$(git config --type=bool hooks.allownonascii)\n\n# Redirect output to stderr.\nexec 1>&2\n\n# Cross platform projects tend to avoid non-ASCII filenames; prevent\n# them from being added to the repository. We exploit the fact that the\n# printable range starts at the space character and ends with tilde.\nif [ \"$allownonascii\" != \"true\" ] &&\n\t# Note that the use of brackets around a tr range is ok here, (it's\n\t# even required, for portability to Solaris 10's /usr/bin/tr), since\n\t# the square bracket bytes happen to fall in the designated range.\n\ttest $(git diff-index --cached --name-only --diff-filter=A -z $against |\n\t  LC_ALL=C tr -d '[ -~]\\0' | wc -c) != 0\nthen\n\tcat <<\\EOF\nError: Attempt to add a non-ASCII file name.\n\nThis can cause problems if you want to work with people on other platforms.\n\nTo be portable it is advisable to rename the file.\n\nIf you know what you are doing you can disable this check using:\n\n  git config hooks.allownonascii true\nEOF\n\texit 1\nfi\n\n# If there are whitespace errors, print the offending file names and fail.\nexec git diff-index --check --cached $against --\n"
  },
  {
    "path": ".git/hooks/pre-merge-commit.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to verify what is about to be committed.\n# Called by \"git merge\" with no arguments.  The hook should\n# exit with non-zero status after issuing an appropriate message to\n# stderr if it wants to stop the merge commit.\n#\n# To enable this hook, rename this file to \"pre-merge-commit\".\n\n. git-sh-setup\ntest -x \"$GIT_DIR/hooks/pre-commit\" &&\n        exec \"$GIT_DIR/hooks/pre-commit\"\n:\n"
  },
  {
    "path": ".git/hooks/pre-push.sample",
    "content": "#!/bin/sh\n\n# An example hook script to verify what is about to be pushed.  Called by \"git\n# push\" after it has checked the remote status, but before anything has been\n# pushed.  If this script exits with a non-zero status nothing will be pushed.\n#\n# This hook is called with the following parameters:\n#\n# $1 -- Name of the remote to which the push is being done\n# $2 -- URL to which the push is being done\n#\n# If pushing without using a named remote those arguments will be equal.\n#\n# Information about the commits which are being pushed is supplied as lines to\n# the standard input in the form:\n#\n#   <local ref> <local oid> <remote ref> <remote oid>\n#\n# This sample shows how to prevent push of commits where the log message starts\n# with \"WIP\" (work in progress).\n\nremote=\"$1\"\nurl=\"$2\"\n\nzero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')\n\nwhile read local_ref local_oid remote_ref remote_oid\ndo\n\tif test \"$local_oid\" = \"$zero\"\n\tthen\n\t\t# Handle delete\n\t\t:\n\telse\n\t\tif test \"$remote_oid\" = \"$zero\"\n\t\tthen\n\t\t\t# New branch, examine all commits\n\t\t\trange=\"$local_oid\"\n\t\telse\n\t\t\t# Update to existing branch, examine new commits\n\t\t\trange=\"$remote_oid..$local_oid\"\n\t\tfi\n\n\t\t# Check for WIP commit\n\t\tcommit=$(git rev-list -n 1 --grep '^WIP' \"$range\")\n\t\tif test -n \"$commit\"\n\t\tthen\n\t\t\techo >&2 \"Found WIP commit in $local_ref, not pushing\"\n\t\t\texit 1\n\t\tfi\n\tfi\ndone\n\nexit 0\n"
  },
  {
    "path": ".git/hooks/pre-rebase.sample",
    "content": "#!/bin/sh\n#\n# Copyright (c) 2006, 2008 Junio C Hamano\n#\n# The \"pre-rebase\" hook is run just before \"git rebase\" starts doing\n# its job, and can prevent the command from running by exiting with\n# non-zero status.\n#\n# The hook is called with the following parameters:\n#\n# $1 -- the upstream the series was forked from.\n# $2 -- the branch being rebased (or empty when rebasing the current branch).\n#\n# This sample shows how to prevent topic branches that are already\n# merged to 'next' branch from getting rebased, because allowing it\n# would result in rebasing already published history.\n\npublish=next\nbasebranch=\"$1\"\nif test \"$#\" = 2\nthen\n\ttopic=\"refs/heads/$2\"\nelse\n\ttopic=`git symbolic-ref HEAD` ||\n\texit 0 ;# we do not interrupt rebasing detached HEAD\nfi\n\ncase \"$topic\" in\nrefs/heads/??/*)\n\t;;\n*)\n\texit 0 ;# we do not interrupt others.\n\t;;\nesac\n\n# Now we are dealing with a topic branch being rebased\n# on top of master.  Is it OK to rebase it?\n\n# Does the topic really exist?\ngit show-ref -q \"$topic\" || {\n\techo >&2 \"No such branch $topic\"\n\texit 1\n}\n\n# Is topic fully merged to master?\nnot_in_master=`git rev-list --pretty=oneline ^master \"$topic\"`\nif test -z \"$not_in_master\"\nthen\n\techo >&2 \"$topic is fully merged to master; better remove it.\"\n\texit 1 ;# we could allow it, but there is no point.\nfi\n\n# Is topic ever merged to next?  
If so you should not be rebasing it.\nonly_next_1=`git rev-list ^master \"^$topic\" ${publish} | sort`\nonly_next_2=`git rev-list ^master           ${publish} | sort`\nif test \"$only_next_1\" = \"$only_next_2\"\nthen\n\tnot_in_topic=`git rev-list \"^$topic\" master`\n\tif test -z \"$not_in_topic\"\n\tthen\n\t\techo >&2 \"$topic is already up to date with master\"\n\t\texit 1 ;# we could allow it, but there is no point.\n\telse\n\t\texit 0\n\tfi\nelse\n\tnot_in_next=`git rev-list --pretty=oneline ^${publish} \"$topic\"`\n\t/usr/bin/perl -e '\n\t\tmy $topic = $ARGV[0];\n\t\tmy $msg = \"* $topic has commits already merged to public branch:\\n\";\n\t\tmy (%not_in_next) = map {\n\t\t\t/^([0-9a-f]+) /;\n\t\t\t($1 => 1);\n\t\t} split(/\\n/, $ARGV[1]);\n\t\tfor my $elem (map {\n\t\t\t\t/^([0-9a-f]+) (.*)$/;\n\t\t\t\t[$1 => $2];\n\t\t\t} split(/\\n/, $ARGV[2])) {\n\t\t\tif (!exists $not_in_next{$elem->[0]}) {\n\t\t\t\tif ($msg) {\n\t\t\t\t\tprint STDERR $msg;\n\t\t\t\t\tundef $msg;\n\t\t\t\t}\n\t\t\t\tprint STDERR \" $elem->[1]\\n\";\n\t\t\t}\n\t\t}\n\t' \"$topic\" \"$not_in_next\" \"$not_in_master\"\n\texit 1\nfi\n\n<<\\DOC_END\n\nThis sample hook safeguards topic branches that have been\npublished from being rewound.\n\nThe workflow assumed here is:\n\n * Once a topic branch forks from \"master\", \"master\" is never\n   merged into it again (either directly or indirectly).\n\n * Once a topic branch is fully cooked and merged into \"master\",\n   it is deleted.  If you need to build on top of it to correct\n   earlier mistakes, a new topic branch is created by forking at\n   the tip of the \"master\".  
This is not strictly necessary, but\n   it makes it easier to keep your history simple.\n\n * Whenever you need to test or publish your changes to topic\n   branches, merge them into \"next\" branch.\n\nThe script, being an example, hardcodes the publish branch name\nto be \"next\", but it is trivial to make it configurable via\n$GIT_DIR/config mechanism.\n\nWith this workflow, you would want to know:\n\n(1) ... if a topic branch has ever been merged to \"next\".  Young\n    topic branches can have stupid mistakes you would rather\n    clean up before publishing, and things that have not been\n    merged into other branches can be easily rebased without\n    affecting other people.  But once it is published, you would\n    not want to rewind it.\n\n(2) ... if a topic branch has been fully merged to \"master\".\n    Then you can delete it.  More importantly, you should not\n    build on top of it -- other people may already want to\n    change things related to the topic as patches against your\n    \"master\", so if you need further changes, it is better to\n    fork the topic (perhaps with the same name) afresh from the\n    tip of \"master\".\n\nLet's look at this example:\n\n\t\t   o---o---o---o---o---o---o---o---o---o \"next\"\n\t\t  /       /           /           /\n\t\t /   a---a---b A     /           /\n\t\t/   /               /           /\n\t       /   /   c---c---c---c B         /\n\t      /   /   /             \\         /\n\t     /   /   /   b---b C     \\       /\n\t    /   /   /   /             \\     /\n    ---o---o---o---o---o---o---o---o---o---o---o \"master\"\n\n\nA, B and C are topic branches.\n\n * A has one fix since it was merged up to \"next\".\n\n * B has finished.  
It has been fully merged up to \"master\" and \"next\",\n   and is ready to be deleted.\n\n * C has not merged to \"next\" at all.\n\nWe would want to allow C to be rebased, refuse A, and encourage\nB to be deleted.\n\nTo compute (1):\n\n\tgit rev-list ^master ^topic next\n\tgit rev-list ^master        next\n\n\tif these match, topic has not merged in next at all.\n\nTo compute (2):\n\n\tgit rev-list master..topic\n\n\tif this is empty, it is fully merged to \"master\".\n\nDOC_END\n"
  },
  {
    "path": ".git/hooks/pre-receive.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to make use of push options.\n# The example simply echoes all push options that start with 'echoback='\n# and rejects all pushes when the \"reject\" push option is used.\n#\n# To enable this hook, rename this file to \"pre-receive\".\n\nif test -n \"$GIT_PUSH_OPTION_COUNT\"\nthen\n\ti=0\n\twhile test \"$i\" -lt \"$GIT_PUSH_OPTION_COUNT\"\n\tdo\n\t\teval \"value=\\$GIT_PUSH_OPTION_$i\"\n\t\tcase \"$value\" in\n\t\techoback=*)\n\t\t\techo \"echo from the pre-receive-hook: ${value#*=}\" >&2\n\t\t\t;;\n\t\treject)\n\t\t\texit 1\n\t\tesac\n\t\ti=$((i + 1))\n\tdone\nfi\n"
  },
  {
    "path": ".git/hooks/prepare-commit-msg.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to prepare the commit log message.\n# Called by \"git commit\" with the name of the file that has the\n# commit message, followed by the description of the commit\n# message's source.  The hook's purpose is to edit the commit\n# message file.  If the hook fails with a non-zero status,\n# the commit is aborted.\n#\n# To enable this hook, rename this file to \"prepare-commit-msg\".\n\n# This hook includes three examples. The first one removes the\n# \"# Please enter the commit message...\" help message.\n#\n# The second includes the output of \"git diff --name-status -r\"\n# into the message, just before the \"git status\" output.  It is\n# commented because it doesn't cope with --amend or with squashed\n# commits.\n#\n# The third example adds a Signed-off-by line to the message, that can\n# still be edited.  This is rarely a good idea.\n\nCOMMIT_MSG_FILE=$1\nCOMMIT_SOURCE=$2\nSHA1=$3\n\n/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' \"$COMMIT_MSG_FILE\"\n\n# case \"$COMMIT_SOURCE,$SHA1\" in\n#  ,|template,)\n#    /usr/bin/perl -i.bak -pe '\n#       print \"\\n\" . `git diff --cached --name-status -r`\n# \t if /^#/ && $first++ == 0' \"$COMMIT_MSG_FILE\" ;;\n#  *) ;;\n# esac\n\n# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\\(.*>\\).*$/Signed-off-by: \\1/p')\n# git interpret-trailers --in-place --trailer \"$SOB\" \"$COMMIT_MSG_FILE\"\n# if test -z \"$COMMIT_SOURCE\"\n# then\n#   /usr/bin/perl -i.bak -pe 'print \"\\n\" if !$first_line++' \"$COMMIT_MSG_FILE\"\n# fi\n"
  },
  {
    "path": ".git/hooks/push-to-checkout.sample",
    "content": "#!/bin/sh\n\n# An example hook script to update a checked-out tree on a git push.\n#\n# This hook is invoked by git-receive-pack(1) when it reacts to git\n# push and updates reference(s) in its repository, and when the push\n# tries to update the branch that is currently checked out and the\n# receive.denyCurrentBranch configuration variable is set to\n# updateInstead.\n#\n# By default, such a push is refused if the working tree and the index\n# of the remote repository has any difference from the currently\n# checked out commit; when both the working tree and the index match\n# the current commit, they are updated to match the newly pushed tip\n# of the branch. This hook is to be used to override the default\n# behaviour; however the code below reimplements the default behaviour\n# as a starting point for convenient modification.\n#\n# The hook receives the commit with which the tip of the current\n# branch is going to be updated:\ncommit=$1\n\n# It can exit with a non-zero status to refuse the push (when it does\n# so, it must not modify the index or the working tree).\ndie () {\n\techo >&2 \"$*\"\n\texit 1\n}\n\n# Or it can make any necessary changes to the working tree and to the\n# index to bring them to the desired state when the tip of the current\n# branch is updated to the new commit, and exit with a zero status.\n#\n# For example, the hook can simply run git read-tree -u -m HEAD \"$1\"\n# in order to emulate git fetch that is run in the reverse direction\n# with git push, as the two-tree form of git read-tree -u -m is\n# essentially the same as git switch or git checkout that switches\n# branches while keeping the local changes in the working tree that do\n# not interfere with the difference between the branches.\n\n# The below is a more-or-less exact translation to shell of the C code\n# for the default behaviour for git's push-to-checkout hook defined in\n# the push_to_deploy() function in builtin/receive-pack.c.\n#\n# Note that the hook 
will be executed from the repository directory,\n# not from the working tree, so if you want to perform operations on\n# the working tree, you will have to adapt your code accordingly, e.g.\n# by adding \"cd ..\" or using relative paths.\n\nif ! git update-index -q --ignore-submodules --refresh\nthen\n\tdie \"Up-to-date check failed\"\nfi\n\nif ! git diff-files --quiet --ignore-submodules --\nthen\n\tdie \"Working directory has unstaged changes\"\nfi\n\n# This is a rough translation of:\n#\n#   head_has_history() ? \"HEAD\" : EMPTY_TREE_SHA1_HEX\nif git cat-file -e HEAD 2>/dev/null\nthen\n\thead=HEAD\nelse\n\thead=$(git hash-object -t tree --stdin </dev/null)\nfi\n\nif ! git diff-index --quiet --cached --ignore-submodules $head --\nthen\n\tdie \"Working directory has staged changes\"\nfi\n\nif ! git read-tree -u -m \"$commit\"\nthen\n\tdie \"Could not update working tree to new HEAD\"\nfi\n"
  },
  {
    "path": ".git/hooks/sendemail-validate.sample",
    "content": "#!/bin/sh\n\n# An example hook script to validate a patch (and/or patch series) before\n# sending it via email.\n#\n# The hook should exit with non-zero status after issuing an appropriate\n# message if it wants to prevent the email(s) from being sent.\n#\n# To enable this hook, rename this file to \"sendemail-validate\".\n#\n# By default, it will only check that the patch(es) can be applied on top of\n# the default upstream branch without conflicts in a secondary worktree. After\n# validation (successful or not) of the last patch of a series, the worktree\n# will be deleted.\n#\n# The following config variables can be set to change the default remote and\n# remote ref that are used to apply the patches against:\n#\n#   sendemail.validateRemote (default: origin)\n#   sendemail.validateRemoteRef (default: HEAD)\n#\n# Replace the TODO placeholders with appropriate checks according to your\n# needs.\n\nvalidate_cover_letter () {\n\tfile=\"$1\"\n\t# TODO: Replace with appropriate checks (e.g. spell checking).\n\ttrue\n}\n\nvalidate_patch () {\n\tfile=\"$1\"\n\t# Ensure that the patch applies without conflicts.\n\tgit am -3 \"$file\" || return\n\t# TODO: Replace with appropriate checks for this patch\n\t# (e.g. checkpatch.pl).\n\ttrue\n}\n\nvalidate_series () {\n\t# TODO: Replace with appropriate checks for the whole series\n\t# (e.g. 
quick build, coding style checks, etc.).\n\ttrue\n}\n\n# main -------------------------------------------------------------------------\n\nif test \"$GIT_SENDEMAIL_FILE_COUNTER\" = 1\nthen\n\tremote=$(git config --default origin --get sendemail.validateRemote) &&\n\tref=$(git config --default HEAD --get sendemail.validateRemoteRef) &&\n\tworktree=$(mktemp --tmpdir -d sendemail-validate.XXXXXXX) &&\n\tgit worktree add -fd --checkout \"$worktree\" \"refs/remotes/$remote/$ref\" &&\n\tgit config --replace-all sendemail.validateWorktree \"$worktree\"\nelse\n\tworktree=$(git config --get sendemail.validateWorktree)\nfi || {\n\techo \"sendemail-validate: error: failed to prepare worktree\" >&2\n\texit 1\n}\n\nunset GIT_DIR GIT_WORK_TREE\ncd \"$worktree\" &&\n\nif grep -q \"^diff --git \" \"$1\"\nthen\n\tvalidate_patch \"$1\"\nelse\n\tvalidate_cover_letter \"$1\"\nfi &&\n\nif test \"$GIT_SENDEMAIL_FILE_COUNTER\" = \"$GIT_SENDEMAIL_FILE_TOTAL\"\nthen\n\tgit config --unset-all sendemail.validateWorktree &&\n\ttrap 'git worktree remove -ff \"$worktree\"' EXIT &&\n\tvalidate_series\nfi\n"
  },
  {
    "path": ".git/hooks/update.sample",
    "content": "#!/bin/sh\n#\n# An example hook script to block unannotated tags from entering.\n# Called by \"git receive-pack\" with arguments: refname sha1-old sha1-new\n#\n# To enable this hook, rename this file to \"update\".\n#\n# Config\n# ------\n# hooks.allowunannotated\n#   This boolean sets whether unannotated tags will be allowed into the\n#   repository.  By default they won't be.\n# hooks.allowdeletetag\n#   This boolean sets whether deleting tags will be allowed in the\n#   repository.  By default they won't be.\n# hooks.allowmodifytag\n#   This boolean sets whether a tag may be modified after creation. By default\n#   it won't be.\n# hooks.allowdeletebranch\n#   This boolean sets whether deleting branches will be allowed in the\n#   repository.  By default they won't be.\n# hooks.denycreatebranch\n#   This boolean sets whether remotely creating branches will be denied\n#   in the repository.  By default this is allowed.\n#\n\n# --- Command line\nrefname=\"$1\"\noldrev=\"$2\"\nnewrev=\"$3\"\n\n# --- Safety check\nif [ -z \"$GIT_DIR\" ]; then\n\techo \"Don't run this script from the command line.\" >&2\n\techo \" (if you want, you could supply GIT_DIR then run\" >&2\n\techo \"  $0 <ref> <oldrev> <newrev>)\" >&2\n\texit 1\nfi\n\nif [ -z \"$refname\" -o -z \"$oldrev\" -o -z \"$newrev\" ]; then\n\techo \"usage: $0 <ref> <oldrev> <newrev>\" >&2\n\texit 1\nfi\n\n# --- Config\nallowunannotated=$(git config --type=bool hooks.allowunannotated)\nallowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)\ndenycreatebranch=$(git config --type=bool hooks.denycreatebranch)\nallowdeletetag=$(git config --type=bool hooks.allowdeletetag)\nallowmodifytag=$(git config --type=bool hooks.allowmodifytag)\n\n# check for no description\nprojectdesc=$(sed -e '1q' \"$GIT_DIR/description\")\ncase \"$projectdesc\" in\n\"Unnamed repository\"* | \"\")\n\techo \"*** Project description file hasn't been set\" >&2\n\texit 1\n\t;;\nesac\n\n# --- Check types\n# if $newrev is 
0000...0000, it's a commit to delete a ref.\nzero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')\nif [ \"$newrev\" = \"$zero\" ]; then\n\tnewrev_type=delete\nelse\n\tnewrev_type=$(git cat-file -t $newrev)\nfi\n\ncase \"$refname\",\"$newrev_type\" in\n\trefs/tags/*,commit)\n\t\t# un-annotated tag\n\t\tshort_refname=${refname##refs/tags/}\n\t\tif [ \"$allowunannotated\" != \"true\" ]; then\n\t\t\techo \"*** The un-annotated tag, $short_refname, is not allowed in this repository\" >&2\n\t\t\techo \"*** Use 'git tag [ -a | -s ]' for tags you want to propagate.\" >&2\n\t\t\texit 1\n\t\tfi\n\t\t;;\n\trefs/tags/*,delete)\n\t\t# delete tag\n\t\tif [ \"$allowdeletetag\" != \"true\" ]; then\n\t\t\techo \"*** Deleting a tag is not allowed in this repository\" >&2\n\t\t\texit 1\n\t\tfi\n\t\t;;\n\trefs/tags/*,tag)\n\t\t# annotated tag\n\t\tif [ \"$allowmodifytag\" != \"true\" ] && git rev-parse $refname > /dev/null 2>&1\n\t\tthen\n\t\t\techo \"*** Tag '$refname' already exists.\" >&2\n\t\t\techo \"*** Modifying a tag is not allowed in this repository.\" >&2\n\t\t\texit 1\n\t\tfi\n\t\t;;\n\trefs/heads/*,commit)\n\t\t# branch\n\t\tif [ \"$oldrev\" = \"$zero\" -a \"$denycreatebranch\" = \"true\" ]; then\n\t\t\techo \"*** Creating a branch is not allowed in this repository\" >&2\n\t\t\texit 1\n\t\tfi\n\t\t;;\n\trefs/heads/*,delete)\n\t\t# delete branch\n\t\tif [ \"$allowdeletebranch\" != \"true\" ]; then\n\t\t\techo \"*** Deleting a branch is not allowed in this repository\" >&2\n\t\t\texit 1\n\t\tfi\n\t\t;;\n\trefs/remotes/*,commit)\n\t\t# tracking branch\n\t\t;;\n\trefs/remotes/*,delete)\n\t\t# delete tracking branch\n\t\tif [ \"$allowdeletebranch\" != \"true\" ]; then\n\t\t\techo \"*** Deleting a tracking branch is not allowed in this repository\" >&2\n\t\t\texit 1\n\t\tfi\n\t\t;;\n\t*)\n\t\t# Anything else (is there anything else?)\n\t\techo \"*** Update hook: unknown type of update to ref $refname of type $newrev_type\" >&2\n\t\texit 1\n\t\t;;\nesac\n\n# --- 
Finished\nexit 0\n"
  },
  {
    "path": ".git/info/exclude",
    "content": "# git ls-files --others --exclude-from=.git/info/exclude\n# Lines that start with '#' are comments.\n# For a project mostly in C, the following would be a good set of\n# exclude patterns (uncomment them if you want to use them):\n# *.[oa]\n# *~\n"
  },
  {
    "path": ".git/logs/HEAD",
    "content": "0000000000000000000000000000000000000000 28f1b349b4da090a4cf04af91adf1821ca7fd687 appuser <appuser@a0b7d3daa2ed.(none)> 1776671609 +0000\tclone: from https://github.com/brexhq/substation\n"
  },
  {
    "path": ".git/logs/refs/heads/main",
    "content": "0000000000000000000000000000000000000000 28f1b349b4da090a4cf04af91adf1821ca7fd687 appuser <appuser@a0b7d3daa2ed.(none)> 1776671609 +0000\tclone: from https://github.com/brexhq/substation\n"
  },
  {
    "path": ".git/logs/refs/remotes/origin/HEAD",
    "content": "0000000000000000000000000000000000000000 28f1b349b4da090a4cf04af91adf1821ca7fd687 appuser <appuser@a0b7d3daa2ed.(none)> 1776671609 +0000\tclone: from https://github.com/brexhq/substation\n"
  },
  {
    "path": ".git/objects/pack/pack-1c76da16733430db3ab67c8402e095f5f4fcf914.promisor",
    "content": ""
  },
  {
    "path": ".git/objects/pack/pack-e1e73a8714333d259732152371bb7e014c5153a6.promisor",
    "content": "28f1b349b4da090a4cf04af91adf1821ca7fd687 refs/heads/main\n"
  },
  {
    "path": ".git/packed-refs",
    "content": "# pack-refs with: peeled fully-peeled sorted \n28f1b349b4da090a4cf04af91adf1821ca7fd687 refs/remotes/origin/main\n"
  },
  {
    "path": ".git/refs/heads/main",
    "content": "28f1b349b4da090a4cf04af91adf1821ca7fd687\n"
  },
  {
    "path": ".git/refs/remotes/origin/HEAD",
    "content": "ref: refs/remotes/origin/main\n"
  },
  {
    "path": ".git/shallow",
    "content": "28f1b349b4da090a4cf04af91adf1821ca7fd687\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/adopters.yaml",
    "content": "name: Become an Adopter\ndescription: Add the name of your organization to the list of adopters.\ntitle: '[organization] has adopted Substation!'\nbody:\n  - type: markdown\n    attributes:\n      value: |\n        Thank you for supporting Substation! By adding your organization to the list of adopters, you help raise awareness for the project and grow our community of users. Please fill out the information below to be added to the [list of adopters](https://github.com/brexhq/substation/blob/main/ADOPTERS.md).\n\n  - type: input\n    id: org-name\n    attributes:\n      label: Organization Name\n      description: Name of your organization.\n      placeholder: ex. Acme Corp\n    validations:\n      required: true\n  - type: input\n    id: org-url\n    attributes:\n      label: Organization Website\n      description: Link to your organization's website.\n      placeholder: ex. https://www.example.com\n    validations:\n      required: true\n  - type: dropdown\n    id: stage\n    attributes:\n      label: Stage of Adoption\n      description: What is your current stage of adoption?\n      options:\n        - We're learning about Substation\n        - We're testing Substation\n        - We're using Substation in production\n        - We're driving broad adoption of Substation\n      default: 0\n    validations:\n      required: true\n  - type: textarea\n    id: use-case\n    attributes:\n      label: Description of Use\n      description: Write one or two sentences about how your organization is using Substation.\n    validations:\n      required: true\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "content": "---\nname: Bug report\nabout: Create a report to help us improve\ntitle: ''\nlabels: ''\nassignees: ''\n\n---\n\n**Describe the bug**\nA clear and concise description of what the bug is.\n\n**To Reproduce**\nSteps to reproduce the behavior:\n1. Go to '...'\n2. Click on '....'\n3. Scroll down to '....'\n4. See error\n\n**Expected behavior**\nA clear and concise description of what you expected to happen.\n\n**Screenshots**\nIf applicable, add screenshots to help explain your problem.\n\n**Build (please complete the following information):**\n - Architecture: [e.g. amd64]\n - Version [e.g. 0.y.z]\n\n**Cloud (please complete the following information):**\n - Provider: [e.g. AWS]\n - Service: [e.g. Kinesis, Lambda]\n\n**Additional context**\nAdd any other context about the problem here.\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "content": "---\nname: Feature request\nabout: Suggest an idea for this project\ntitle: ''\nlabels: ''\nassignees: ''\n\n---\n\n**Is your feature request related to a problem? Please describe.**\nA clear and concise description of what the problem is. Ex. I'm always frustrated when [...]\n\n**Describe the solution you'd like**\nA clear and concise description of what you want to happen.\n\n**Describe alternatives you've considered**\nA clear and concise description of any alternative solutions or features you've considered.\n\n**Additional context**\nAdd any other context or screenshots about the feature request here.\n"
  },
  {
    "path": ".github/pull_request_template.md",
    "content": "<!--- Provide a general summary of your changes in the Title above -->\n\n## Description\n\n<!--- Describe your changes in detail -->\n\n## Motivation and Context\n\n<!--- Why is this change required? What problem does it solve? -->\n<!--- If it fixes an open issue, please link to the issue here. -->\n\n## How Has This Been Tested?\n\n<!--- Please describe in detail how you tested your changes. -->\n<!--- Include details of your testing environment, and the tests you ran to -->\n<!--- see how your change affects other areas of the code, etc. -->\n\n## Types of changes\n\n<!--- What types of changes does your code introduce? Put an `x` in all the boxes that apply: -->\n* [ ] Bug fix (non-breaking change which fixes an issue)\n* [ ] New feature (non-breaking change which adds functionality)\n* [ ] Breaking change (fix or feature that would cause existing functionality to change)\n\n## Checklist:\n\n<!--- Go over all the following points, and put an `x` in all the boxes that apply. -->\n<!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! -->\n* [ ] My code follows the code style of this project.\n* [ ] My change requires a change to the documentation.\n* [ ] I have updated the documentation accordingly.\n"
  },
  {
    "path": ".github/workflows/code.yml",
    "content": "name: code\n\non:\n  pull_request:\n    branches: [main]\n\npermissions:\n  contents: read\n\njobs:\n  go:\n    permissions:\n      contents: read # fetch code\n      pull-requests: read # fetch pull requests\n\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout Repository\n        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n        with:\n          fetch-depth: 1\n\n      - name: Setup Go\n        uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2\n        with:\n          go-version: 1.23\n\n      - name: Testing\n        run: go test -timeout 30s -v ./...\n\n      - name: Linting\n        uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1\n        with:\n          version: v1.61.0\n\n  # Inherits workflow permissions.\n  python:\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout Repository\n        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n\n      - name: Linting\n        uses: psf/black@1b2427a2b785cc4aac97c19bb4b9a0de063f9547 # v24.10.0\n        with:\n          # This recursively scans the entire project. Note that `exclude` must be\n          # an empty string: \"An empty value means no paths are excluded.\"\n          options: \"--check --exclude=''\"\n\n  # Inherits workflow permissions.\n  jsonnet:\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout Repository\n        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n        with:\n          fetch-depth: 1\n\n      - name: Setup Go\n        uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2\n        with:\n          go-version: 1.23\n\n      - name: Compiling\n        run: |\n          go install github.com/google/go-jsonnet/cmd/jsonnet@v0.20.0\n          sh .github/workflows/code_jsonnet.sh\n"
  },
  {
    "path": ".github/workflows/code_jsonnet.sh",
    "content": "#!/bin/sh\nfiles=$(find . -name \"*.jsonnet\")\n\nfor file in $files\ndo\n  # 'rev | cut | rev' converts \"path/to/file.jsonnet\" to \"path/to/file.json\"\n  f=$(echo $file | rev | cut -c 4- | rev)\n  # This is run from the root of the repo.\n  jsonnet --ext-code-file sub=\"./substation.libsonnet\" $file > $f\ndone\n"
  },
  {
    "path": ".github/workflows/conventional_commits.yml",
    "content": "name: conventional_commits\n\non:\n  pull_request_target:\n    types:\n      - opened\n      - edited\n      - synchronize\n\npermissions:\n  contents: read\n\njobs:\n  conventional_commits:\n    permissions:\n      pull-requests: read # analyze PRs\n      statuses: write # update status of analyzed PR\n\n    runs-on: ubuntu-latest\n    steps:\n      - uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 # v5.5.3\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n"
  },
  {
    "path": ".github/workflows/release_please.yml",
    "content": "name: release_please\n\non:\n  push:\n    branches:\n      - main\n\npermissions:\n  contents: read\n\njobs:\n  release_please:\n    permissions:\n      contents: write # create release commit\n      pull-requests: write # create release PR\n\n    runs-on: ubuntu-latest\n    steps:\n      - name: Tag Release\n        id: release\n        uses: googleapis/release-please-action@7987652d64b4581673a76e33ad5e98e3dd56832f  # v4.1.3\n        with:\n          release-type: go\n          package-name: release-please-action\n\n      - name: Checkout Repository\n        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n        with:\n          fetch-depth: 0\n\n      - name: Setup Go\n        uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2\n        with:\n          go-version: 1.22\n\n      - name: Upload Release Artifact\n        if: github.event_name == 'release' && github.event.prerelease == false\n        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0\n        with:\n          version: latest\n          args: release --clean\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n          TAG: ${{ steps.release.outputs.tag_name }}\n"
  },
  {
    "path": ".github/workflows/scorecard.yml",
    "content": "# This workflow uses actions that are not certified by GitHub. They are provided\n# by a third-party and are governed by separate terms of service, privacy\n# policy, and support documentation.\n\nname: Scorecard supply-chain security\non:\n  # For Branch-Protection check. Only the default branch is supported. See\n  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection\n  branch_protection_rule:\n  # To guarantee Maintained check is occasionally updated. See\n  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained\n  schedule:\n    - cron: \"17 17 * * 2\"\n  push:\n    branches: [\"main\"]\n\n# Declare default permissions as read only.\npermissions: read-all\n\njobs:\n  analysis:\n    name: Scorecard analysis\n    runs-on: ubuntu-latest\n    permissions:\n      # Needed to upload the results to code-scanning dashboard.\n      security-events: write\n      # Needed to publish results and get a badge (see publish_results below).\n      id-token: write\n      # Uncomment the permissions below if installing in a private repository.\n      # contents: read\n      # actions: read\n\n    steps:\n      - name: \"Checkout code\"\n        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1\n        with:\n          persist-credentials: false\n\n      - name: \"Run analysis\"\n        uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1\n        with:\n          results_file: results.sarif\n          results_format: sarif\n          # (Optional) \"write\" PAT token. 
Uncomment the `repo_token` line below if:\n          # - you want to enable the Branch-Protection check on a *public* repository, or\n          # - you are installing Scorecard on a *private* repository\n          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional.\n          # repo_token: ${{ secrets.SCORECARD_TOKEN }}\n\n          # Public repositories:\n          #   - Publish results to OpenSSF REST API for easy access by consumers\n          #   - Allows the repository to include the Scorecard badge.\n          #   - See https://github.com/ossf/scorecard-action#publishing-results.\n          # For private repositories:\n          #   - `publish_results` will always be set to `false`, regardless\n          #     of the value entered here.\n          publish_results: true\n\n      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF\n      # format to the repository Actions tab.\n      - name: \"Upload artifact\"\n        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1\n        with:\n          name: sarif-results\n          path: results.sarif\n          retention-days: 5\n\n      # Upload the results to GitHub's code scanning dashboard (optional).\n      # Commenting out will disable upload of results to your repo's Code Scanning dashboard\n      - name: \"Upload to code-scanning\"\n        uses: github/codeql-action/upload-sarif@cf5b0a9041d3c1d336516f1944c96d96598193cc  # v2.19.1\n        with:\n          sarif_file: results.sarif\n"
  },
  {
    "path": ".gitignore",
    "content": "# Ignore all\n*\n\n# Unignore all with extensions\n!*.*\n\n# Unignore all dirs\n!*/\n\n# CHANGELOG is handled by Release Please\nCHANGELOG.md\n\n# ignore all JSON by default\n*.json\n*.jsonl\n# allow Visual Studio Code devcontainer config file\n!.devcontainer/devcontainer.json\n# allow Visual Studio Code settings config file\n!.vscode/settings.json\n# allow specific JSON files in the examples/ directory\n!examples/**/data*.json\n!examples/**/data*.jsonl\n!examples/**/stdout.jsonl\n\n# Go profiling files\n*.prof\n\n# Ignore macOS system files\n.DS_Store\n\n# Archive files\n*.zip\n\n# Terraform\n*.terraform*\nterraform.tfstate*\n\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nsrc/build/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\npip-wheel-metadata/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n.python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no 
cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n"
  },
  {
    "path": ".golangci.yml",
    "content": "# This code is licensed under the terms of the MIT license.\n\n## Config for golangci-lint v1.49.0 based on https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322\n\nrun:\n  # Timeout for analysis, e.g. 30s, 5m.\n  # Default: 1m\n  timeout: 3m\n\n\n# This file contains only configs which differ from defaults.\n# All possible options can be found here https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml\nlinters-settings:\n  cyclop:\n    # The maximal code complexity to report.\n    # Default: 10\n    max-complexity: 30\n\n  errcheck:\n    # Report about not checking of errors in type assertions: `a := b.(MyStruct)`.\n    # Such cases aren't reported by default.\n    # Default: false\n    check-type-assertions: true\n\n  gocognit:\n    # Minimal code complexity to report\n    # Default: 30\n    min-complexity: 45\n\n  # gomodguard:\n  #   blocked:\n  #     # List of blocked modules.\n  #     # Default: []\n  #     modules:\n  #       - github.com/golang/protobuf:\n  #           recommendations:\n  #             - google.golang.org/protobuf\n  #           reason: \"see https://developers.google.com/protocol-buffers/docs/reference/go/faq#modules\"\n\n  nakedret:\n    # Make an issue if func has more lines of code than this setting, and it has naked returns.\n    # Default: 30\n    max-func-lines: 0\n\n  nolintlint:\n    # Exclude following linters from requiring an explanation.\n    # Default: []\n    allow-no-explanation: [ gocognit ]\n    # Enable to require an explanation of nonzero length after each nolint directive.\n    # Default: false\n    require-explanation: true\n    # Enable to require nolint directives to mention the specific linter being suppressed.\n    # Default: false\n    require-specific: true\n\n  rowserrcheck:\n    # database/sql is always checked\n    # Default: []\n    packages:\n      - github.com/jmoiron/sqlx\n\n  tenv:\n    # The option `all` will run against whole test files (`_test.go`) 
regardless of method/function signatures.\n    # Otherwise, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked.\n    # Default: false\n    all: true\n\n\nlinters:\n  disable-all: true\n  enable:\n    ## enabled by default\n    - errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases\n    - gosimple # specializes in simplifying a code\n    - govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string\n    - ineffassign # detects when assignments to existing variables are not used\n    - staticcheck # is a go vet on steroids, applying a ton of static analysis checks\n    - typecheck # like the front-end of a Go compiler, parses and type-checks Go code\n    - unused # checks for unused constants, variables, functions and types\n\n    ## disabled by default\n    - bidichk # checks for dangerous unicode character sequences\n    - bodyclose # checks whether HTTP response body is closed successfully\n    - cyclop # checks function and package cyclomatic complexity\n    - durationcheck # checks for two durations multiplied together\n    - errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error\n    - execinquery # checks query string in Query function which reads your Go src files and warning it finds\n    - exhaustive # checks exhaustiveness of enum switch statements\n    - gocognit # computes and checks the cognitive complexity of functions\n    - gocyclo # computes and checks the cyclomatic complexity of functions\n    - gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod\n    - gomodguard # allow and block lists linter for direct Go module dependencies. 
This is different from depguard where there are different block types for example version constraints and module recommendations\n    - goprintffuncname # checks that printf-like functions are named with f at the end\n    - nakedret # finds naked returns in functions greater than a specified function length\n    - nestif # reports deeply nested if statements\n    - nilerr # finds the code that returns nil even if it checks that the error is not nil\n    - noctx # finds sending http request without context.Context\n    - nolintlint # reports ill-formed or insufficient nolint directives\n    - nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL\n    - predeclared # finds code that shadows one of Go's predeclared identifiers\n    - reassign # checks that package variables are not reassigned\n    - rowserrcheck # checks whether Err of rows is checked successfully\n    - sqlclosecheck # checks that sql.Rows and sql.Stmt are closed\n    - tenv # detects using os.Setenv instead of t.Setenv since Go1.17\n    - tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes\n    - unconvert # removes unnecessary type conversions\n    - unparam # reports unused function parameters\n    - usestdlibvars # detects the possibility to use variables/constants from the Go standard library\n    - wastedassign # finds wasted assignment statements\n    - whitespace # detects leading and trailing whitespace\n    - misspell # finds commonly misspelled English words in comments\n\nissues:\n  # Maximum count of issues with the same text.\n  # Set to 0 to disable.\n  # Default: 3\n  max-same-issues: 50\n  \n  exclude:\n    - 'declaration of \"(err|ctx)\" shadows declaration at'\n\n  exclude-rules:\n    - source: \"^//\\\\s*go:generate\\\\s\"\n      linters: [ lll ]\n    - source: \"(noinspection|TODO)\"\n      linters: [ godot ]\n    - source: \"//noinspection\"\n      linters: [ gocritic ]\n    - source: \"^\\\\s+if _, ok := 
err\\\\.\\\\([^.]+\\\\.InternalError\\\\); ok {\"\n      linters: [ errorlint ]\n    - path: \"_test\\\\.go\"\n      linters:\n        - bodyclose\n        - dupl\n        - funlen\n        - goconst\n        - gosec\n        - noctx\n        - wrapcheck\n"
  },
  {
    "path": ".goreleaser.yaml",
    "content": "version: 2\n\nbefore:\n  hooks:\n    - go mod tidy\n\nbuilds:\n  - env:\n      - CGO_ENABLED=0\n    goos:\n      - linux\n      - darwin\n    goarch:\n      - amd64\n      - arm64\n\n    main: ./cmd/substation\n    binary: substation\n\nrelease:\n  mode: keep-existing  # required for compatibility with release-please\n\narchives:\n  - format: tar.gz\n    # this name template makes the OS and Arch compatible with the results of `uname`.\n    name_template: >-\n      {{ .ProjectName }}_\n      {{- .Os }}_\n      {{- if eq .Arch \"amd64\" }}x86_64\n      {{- else if eq .Arch \"386\" }}i386\n      {{- else }}{{ .Arch }}{{ end }}\n      {{- if .Arm }}v{{ .Arm }}{{ end }}\n\nchangelog:\n  sort: asc\n  filters:\n    exclude:\n      - \"^docs:\"\n      - \"^test:\"\n"
  },
  {
    "path": ".vscode/settings.json",
    "content": "{\n  \"terminal.integrated.defaultProfile.linux\": \"bash\",\n  \"go.useLanguageServer\": true,\n  \"gopls\": {\n    \"formatting.gofumpt\": true,\n  },\n  \"go.formatTool\": \"gofumpt\",\n  \"go.inferGopath\": false,\n  \"go.lintOnSave\": \"workspace\",\n  \"go.lintTool\": \"golangci-lint\",\n  \"cSpell.enabled\": false\n}\n"
  },
  {
    "path": "ADOPTERS.md",
    "content": "# Adopters\n\nIf you're using Substation in your organization, please try to add your company name to this list. By [adding your name to this list](https://github.com/brexhq/substation/issues/new?assignees=&labels=&projects=&template=adopters.yaml&title=%5Borganization%5D+has+adopted+Substation%21), you help raise awareness for the project and grow our community of users!\n\n| Organization | Contact | Description of Use |\n|--------------|---------|--------------------|\n| [Brex](https://www.brex.com) | [@jshlbrd](https://github.com/jshlbrd) | All security event and audit logs (~5 TB/day) used by the security org are handled by Substation. |\n| [Verkada](https://www.verkada.com) | [@chencaoverkada](https://github.com/chencaoverkada) | Substation enriches and normalizes **all** of Verkada's logging pipelines, and filters logs before they are ingested into an in-house SIEM. |\n"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nWe as members, contributors, and leaders pledge to make participation in our\ncommunity a harassment-free experience for everyone, regardless of age, body\nsize, visible or invisible disability, ethnicity, sex characteristics, gender\nidentity and expression, level of experience, education, socio-economic status, \nnationality, personal appearance, race, caste, color, religion, or sexual\nidentity and orientation.\n\nWe pledge to act and interact in ways that contribute to an open, welcoming, \ndiverse, inclusive, and healthy community.\n\n## Our Standards\n\nExamples of behavior that contributes to a positive environment for our\ncommunity include:\n\n* Demonstrating empathy and kindness toward other people\n* Being respectful of differing opinions, viewpoints, and experiences\n* Giving and gracefully accepting constructive feedback\n* Accepting responsibility and apologizing to those affected by our mistakes, \n  and learning from the experience\n* Focusing on what is best not just for us as individuals, but for the overall\n  community\n\nExamples of unacceptable behavior include:\n\n* The use of sexualized language or imagery, and sexual attention or advances of\n  any kind\n* Trolling, insulting or derogatory comments, and personal or political attacks\n* Public or private harassment\n* Publishing others' private information, such as a physical or email address, \n  without their explicit permission\n* Other conduct which could reasonably be considered inappropriate in a\n  professional setting\n\n## Enforcement Responsibilities\n\nCommunity leaders are responsible for clarifying and enforcing our standards of\nacceptable behavior and will take appropriate and fair corrective action in\nresponse to any behavior that they deem inappropriate, threatening, offensive, \nor harmful.\n\nCommunity leaders have the right and responsibility to remove, edit, or reject\ncomments, commits, code, 
wiki edits, issues, and other contributions that are\nnot aligned to this Code of Conduct, and will communicate reasons for moderation\ndecisions when appropriate.\n\n## Scope\n\nThis Code of Conduct applies within all community spaces, and also applies when\nan individual is officially representing the community in public spaces.\nExamples of representing our community include using an official e-mail address, \nposting via an official social media account, or acting as an appointed\nrepresentative at an online or offline event.\n\n## Enforcement\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\nreported to the community leaders responsible for enforcement at\n[INSERT CONTACT METHOD].\nAll complaints will be reviewed and investigated promptly and fairly.\n\nAll community leaders are obligated to respect the privacy and security of the\nreporter of any incident.\n\n## Enforcement Guidelines\n\nCommunity leaders will follow these Community Impact Guidelines in determining\nthe consequences for any action they deem in violation of this Code of Conduct:\n\n### 1. Correction\n\n**Community Impact**: Use of inappropriate language or other behavior deemed\nunprofessional or unwelcome in the community.\n\n**Consequence**: A private, written warning from community leaders, providing\nclarity around the nature of the violation and an explanation of why the\nbehavior was inappropriate. A public apology may be requested.\n\n### 2. Warning\n\n**Community Impact**: A violation through a single incident or series of\nactions.\n\n**Consequence**: A warning with consequences for continued behavior. No\ninteraction with the people involved, including unsolicited interaction with\nthose enforcing the Code of Conduct, for a specified period of time. This\nincludes avoiding interactions in community spaces as well as external channels\nlike social media. Violating these terms may lead to a temporary or permanent\nban.\n\n### 3. 
Temporary Ban\n\n**Community Impact**: A serious violation of community standards, including\nsustained inappropriate behavior.\n\n**Consequence**: A temporary ban from any sort of interaction or public\ncommunication with the community for a specified period of time. No public or\nprivate interaction with the people involved, including unsolicited interaction\nwith those enforcing the Code of Conduct, is allowed during this period.\nViolating these terms may lead to a permanent ban.\n\n### 4. Permanent Ban\n\n**Community Impact**: Demonstrating a pattern of violation of community\nstandards, including sustained inappropriate behavior, harassment of an\nindividual, or aggression toward or disparagement of classes of individuals.\n\n**Consequence**: A permanent ban from any sort of public interaction within the\ncommunity.\n\n## Attribution\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], \nversion 2.1, available at\n[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].\n\nCommunity Impact Guidelines were inspired by\n[Mozilla's code of conduct enforcement ladder][Mozilla CoC].\n\nFor answers to common questions about this code of conduct, see the FAQ at\n[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at\n[https://www.contributor-covenant.org/translations][translations].\n\n[homepage]: https://www.contributor-covenant.org\n[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html\n[Mozilla CoC]: https://github.com/mozilla/diversity\n[FAQ]: https://www.contributor-covenant.org/faq\n[translations]: https://www.contributor-covenant.org/translations\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to Substation\n\nThank you so much for your interest in contributing to Substation! This document contains guidelines to follow when contributing to the project.\n\n## Table Of Contents\n\n[Code of Conduct](#code-of-conduct)\n\n[Submissions](#submissions)\n  + [Changes](#submitting-changes)\n  + [Bugs](#submitting-bugs)\n  + [Enhancements](#submitting-enhancements)\n\n[Development](#development)\n  + [Development Environment](#development-environment)\n  + [Conditions](#conditions)\n  + [Transforms](#transforms)\n  + [Testing](#testing)\n    + [Config Unit Tests](#config-unit-tests)\n\n[Style Guides](#style-guides)\n  + [Design Patterns](#design-patterns)\n  + [Naming Conventions](#naming-conventions)\n  + [Go](#go-style-guide)\n  + [Python](#python-style-guide)\n\n## Code of Conduct\n\nThe Code of Conduct can be reviewed [here](CODE_OF_CONDUCT.md).\n\n## Submissions\n\n### Submitting Changes\n\nPull requests should be submitted using the pull request template. Changes will be validated through automation and by the project maintainers before merging to main.\n\n### Submitting Bugs\n\nBugs should be submitted as issues using the issue template.\n\n### Submitting Enhancements\n\nEnhancements should be submitted as issues using the issue template.\n\n## Development\n\n### Development Environment\n\nThe project supports development through the use of [Visual Studio Code configurations](https://code.visualstudio.com/docs/remote/containers). The VS Code [development container](.devcontainer/Dockerfile) contains all packages required to develop and test changes locally before submitting pull requests.\n\n### [Conditions](condition/)\n\nEach condition should be functional and solve a single problem, and each one is nested under a \"family\" of conditions. (We may ask that you split complex condition logic into multiple conditions.) 
For example, there is a family for string comparisons:\n   - Equal To (`cnd.string.equal_to`, `cnd.str.eq`)\n   - Starts With (`cnd.string.starts_with`, `cnd.str.prefix`)\n   - Ends With (`cnd.string.ends_with`, `cnd.str.suffix`)\n   - Contains (`cnd.string.contains`, `cnd.str.has`)\n   - Match (regular expression) (`cnd.string.match`)\n   - Greater Than (`cnd.string.greater_than`, `cnd.str.gt`)\n   - Less Than (`cnd.string.less_than`, `cnd.str.lt`)\n\nConditions may require changes to the [configuration library](substation.libsonnet) (usually when adding features or making breaking changes). For new conditions, we typically ask that you add a new [example](examples/) that uses a config unit test.\n\nConditions may reuse these field structures:\n   - `object`: For reading from JSON objects.\n\nIn some cases, we may ask you to rename fields for consistency.\n\n### [Transforms](transform/)\n\nEach transform should be functional and solve a single problem, and each one is nested under a \"family\" of transforms. (We may ask that you split complex transform logic into multiple transforms.) For example, there is a family for JSON object operations:\n   - Copy (`tf.object.copy`, `tf.obj.cp`)\n   - Delete (`tf.object.delete`, `tf.obj.del`)\n   - Insert (`tf.object.insert`)\n   - To Boolean (`tf.object.to.boolean`, `tf.obj.to.bool`)\n   - To String (`tf.object.to.string`, `tf.obj.to.str`)\n   - To Float (`tf.object.to.float`)\n   - To Integer (`tf.object.to.integer`, `tf.obj.to.int`)\n   - To Unsigned Integer (`tf.object.to.unsigned_integer`, `tf.obj.to.uint`)\n\nTransforms may require changes to the [configuration library](substation.libsonnet) (usually when adding features or making breaking changes). For new transforms, we typically ask that you add a new [example](examples/) that uses a config unit test.\n\nTransforms may reuse these field structures:\n   - `id`: For uniquely identifying a transform. 
(If not configured, then this is automatically generated when a configuration is compiled by Jsonnet.)\n   - `object`: For reading from and writing to JSON objects.\n   - `batch`: For stateful collection of multiple messages in a transform.\n   - `transforms`: For chaining multiple transforms together. (Used in `meta` transforms.)\n   - `aux_transforms`: For chaining multiple transforms together, _after_ the primary transform has executed. (Used in `send` transforms.)\n\nIn some cases, we may ask you to rename fields for consistency.\n\n### Testing\n\nWe rely on contributors to test changes before they are submitted as pull requests. Any components added or changed should be tested and public packages should be supported by unit tests.\n\n#### Config Unit Tests\n\nConfiguration examples should use config unit tests to demo new concepts or features, like this:\n\n```jsonnet\n{\n  tests: [\n    {\n      // Every test should have a unique name.\n      name: 'my-passing-test',\n      // Generates the test message '{\"a\": true}' which\n      // is run through the configured transforms and\n      // then checked against the condition.\n      transforms: [\n        sub.tf.test.message({ value: {a: true} }),\n      ],\n      // Checks if key 'x' == 'true'.\n      condition: sub.cnd.all([\n        sub.cnd.str.eq({ object: {source_key: 'x'}, value: 'true' }),\n      ])\n    },\n  ],\n  // These transforms process the test message and the result\n  // is checked against the condition.\n  transforms: [\n    // Copies the value of key 'a' to key 'x'.\n    sub.tf.obj.cp({ object: { source_key: 'a', target_key: 'x' } }),\n  ],\n}\n```\n\n## Style Guides\n\n### Design Patterns\n\n#### Environment Variables\n\nApplications may implement runtime settings that are managed by environment variables. 
For example, the [AWS Lambda application](/cmd/aws/lambda/substation/) uses `SUBSTATION_LAMBDA_HANDLER` to manage [invocation settings](https://docs.aws.amazon.com/lambda/latest/dg/lambda-invocation.html). These should reference the application by name, if possible.\n\n#### Configurations\n\nSubstation uses a single configuration pattern for all components in the system (see `Config` in [config/config.go](/config/config.go)). This pattern is highly reusable and should be embedded to create custom configurations. Below is an example that shows how configurations should be designed:\n\n```json\n   \"foo\": {\n\t  \"settings\": { ... },\n\t  \"type\": \"fooer\"\n   },\n   \"bar\": {\n      \"settings\": {\n         \"baz\": [\n            {\n               \"settings\": { ... },\n               \"type\": \"bazar\"\n            },\n         ]\n      },\n      \"type\": \"barre\"\n   }\n```\n\nRepeating this pattern allows components and applications to integrate with Substation's factory patterns.\n\n#### Factories\n\nSubstation relies on [factory methods](https://refactoring.guru/design-patterns/factory-method) to create objects that [satisfy interfaces](https://go.dev/doc/effective_go#interface_methods) across the project. Factories should be combined with the configuration design pattern to create new components.\n\nFactories are the preferred method for allowing users to customize the system. Example factories can be seen in [condition](/condition/condition.go) and [transform](/transform/transform.go).\n\n#### Reading and Writing Streaming Data\n\nWe prefer to use the io package for reading (e.g., io.Reader) and writing (e.g., io.Writer) streams of data. 
This reduces memory usage and decreases the likelihood that we will need to refactor methods and functions that handle streaming data.\n\nSubstation commonly uses these io compatible containers:\n\n- open files are created by calling `os.CreateTemp(\"\", \"substation\")`\n\n- bytes buffers are created by calling `new(bytes.Buffer)`\n\n### Naming Conventions\n\n#### Breaking Changes\n\nAny change that modifies the public API of Go packages and applications is a breaking change, and any source code that has non-obvious impact on the public API should be tagged with `BREAKING CHANGE` in a comment.\n\n#### Errors\n\nErrors should always start with `err` (or `Err`, if they are public). Commonly used errors are defined in [internal/errors.go](internal/errors.go).\n\nIf the error is related to a specific component, then the component name should be included in the error. For example, if the error is related to the `Foo` component, then the error should be named `errFooShortDescription`.\n\n#### Environment Variables\n\nEnvironment variable keys and values specific to the Substation application should always use SCREAMING_SNAKE_CASE. 
If the key or value refers to a cloud service provider, then it should always directly refer to that provider (for example, AWS_API_GATEWAY).\n\nAny environment variable that changes a default runtime setting should always start with SUBSTATION (for example, SUBSTATION_CONCURRENCY).\n\n#### Application Variables\n\nVariable names should always follow conventions from [Effective Go](https://go.dev/doc/effective_go#names), the [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments#variable-names) and avoid [predeclared identifiers](https://go.dev/ref/spec#Predeclared_identifiers).\n\n#### Source Metadata\n\nSources that [add metadata during message creation](/message/) should use lowerCamelCase for their JSON keys.\n\n#### Package Configurations\n\nConfigurations for packages (for example, conditions and transforms) should always use lower_snake_case in their JSON keys. This helps maintain readability when reviewing large configuration files.\n\nWe strongly urge everyone to use Jsonnet for managing configurations.\n\n### Go Style Guide\n\nGo code should follow [Effective Go](https://go.dev/doc/effective_go) as a baseline.\n\n### Python Style Guide\n\nPython code should follow [Google's Python Style Guide](https://google.github.io/styleguide/pyguide.html) as a baseline.\n"
  },
  {
    "path": "MIGRATION.md",
    "content": "# Migration\n\nUse this as a guide for migrating between major versions of Substation.\n\n## v2.0.0\n\n### Applications (cmd/)\n\n#### AWS Lambda Handlers\n\nMultiple AWS Lambda handlers were renamed to better reflect the AWS service they interact with:\n- Renamed `AWS_KINESIS_DATA_FIREHOSE` to `AWS_DATA_FIREHOSE`.\n- Renamed `AWS_KINESIS` to `AWS_KINESIS_DATA_STREAM`.\n- Renamed `AWS_DYNAMODB` to `AWS_DYNAMODB_STREAM`.\n\nv1.x.x:\n\n```hcl\nmodule \"node\" {\n  source    = \"build/terraform/aws/lambda\"\n\n  config = {\n    name        = \"node\"\n    description = \"Substation node that is invoked by a Kinesis Data Stream.\"\n    image_uri   = \"123456789012.dkr.ecr.us-east-1.amazonaws.com/substation:v1.0.0\"\n    image_arm   = true\n\n    env = {\n      \"SUBSTATION_CONFIG\" : \"http://localhost:2772/applications/substation/environments/example/configurations/node\"\n      \"SUBSTATION_LAMBDA_HANDLER\" : \"AWS_KINESIS\"\n    }\n  }\n}\n```\n\nv2.x.x:\n\n```hcl\nmodule \"node\" {\n  source    = \"build/terraform/aws/lambda\"\n\n  config = {\n    name        = \"node\"\n    description = \"Substation node that is invoked by a Kinesis Data Stream.\"\n    image_uri   = \"123456789012.dkr.ecr.us-east-1.amazonaws.com/substation:v2.0.0\"\n    image_arm   = true\n\n    env = {\n      \"SUBSTATION_CONFIG\" : \"http://localhost:2772/applications/substation/environments/example/configurations/node\"\n      \"SUBSTATION_LAMBDA_HANDLER\" : \"AWS_KINESIS_DATA_STREAM\"\n    }\n  }\n}\n```\n\n### Conditions (condition/)\n\n#### Conditioner Interface\n\nThe `Inspector` interface was renamed to `Conditioner` to standardize the naming convention used across the project.\n\n#### `meta.condition` Condition\n\nThis is replaced by the `meta.all`, `meta.any`, and `meta.none` conditions.\n\nv1.x.x:\n\n```jsonnet\nsub.cnd.all([\n  sub.cnd.str.eq({ value: 'FOO' }),\n  sub.cnd.meta.condition({ condition: sub.cnd.any([\n    sub.cnd.str.eq({ value: 'BAR' }),\n    
sub.cnd.str.eq({ value: 'BAZ' }),\n  ]) }),\n]),\n```\n\nv2.x.x:\n\n```jsonnet\nsub.cnd.all([\n  sub.cnd.str.eq({ value: 'FOO' }),\n  sub.cnd.any([\n    sub.cnd.str.eq({ value: 'BAR' }),\n    sub.cnd.str.eq({ value: 'BAZ' }),\n  ]),\n]),\n```\n\n#### `meta.for_each` Condition\n\nThis is replaced by the `meta.all`, `meta.any`, and `meta.none` conditions. If the `object.source_key` value is an array, then the data is treated as a list of elements.\n\nv1.x.x:\n\n```jsonnet\nsub.cnd.meta.for_each({\n  object: { source_key: 'field' },\n  type: 'any',\n  inspector: sub.cnd.str.eq({ value: 'FOO' }),\n})\n```\n\nv2.x.x:\n\n```jsonnet\nsub.cnd.meta.any({\n  object: { source_key: 'field' },\n  conditions: [ sub.cnd.str.eq({ value: 'FOO' }) ],\n})\n```\n\n#### `meta.negate` Condition\n\nThis is replaced by the `meta.none` Condition.\n\nv1.x.x:\n\n```jsonnet\nsub.cnd.meta.negate({ inspector: sub.cnd.str.eq({ value: 'FOO' }) })\n```\n\nv2.x.x:\n\n```jsonnet\nsub.cnd.meta.none({ conditions: [ sub.cnd.str.eq({ value: 'FOO' }) ] })\n```\n\n\n```jsonnet\nsub.cnd.none([ sub.cnd.str.eq({ value: 'FOO' }) ])\n```\n\n#### `meta.err` Condition\n\nThis is removed and was not replaced. Remove any references to this inspector.\n\n### Transforms (transforms)\n\n#### `send.aws.*` Transforms\n\nThe AWS resource fields were replaced by an `aws` object field that contains the sub-fields `arn` and `assume_role_arn`. 
The region for each AWS client is derived from either the resource ARN or assumed role ARN.\n\nv1.x.x:\n\n```jsonnet\nsub.tf.send.aws.s3({\n  bucket_name: 'substation',\n  file_path: { time_format: '2006/01/02/15', uuid: true, suffix: '.json' },\n}),\n```\n\nv2.x.x:\n\n```jsonnet\nsub.tf.send.aws.s3({\n  aws: { arn: 'arn:aws:s3:::substation' },\n  file_path: { time_format: '2006/01/02/15', uuid: true, suffix: '.json' },\n}),\n```\n\n**NOTE: This change also applies to every configuration that relies on an AWS resource.**\n\n#### `meta.*` Transforms\n\nThe `transform` field is removed from all transforms and was replaced with the `transforms` field.\n\nv1.x.x:\n\n```jsonnet\nsub.tf.meta.switch({ cases: [\n  {\n    condition: sub.cnd.all([\n      sub.cnd.str.eq({ obj: { source_key: 'field' }, value: 'FOO' }),\n    ]),\n    transform: sub.tf.obj.insert({ object: { target_key: 'field' }, value: 'BAR' }),\n  },\n]})\n```\n\nv2.x.x:\n\n```jsonnet\nsub.tf.meta.switch({ cases: [\n  {\n    condition: sub.cnd.str.eq({ obj: { source_key: 'field' }, value: 'FOO' }),\n    transforms: [\n      sub.tf.obj.insert({ object: { target_key: 'field' }, value: 'BAR' })\n    ],\n  },\n]})\n```\n\n#### `meta.retry` Transform\n\nRetry settings were removed from all transforms and replaced by the `meta.retry` transform. It is recommended to create a reusable pattern for common retry scenarios.\n\nv1.x.x:\n\n```jsonnet\nsub.tf.send.aws.sqs({\n  arn: 'arn:aws:sqs:us-east-1:123456789012:substation',\n  retry: { count: 3 },\n})\n```\n\nv2.x.x:\n\n```jsonnet\nsub.tf.meta.retry({\n  retry: { count: 3, delay: '1s' },\n  transforms: [\n    sub.tf.send.aws.sqs({\n      aws: { arn: 'arn:aws:sqs:us-east-1:123456789012:substation' },\n    }),\n  ],\n})\n```\n\n**NOTE: For AWS services, retries for the client can be configured in Terraform by using the AWS_MAX_ATTEMPTS environment variable. 
This is used _in addition to_ the `meta.retry` transform.**\n\n#### `meta.pipeline` Transform\n\nThis is removed and was not replaced. Remove any references to this transform and replace it with the `transforms` field used in other meta transforms.\n\n#### `send.aws.dynamodb` Transform\n\nThe `send.aws.dynamodb` transform was renamed to `send.aws.dynamodb.put`.\n\nv1.x.x:\n\n```jsonnet\nsub.tf.send.aws.dynamodb({\n  table_name: 'substation',\n}),\n```\n\nv2.x.x:\n\n```jsonnet\nsub.tf.send.aws.dynamodb.put({\n  aws: { arn: 'arn:aws:dynamodb:us-east-1:123456789012:table/substation' },\n}),\n```\n\n#### `enrich.aws.dynamodb` Transform\n\nThe `enrich.aws.dynamodb` transform was renamed to `enrich.aws.dynamodb.query`, and had these additional changes:\n- `PartitionKey` and `SortKey` now reference the column names in the DynamoDB table and are nested under the `Attributes` field.\n- By default, the value retrieved from `Object.SourceKey` is used as the `PartitionKey` value. If the `SortKey` is provided and the value from `Object.SourceKey` is an array, then the first element is used as the `PartitionKey` value and the second element is used as the `SortKey` value.\n- The `KeyConditionExpression` field was removed because this is now a derived value.\n\nv1.x.x:\n\n```jsonnet\n// In v1.x.x, the DynamoDB column names must always be 'PK' and/or 'SK'.\nsub.tf.obj.cp({ object: { src: 'id', trg: 'meta ddb.PK' } }),\nsub.transform.enrich.aws.dynamodb({\n  object: { source_key: 'meta ddb', target_key: 'user' },\n  table_name: 'substation',\n  partition_key: 'PK',\n  key_condition_expression: 'PK = :PK',\n}),\n```\n\nv2.x.x:\n\n```jsonnet\nsub.transform.enrich.aws.dynamodb.query({\n  object: { source_key: 'id', target_key: 'user' },\n  aws: { arn: 'arn:aws:dynamodb:us-east-1:123456789012:table/substation' },\n  attributes: {\n    partition_key: 'PK',\n  },\n}),\n```\n\n#### `send.aws.kinesis_data_firehose` Transform\n\nThe `send.aws.kinesis_data_firehose` transform was renamed to 
`send.aws.data_firehose`.\n\nv1.x.x:\n\n```jsonnet\nsub.tf.send.aws.kinesis_data_firehose({\n  stream_name: 'substation',\n}),\n```\n\nv2.x.x:\n\n```jsonnet\nsub.tf.send.aws.data_firehose({\n  aws: { arn: 'arn:aws:firehose:us-east-1:123456789012:deliverystream/substation' },\n}),\n```\n"
  },
  {
    "path": "README.md",
    "content": "# Substation\n\n![Substation Banner](.github/media/substation_banner.png)\n\n<p align=\"center\"><b>Substation is a toolkit for routing, normalizing, and enriching security event and audit logs.</b></p>\n\n<div align=\"center\">\n\n[Releases][releases]&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;[Documentation][docs]&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;[Adopters][adopters]&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;[Announcement (2022)][announcement]&nbsp;&nbsp;&nbsp;|&nbsp;&nbsp;&nbsp;[v1.0 Release (2024)][v1_release] \n\n[![OSSF-Scorecard Score](https://img.shields.io/ossf-scorecard/github.com/brexhq/substation?style=for-the-badge)](https://scorecard.dev/viewer/?uri=github.com/brexhq/substation)\n![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/brexhq/substation/code.yml?style=for-the-badge)\n[![GitHub Release](https://img.shields.io/github/v/release/brexhq/substation?sort=semver&style=for-the-badge&link=https%3A%2F%2Fgithub.com%2Fbrexhq%2Fsubstation%2Freleases%2Flatest)](https://github.com/brexhq/substation/releases)\n![GitHub Created At](https://img.shields.io/github/created-at/brexhq/substation?style=for-the-badge&label=created)\n[![GitHub License](https://img.shields.io/github/license/brexhq/substation?style=for-the-badge)](https://github.com/brexhq/substation/blob/main/LICENSE)\n\n</div>\n\n## Quickstart\n\nWant to see a demo before diving into the documentation? 
Run this command:\n\n```sh\nexport PATH=$PATH:$(go env GOPATH)/bin && \\\ngo install github.com/brexhq/substation/v2/cmd/substation@latest && \\\nsubstation demo\n```\n\n## At a Glance\n\nSubstation is inspired by data pipeline systems like Logstash and Fluentd, but is built for modern security teams:\n\n- **Extensible Data Processing**: Build data processing pipeline systems and microservices using out-of-the-box applications and 100+ data transformation functions, or create your own written in Go.\n- **Route Data Across the Cloud**: Conditionally route data to, from, and between AWS cloud services, including S3, Kinesis, SQS, and Lambda, or to any HTTP endpoint.\n- **Bring Your Own Schema**: Format, normalize, and enrich event logs to comply with the Elastic Common Schema (ECS), Open Cybersecurity Schema Framework (OCSF), or any other schema.\n- **Unlimited Data Enrichment**: Use external APIs to enrich event logs affordably and at scale with enterprise and threat intelligence, or build a microservice that reduces spend in expensive security APIs.\n- **No Servers, No Maintenance**: Deploys as a serverless application in your AWS account, launches in minutes using Terraform, and requires no maintenance after deployment.\n- **Runs Almost Anywhere**: Create applications that run on most platforms supported by Go and transform data consistently across laptops, servers, containers, and serverless functions.\n- **High Performance, Low Cost**: Transform 100,000+ events per second while keeping cloud costs as low as a few cents per GB. 
Vendor solutions, like [Cribl](https://cribl.io/cribl-pricing/) and [Datadog](https://www.datadoghq.com/pricing/?product=observability-pipelines#products), can cost up to 10x more.\n\nAll of these data pipeline and microservice systems, and many more, can be built with Substation:\n\n![Example Substation architectures](.github/media/substation_architecture.png)\n\n## Transforming Event Logs\n\nSubstation excels at formatting, normalizing, and enriching event logs. For example, Zeek connection logs can be transformed to comply with the Elastic Common Schema:\n\n<table>\n<tr>\n<th><code>Original Event</code></th>\n<th><code>Transformed Event</code></th>\n</tr>\n<tr>\n<td>\n\n```json\n{\n  \"ts\": 1591367999.430166,\n  \"uid\": \"C5bLoe2Mvxqhawzqqd\",\n  \"id.orig_h\": \"192.168.4.76\",\n  \"id.orig_p\": 46378,\n  \"id.resp_h\": \"31.3.245.133\",\n  \"id.resp_p\": 80,\n  \"proto\": \"tcp\",\n  \"service\": \"http\",\n  \"duration\": 0.25411510467529297,\n  \"orig_bytes\": 77,\n  \"resp_bytes\": 295,\n  \"conn_state\": \"SF\",\n  \"missed_bytes\": 0,\n  \"history\": \"ShADadFf\",\n  \"orig_pkts\": 6,\n  \"orig_ip_bytes\": 397,\n  \"resp_pkts\": 4,\n  \"resp_ip_bytes\": 511\n}\n```\n</td>\n<td>\n\n```json\n{\n  \"event\": {\n    \"original\": {\n      \"ts\": 1591367999.430166,\n      \"uid\": \"C5bLoe2Mvxqhawzqqd\",\n      \"id.orig_h\": \"192.168.4.76\",\n      \"id.orig_p\": 46378,\n      \"id.resp_h\": \"31.3.245.133\",\n      \"id.resp_p\": 80,\n      \"proto\": \"tcp\",\n      \"service\": \"http\",\n      \"duration\": 0.25411510467529297,\n      \"orig_bytes\": 77,\n      \"resp_bytes\": 295,\n      \"conn_state\": \"SF\",\n      \"missed_bytes\": 0,\n      \"history\": \"ShADadFf\",\n      \"orig_pkts\": 6,\n      \"orig_ip_bytes\": 397,\n      \"resp_pkts\": 4,\n      \"resp_ip_bytes\": 511\n    },\n    \"hash\": \"af70ea0b38e1fb529e230d3eca6badd54cd6a080d7fcb909cac4ee0191bb788f\",\n    \"created\": \"2022-12-30T17:20:41.027505Z\",\n    \"id\": 
\"C5bLoe2Mvxqhawzqqd\",\n    \"kind\": \"event\",\n    \"category\": [\n      \"network\"\n    ],\n    \"action\": \"network-connection\",\n    \"outcome\": \"success\",\n    \"duration\": 254115104.675293\n  },\n  \"@timestamp\": \"2020-06-05T14:39:59.430166Z\",\n  \"client\": {\n    \"address\": \"192.168.4.76\",\n    \"ip\": \"192.168.4.76\",\n    \"port\": 46378,\n    \"packets\": 6,\n    \"bytes\": 77\n  },\n  \"server\": {\n    \"address\": \"31.3.245.133\",\n    \"ip\": \"31.3.245.133\",\n    \"port\": 80,\n    \"packets\": 4,\n    \"bytes\": 295,\n    \"domain\": \"h31-3-245-133.host.redstation.co.uk\",\n    \"top_level_domain\": \"co.uk\",\n    \"subdomain\": \"h31-3-245-133.host\",\n    \"registered_domain\": \"redstation.co.uk\",\n    \"as\": {\n      \"number\": 20860,\n      \"organization\": {\n        \"name\": \"Iomart Cloud Services Limited\"\n      }\n    },\n    \"geo\": {\n      \"continent_name\": \"Europe\",\n      \"country_name\": \"United Kingdom\",\n      \"city_name\": \"Manchester\",\n      \"location\": {\n        \"latitude\": 53.5039,\n        \"longitude\": -2.1959\n      },\n      \"accuracy\": 1000\n    }\n  },\n  \"network\": {\n    \"protocol\": \"tcp\",\n    \"bytes\": 372,\n    \"packets\": 10,\n    \"direction\": \"outbound\"\n  }\n}\n```\n</td>\n</tr>\n</table>\n\n## Routing Data\n\nSubstation can route data to several destinations from a single process and, unlike most other data pipeline systems,\ndata transformation and routing are functionally equivalent -- this means that data can be transformed or routed in any order.\n\nIn this configuration, data is:\n\n- Written to AWS S3\n- Printed to stdout\n- Conditionally dropped (filtered, removed)\n- Sent to an HTTPS endpoint\n\n```jsonnet\n// The input is a JSON array of objects, such as:\n// [\n//   { \"field1\": \"a\", \"field2\": 1, \"field3\": true },\n//   { \"field1\": \"b\", \"field2\": 2, \"field3\": false },\n//   ...\n// ]\nlocal sub = import 
'substation.libsonnet';\n\n// This filters events based on the value of field3.\nlocal is_false = sub.cnd.str.eq({ object: { source_key: 'field3' }, value: 'false' });\n\n{\n  transforms: [\n    // Pre-transformed data is written to an object in AWS S3 for long-term storage.\n    sub.tf.send.aws.s3({ aws: { arn: 'arn:aws:s3:::example-bucket-name' } }),\n    // The JSON array is split into individual events that go through\n    // the remaining transforms. Each event is printed to stdout.\n    sub.tf.agg.from.array(),\n    sub.tf.send.stdout(),\n    // Events where field3 is false are removed from the pipeline.\n    sub.pattern.tf.conditional(condition=is_false, transform=sub.tf.util.drop()),\n    // The remaining events are sent to an HTTPS endpoint.\n    sub.tf.send.http.post({ url: 'https://example-http-endpoint.com' }),\n  ],\n}\n```\n\nAlternatively, the data can be conditionally routed to different destinations:\n\n```jsonnet\nlocal sub = import 'substation.libsonnet';\n\n{\n  transforms: [\n    // If field3 is false, then the event is sent to an HTTPS endpoint; otherwise,\n    // the event is written to an object in AWS S3.\n    sub.tf.meta.switch({ cases: [\n      {\n        condition: sub.cnd.str.eq({ object: { source_key: 'field3' }, value: 'false' }),\n        transforms: [\n          sub.tf.send.http.post({ url: 'https://example-http-endpoint.com' }),\n        ],\n      },\n      {\n        transforms: [\n          sub.tf.send.aws.s3({ aws: { arn: 'arn:aws:s3:::example-bucket-name' } }),\n        ],\n      },\n    ] }),\n    // The event is always available to any remaining transforms.\n    sub.tf.send.stdout(),\n  ],\n}\n```\n\n## Configuring Applications\n\nSubstation applications run almost anywhere (laptops, servers, containers, serverless functions) and all transform functions behave identically regardless of where they are run. 
This makes it easy to develop configuration changes locally, validate them in a build (CI/CD) pipeline, and run integration tests in a staging environment before deploying to production.\n\nConfigurations are written in Jsonnet and can be expressed as functional code, simplifying version control and making it easy to build custom data processing libraries. For power users, configurations also have abbreviations that make them easier to write. Compare the configuration below to similar configurations for Logstash and Fluentd:\n\n<table>\n<tr>\n<th><code>Substation</code></th>\n<th><code>Logstash</code></th>\n<th><code>Fluentd</code></th>\n</tr>\n<tr>\n<td>\n\n```jsonnet\nlocal sub = import 'substation.libsonnet';\n\n{\n  transforms: [\n    sub.tf.obj.cp({ object: { source_key: 'src_field_1', target_key: 'dest_field_1' } }),\n    sub.tf.obj.cp({ obj: { src: 'src_field_2', trg: 'dest_field_2' } }),\n    sub.tf.send.stdout(),\n    sub.tf.send.http.post({ url: 'https://example-http-endpoint.com' }),\n  ],\n}\n```\n</td>\n<td>\n\n```ruby\ninput {\n  file {\n    path => \"/path/to/your/file.log\"\n    start_position => \"beginning\"\n    sincedb_path => \"/dev/null\"\n    codec => \"json\"\n  }\n}\n\nfilter {\n  json {\n    source => \"message\"\n  }\n\n  mutate {\n    copy => { \"src_field_1\" => \"dest_field_1\" }\n    copy => { \"src_field_2\" => \"dest_field_2\" }\n  }\n}\n\noutput {\n  stdout {\n    codec => rubydebug\n  }\n\n  http {\n    url => \"https://example-http-endpoint.com\"\n    http_method => \"post\"\n    format => \"json\"\n  }\n}\n```\n</td>\n<td>\n\n```xml\n<source>\n  @type tail\n  path /path/to/your/file.log\n  pos_file /dev/null\n  tag file.log\n  format json\n</source>\n\n<filter file.log>\n  @type record_transformer\n  enable_ruby\n  <record>\n    dest_field_1 ${record['src_field_1']}\n    dest_field_2 ${record['src_field_2']}\n  </record>\n</filter>\n\n<match file.log>\n  @type copy\n  <store>\n    @type stdout\n  </store>\n  <store>\n    @type 
http\n    url https://example-http-endpoint.com\n    http_method post\n    <format>\n      @type json\n    </format>\n  </store>\n</match>\n```\n</td>\n</tr>\n</table>\n\n## Deploying to AWS\n\nSubstation includes Terraform modules for securely deploying data pipelines and microservices in AWS. These modules are designed for ease of use, but are also flexible enough to support managing complex systems. This configuration deploys a data pipeline that is capable of receiving data from API Gateway and storing it in an S3 bucket:\n\n<table>\n<tr>\n<th><code>resources.tf</code></th>\n<th><code>node.tf</code></th>\n</tr>\n<tr>\n<td>\n\n```tcl\n# These resources are deployed once and are used by all Substation infrastructure.\n\n# Substation resources can be encrypted using a customer-managed KMS key.\nmodule \"kms\" {\n  source = \"build/terraform/aws/kms\"\n\n  config = {\n    name   = \"alias/substation\"\n  }\n}\n\n# Substation typically uses AppConfig to manage configuration files, but\n# configurations can also be loaded from an S3 URI or an HTTP endpoint.\nmodule \"appconfig\" {\n  source = \"build/terraform/aws/appconfig\"\n\n  config = {\n    name = \"substation\"\n    environments = [{ name = \"example\" }]\n  }\n}\n\nmodule \"ecr\" {\n  source = \"build/terraform/aws/ecr\"\n  kms    = module.kms\n\n  config = {\n    name         = \"substation\"\n    force_delete = true\n  }\n}\n\nresource \"random_uuid\" \"s3\" {}\n\nmodule \"s3\" {\n  source = \"build/terraform/aws/s3\"\n  kms    = module.kms\n\n  config = {\n    # Bucket name is randomized to avoid collisions.\n    name = \"${random_uuid.s3.result}-substation\"\n  }\n\n  # Access is granted by providing the role name of a\n  # resource. 
This access applies least privilege and\n  # grants access to dependent resources, such as KMS.\n  access = [\n    # Lambda functions create unique roles that are\n    # used to access resources.\n    module.node.role.name,\n  ]\n}\n```\n</td>\n<td>\n\n```tcl\n# Deploys an unauthenticated API Gateway that forwards data to the node.\nmodule \"node_gateway\" {\n  source = \"build/terraform/aws/api_gateway/lambda\"\n  lambda = module.node\n\n  config = {\n    name = \"node_gateway\"\n  }\n\n  depends_on = [\n    module.node\n  ]\n}\n\nmodule \"node\" {\n  source = \"build/terraform/aws/lambda\"\n  kms       = module.kms  # Optional\n  appconfig = module.appconfig  # Optional\n\n  config = {\n    name        = \"node\"\n    description = \"Substation node that writes data to S3.\"\n    image_uri   = \"${module.ecr.url}:latest\"\n    image_arm   = true\n\n    env = {\n      \"SUBSTATION_CONFIG\" : \"https://localhost:2772/applications/substation/environments/example/configurations/node\"\n      \"SUBSTATION_DEBUG\" : true\n      # This Substation node will ingest data from API Gateway. More nodes can be\n      # deployed to ingest data from other sources, such as Kinesis or SQS.\n      \"SUBSTATION_LAMBDA_HANDLER\" : \"AWS_API_GATEWAY\"\n    }\n  }\n\n  depends_on = [\n    module.appconfig.name,\n    module.ecr.url,\n  ]\n}\n```\n</td>\n</tr>\n</table>\n\n## Getting Started\n\nYou can run Substation on:\n\n- [Docker](https://substation.readme.io/docs/try-substation-on-docker)\n- [macOS / Linux](https://substation.readme.io/docs/try-substation-on-macos-linux)\n- [AWS](https://substation.readme.io/docs/try-substation-on-aws)\n\n### Testing\n\nUse the Substation CLI tool to run through [examples](examples/) and unit test configurations:\n\n```sh\nsubstation test -h\n```\n\nExamples can be tested by running this command from the root of the project. 
For example:\n\n```sh\n% substation test -R examples/transform/time/str_conversion \n{\"time\":\"2024-01-01T01:02:03.123Z\"}\n{\"time\":\"2024-01-01T01:02:03\"}\nok\texamples/transform/time/str_conversion/config.jsonnet\t133µs\n```\n\n### Development\n\n[VS Code](https://code.visualstudio.com/docs/devcontainers/containers) is the recommended development environment for Substation. The project includes a [development container](.devcontainer/Dockerfile) that should be used to develop and test the system. Refer to the [development guide](CONTRIBUTING.md) for more information.\n\nIf you don't use VS Code, then you should run the development container from the command line:\n\n```sh\ngit clone https://github.com/brexhq/substation.git && cd substation && \\\ndocker build -t substation-dev .devcontainer/ && \\\ndocker run -v $(pwd):/workspaces/substation/  -w /workspaces/substation -v /var/run/docker.sock:/var/run/docker.sock -it substation-dev\n```\n\n### Deployment\n\nThe [Terraform documentation](build/terraform/aws/) includes guidance for deploying Substation to AWS.\n\n## Licensing\n\nSubstation and its associated code is released under the terms of the [MIT License](LICENSE).\n\n<!--Links-->\n[releases]:https://github.com/brexhq/substation/releases \"Substation Releases\"\n[docs]:https://substation.readme.io/docs \"Substation Documentation\"\n[adopters]:https://github.com/brexhq/substation/blob/main/ADOPTERS.md \"Substation Adopters\"\n[announcement]:https://medium.com/brexeng/announcing-substation-188d049d979b \"Substation Announcement Post\"\n[v1_release]:https://medium.com/brexeng/releasing-substation-v1-0-4d0314cbc45b \"Substation v1.0 Release Post\"\n"
  },
  {
    "path": "SECURITY.md",
    "content": "# Responsible Disclosure\n\nTo report security issues in Substation, please follow [Brex's Responsible Disclosure process](https://www.brex.com/security/responsible-disclosure/).\n"
  },
  {
    "path": "VERSIONING.md",
    "content": "# Versioning\n\nSubstation uses [Semantic Versioning 2.0](https://semver.org/). Versions are managed using Git tags and are updated by the maintainers when releases are made. The version applies to the [Go module](https://pkg.go.dev/github.com/brexhq/substation) and the components below:\n\n- cmd/aws/*\n- condition/*\n- config/*\n- message/*\n- transform/*\n- substation.go\n- substation.libsonnet\n- go.mod\n\nSome features may be labeled as \"experimental\" in the documentation. These features are not subject to the same versioning guarantees as the rest of the project and may be changed or removed at any time.\n\n## Go Versioning\n\nSubstation follows the [Go Release Policy](https://golang.org/doc/devel/release.html#policy). This means that the project will maintain compatibility with the latest two major versions of Go. For example, if the latest version of Go is 1.21, Substation will support Go 1.20 and 1.21. When Go 1.22 is released, Substation will drop support for Go 1.20 and support Go 1.21 and 1.22.\n\n## Dependency Versioning\n\nDependencies that are directly accessible via exported packages will cause version updates in Substation. For example, if an exported package dependency is patched, then Substation will also be patched. This also applies to minor and major updates.\n\nThese dependencies can be identified in the `go.mod` file by the comment `// Upgrades require SemVer bump.`.\n\n## Version Support\n\nThe maintainers will actively support the latest release of Substation with features, bug fixes, and security patches. Older versions will only receive security patches. If you are using an old version of Substation, we recommend upgrading to the latest version.\n"
  },
  {
    "path": "go.mod",
    "content": "module github.com/brexhq/substation/v2\n\ngo 1.23.0\n\nrequire (\n\tgithub.com/aws/aws-lambda-go v1.47.0\n\tgithub.com/aws/aws-sdk-go-v2 v1.37.1\n\tgithub.com/aws/aws-sdk-go-v2/config v1.28.6\n\tgithub.com/aws/aws-sdk-go-v2/credentials v1.17.47\n\tgithub.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.15.21\n\tgithub.com/aws/aws-sdk-go-v2/feature/dynamodb/expression v1.7.56\n\tgithub.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.43\n\tgithub.com/aws/aws-sdk-go-v2/service/cloudwatch v1.42.2\n\tgithub.com/aws/aws-sdk-go-v2/service/dynamodb v1.38.0\n\tgithub.com/aws/aws-sdk-go-v2/service/eventbridge v1.36.0\n\tgithub.com/aws/aws-sdk-go-v2/service/firehose v1.35.2\n\tgithub.com/aws/aws-sdk-go-v2/service/kinesis v1.32.7\n\tgithub.com/aws/aws-sdk-go-v2/service/lambda v1.69.1\n\tgithub.com/aws/aws-sdk-go-v2/service/s3 v1.85.1\n\tgithub.com/aws/aws-sdk-go-v2/service/secretsmanager v1.34.7\n\tgithub.com/aws/aws-sdk-go-v2/service/sns v1.33.7\n\tgithub.com/aws/aws-sdk-go-v2/service/sqs v1.37.2\n\tgithub.com/aws/aws-sdk-go-v2/service/sts v1.33.2\n\tgithub.com/aws/aws-xray-sdk-go/v2 v2.0.0\n\tgithub.com/aws/smithy-go v1.22.5\n\tgithub.com/awslabs/kinesis-aggregation/go/v2 v2.0.0-20241004223953-c2774b1ab29b\n\tgithub.com/golang/protobuf v1.5.4\n\tgithub.com/google/go-jsonnet v0.20.0\n\tgithub.com/google/uuid v1.6.0\n\tgithub.com/hashicorp/go-retryablehttp v0.7.7\n\tgithub.com/iancoleman/strcase v0.3.0\n\tgithub.com/itchyny/gojq v0.12.17\n\tgithub.com/klauspost/compress v1.17.11\n\tgithub.com/oschwald/maxminddb-golang v1.13.1\n\tgithub.com/sirupsen/logrus v1.9.3\n\tgithub.com/spf13/cobra v1.8.1\n\tgithub.com/tidwall/gjson v1.18.0 // Upgrades require SemVer bump.\n\tgithub.com/tidwall/sjson v1.2.5\n\tgolang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c\n\tgolang.org/x/net v0.41.0\n\tgolang.org/x/sync v0.16.0\n)\n\nrequire (\n\tcloud.google.com/go/storage v1.54.0\n\tgithub.com/GoogleCloudPlatform/functions-framework-go 
v1.9.2\n\tgithub.com/cloudevents/sdk-go/v2 v2.15.2\n)\n\nrequire (\n\tcel.dev/expr v0.20.0 // indirect\n\tcloud.google.com/go v0.121.0 // indirect\n\tcloud.google.com/go/auth v0.16.1 // indirect\n\tcloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect\n\tcloud.google.com/go/compute/metadata v0.6.0 // indirect\n\tcloud.google.com/go/functions v1.19.3 // indirect\n\tcloud.google.com/go/iam v1.5.2 // indirect\n\tcloud.google.com/go/monitoring v1.24.0 // indirect\n\tgithub.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect\n\tgithub.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 // indirect\n\tgithub.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 // indirect\n\tgithub.com/andybalholm/brotli v1.1.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.0 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.21 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/internal/configsources v1.4.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/internal/v4a v1.4.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.24.9 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.6 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.1 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/sso v1.24.7 // indirect\n\tgithub.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.6 // indirect\n\tgithub.com/cespare/xxhash/v2 v2.3.0 // indirect\n\tgithub.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // 
indirect\n\tgithub.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect\n\tgithub.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect\n\tgithub.com/felixge/httpsnoop v1.0.4 // indirect\n\tgithub.com/go-jose/go-jose/v4 v4.1.2 // indirect\n\tgithub.com/go-logr/logr v1.4.2 // indirect\n\tgithub.com/go-logr/stdr v1.2.2 // indirect\n\tgithub.com/google/s2a-go v0.1.9 // indirect\n\tgithub.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect\n\tgithub.com/googleapis/gax-go/v2 v2.14.1 // indirect\n\tgithub.com/hashicorp/go-cleanhttp v0.5.2 // indirect\n\tgithub.com/inconshreveable/mousetrap v1.1.0 // indirect\n\tgithub.com/itchyny/timefmt-go v0.1.6 // indirect\n\tgithub.com/jmespath/go-jmespath v0.4.0 // indirect\n\tgithub.com/json-iterator/go v1.1.10 // indirect\n\tgithub.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect\n\tgithub.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 // indirect\n\tgithub.com/parquet-go/parquet-go v0.25.1 // indirect\n\tgithub.com/pierrec/lz4/v4 v4.1.21 // indirect\n\tgithub.com/pkg/errors v0.9.1 // indirect\n\tgithub.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect\n\tgithub.com/spf13/pflag v1.0.5 // indirect\n\tgithub.com/spiffe/go-spiffe/v2 v2.5.0 // indirect\n\tgithub.com/tidwall/match v1.1.1 // indirect\n\tgithub.com/tidwall/pretty v1.2.1 // indirect\n\tgithub.com/valyala/bytebufferpool v1.0.0 // indirect\n\tgithub.com/valyala/fasthttp v1.58.0 // indirect\n\tgithub.com/zeebo/errs v1.4.0 // indirect\n\tgo.opentelemetry.io/auto/sdk v1.1.0 // indirect\n\tgo.opentelemetry.io/contrib/detectors/gcp v1.35.0 // indirect\n\tgo.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect\n\tgo.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect\n\tgo.opentelemetry.io/otel v1.35.0 // indirect\n\tgo.opentelemetry.io/otel/metric v1.35.0 // indirect\n\tgo.opentelemetry.io/otel/sdk v1.35.0 // 
indirect\n\tgo.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect\n\tgo.opentelemetry.io/otel/trace v1.35.0 // indirect\n\tgo.uber.org/atomic v1.4.0 // indirect\n\tgo.uber.org/multierr v1.1.0 // indirect\n\tgo.uber.org/zap v1.10.0 // indirect\n\tgolang.org/x/crypto v0.40.0 // indirect\n\tgolang.org/x/oauth2 v0.30.0 // indirect\n\tgolang.org/x/sys v0.34.0 // indirect\n\tgolang.org/x/text v0.27.0 // indirect\n\tgolang.org/x/time v0.11.0 // indirect\n\tgoogle.golang.org/api v0.232.0 // indirect\n\tgoogle.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect\n\tgoogle.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect\n\tgoogle.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 // indirect\n\tgoogle.golang.org/grpc v1.72.0 // indirect\n\tgoogle.golang.org/protobuf v1.36.6 // indirect\n\tgopkg.in/yaml.v2 v2.4.0 // indirect\n\tsigs.k8s.io/yaml v1.4.0 // indirect\n)\n"
  },
  {
    "path": "go.sum",
    "content": "cel.dev/expr v0.20.0 h1:OunBvVCfvpWlt4dN7zg3FM6TDkzOePe1+foGJ9AXeeI=\ncel.dev/expr v0.20.0/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw=\ncloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg=\ncloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q=\ncloud.google.com/go/auth v0.16.1 h1:XrXauHMd30LhQYVRHLGvJiYeczweKQXZxsTbV9TiguU=\ncloud.google.com/go/auth v0.16.1/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI=\ncloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=\ncloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=\ncloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I=\ncloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg=\ncloud.google.com/go/functions v1.19.3 h1:V0vCHSgFTUqKn57+PUXp1UfQY0/aMkveAw7wXeM3Lq0=\ncloud.google.com/go/functions v1.19.3/go.mod h1:nOZ34tGWMmwfiSJjoH/16+Ko5106x+1Iji29wzrBeOo=\ncloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=\ncloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=\ncloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc=\ncloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA=\ncloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE=\ncloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY=\ncloud.google.com/go/monitoring v1.24.0 h1:csSKiCJ+WVRgNkRzzz3BPoGjFhjPY23ZTcaenToJxMM=\ncloud.google.com/go/monitoring v1.24.0/go.mod h1:Bd1PRK5bmQBQNnuGwHBfUamAV1ys9049oEPHnn4pcsc=\ncloud.google.com/go/storage v1.54.0 h1:Du3XEyliAiftfyW0bwfdppm2MMLdpVAfiIg4T2nAI+0=\ncloud.google.com/go/storage v1.54.0/go.mod h1:hIi9Boe8cHxTyaeqh7KMMwKg088VblFK46C2x/BWaZE=\ncloud.google.com/go/trace v1.11.3 
h1:c+I4YFjxRQjvAhRmSsmjpASUKq88chOX854ied0K/pE=\ncloud.google.com/go/trace v1.11.3/go.mod h1:pt7zCYiDSQjC9Y2oqCsh9jF4GStB/hmjrYLsxRR27q8=\ngithub.com/DATA-DOG/go-sqlmock v1.5.1 h1:FK6RCIUSfmbnI/imIICmboyQBkOckutaa6R5YYlLZyo=\ngithub.com/DATA-DOG/go-sqlmock v1.5.1/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=\ngithub.com/GoogleCloudPlatform/functions-framework-go v1.9.2 h1:Cev/PdoxY86bJjGwHJcpiWMhrZMVEoKp9wuEp9gCUvw=\ngithub.com/GoogleCloudPlatform/functions-framework-go v1.9.2/go.mod h1:wLEV4uSJztSBI+QyUy2fkHBuGFjRIAEDOqcEQ2hwmgE=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 h1:fYE9p3esPxA/C0rQ0AHhP0drtPXDRhaWiwg1DPqO7IU=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0/go.mod h1:BnBReJLvVYx2CS/UHOgVz2BXKXD9wsQPxZug20nZhd0=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0 h1:OqVGm6Ei3x5+yZmSJG1Mh2NwHvpVmZ08CB5qJhT9Nuk=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 h1:6/0iUd0xrnX7qt+mLNRwg5c0PGv8wpE8K90ryANQwMI=\ngithub.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0=\ngithub.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=\ngithub.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=\ngithub.com/aws/aws-lambda-go v1.47.0 h1:0H8s0vumYx/YKs4sE7YM0ktwL2eWse+kfopsRI1sXVI=\ngithub.com/aws/aws-lambda-go v1.47.0/go.mod 
h1:dpMpZgvWx5vuQJfBt0zqBha60q7Dd7RfgJv23DymV8A=\ngithub.com/aws/aws-sdk-go-v2 v1.9.0/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=\ngithub.com/aws/aws-sdk-go-v2 v1.37.1 h1:SMUxeNz3Z6nqGsXv0JuJXc8w5YMtrQMuIBmDx//bBDY=\ngithub.com/aws/aws-sdk-go-v2 v1.37.1/go.mod h1:9Q0OoGQoboYIAJyslFyF1f5K1Ryddop8gqMhWx/n4Wg=\ngithub.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.0 h1:6GMWV6CNpA/6fbFHnoAjrv4+LGfyTqZz2LtCHnspgDg=\ngithub.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.0/go.mod h1:/mXlTIVG9jbxkqDnr5UQNQxW1HRYxeGklkM9vAFeabg=\ngithub.com/aws/aws-sdk-go-v2/config v1.28.6 h1:D89IKtGrs/I3QXOLNTH93NJYtDhm8SYa9Q5CsPShmyo=\ngithub.com/aws/aws-sdk-go-v2/config v1.28.6/go.mod h1:GDzxJ5wyyFSCoLkS+UhGB0dArhb9mI+Co4dHtoTxbko=\ngithub.com/aws/aws-sdk-go-v2/credentials v1.17.47 h1:48bA+3/fCdi2yAwVt+3COvmatZ6jUDNkDTIsqDiMUdw=\ngithub.com/aws/aws-sdk-go-v2/credentials v1.17.47/go.mod h1:+KdckOejLW3Ks3b0E3b5rHsr2f9yuORBum0WPnE5o5w=\ngithub.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.15.21 h1:FdDxp4HNtJWPBAOdkJ+84Dfx2TOA7Dq+cH72GDHhjnA=\ngithub.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.15.21/go.mod h1:doHEXGiMWQBxcTJy3YN1Ao2HCgCuMWumuvTULGndCuQ=\ngithub.com/aws/aws-sdk-go-v2/feature/dynamodb/expression v1.7.56 h1:LBLyOZPVFt53RvSOvzAfEs1lagLhNQQUO0q2gKpaNcQ=\ngithub.com/aws/aws-sdk-go-v2/feature/dynamodb/expression v1.7.56/go.mod h1:Ul6ESIrlilRfsKcbXX+OKR5YNByw8UOutPrhlFKEOFA=\ngithub.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.21 h1:AmoU1pziydclFT/xRV+xXE/Vb8fttJCLRPv8oAkprc0=\ngithub.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.21/go.mod h1:AjUdLYe4Tgs6kpH4Bv7uMZo7pottoyHMn4eTcIcneaY=\ngithub.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.43 h1:iLdpkYZ4cXIQMO7ud+cqMWR1xK5ESbt1rvN77tRi1BY=\ngithub.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.43/go.mod h1:OgbsKPAswXDd5kxnR4vZov69p3oYjbvUyIRBAAV0y9o=\ngithub.com/aws/aws-sdk-go-v2/internal/configsources v1.4.1 
h1:ksZXBYv80EFTcgc8OJO48aQ8XDWXIQL7gGasPeCoTzI=\ngithub.com/aws/aws-sdk-go-v2/internal/configsources v1.4.1/go.mod h1:HSksQyyJETVZS7uM54cir0IgxttTD+8aEoJMPGepHBI=\ngithub.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.1 h1:+dn/xF/05utS7tUhjIcndbuaPjfll2LhbH1cCDGLYUQ=\ngithub.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.1/go.mod h1:hyAGz30LHdm5KBZDI58MXx5lDVZ5CUfvfTZvMu4HCZo=\ngithub.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ=\ngithub.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc=\ngithub.com/aws/aws-sdk-go-v2/internal/v4a v1.4.1 h1:4HbnOGE9491a9zYJ9VpPh1ApgEq6ZlD4Kuv1PJenFpc=\ngithub.com/aws/aws-sdk-go-v2/internal/v4a v1.4.1/go.mod h1:Z6QnHC6TmpJWUxAy8FI4JzA7rTwl6EIANkyK9OR5z5w=\ngithub.com/aws/aws-sdk-go-v2/service/cloudwatch v1.42.2 h1:eMh+iBTF1CbpHMfiRvIaVm+rzrH1DOzuSFaR55O+bBo=\ngithub.com/aws/aws-sdk-go-v2/service/cloudwatch v1.42.2/go.mod h1:/A4zNqF1+RS5RV+NNLKIzUX1KtK5SoWgf/OpiqrwmBo=\ngithub.com/aws/aws-sdk-go-v2/service/dynamodb v1.38.0 h1:isKhHsjpQR3CypQJ4G1g8QWx7zNpiC/xKw1zjgJYVno=\ngithub.com/aws/aws-sdk-go-v2/service/dynamodb v1.38.0/go.mod h1:xDvUyIkwBwNtVZJdHEwAuhFly3mezwdEWkbJ5oNYwIw=\ngithub.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.24.9 h1:yhB2XYpHeWeAv5u3w9PFiSVIariSyhK5jcyQUFJpnIQ=\ngithub.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.24.9/go.mod h1:Hcjb2SiUo9v1GhpXjRNW7hAwfzAPfrsgnlKpP5UYEPY=\ngithub.com/aws/aws-sdk-go-v2/service/eventbridge v1.36.0 h1:UBCwgevYbPDbPb8LKyCmyBJ0Lk/gCPq4v85rZLe3vr4=\ngithub.com/aws/aws-sdk-go-v2/service/eventbridge v1.36.0/go.mod h1:ve9wzd6ToYjkZrF0nesNJxy14kU77QjrH5Rixrr4NJY=\ngithub.com/aws/aws-sdk-go-v2/service/firehose v1.35.2 h1:A4rkZ/YpyzoU8f8LMe1rPXEvkzX5R/vdAxDwN6IGegs=\ngithub.com/aws/aws-sdk-go-v2/service/firehose v1.35.2/go.mod h1:3Iza1sNaP9L+uKzhE08ilDSz8Dbu2tOL8e5exyj0etE=\ngithub.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 
h1:6+lZi2JeGKtCraAj1rpoZfKqnQ9SptseRZioejfUOLM=\ngithub.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0/go.mod h1:eb3gfbVIxIoGgJsi9pGne19dhCBpK6opTYpQqAmdy44=\ngithub.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.1 h1:ps3nrmBWdWwakZBydGX1CxeYFK80HsQ79JLMwm7Y4/c=\ngithub.com/aws/aws-sdk-go-v2/service/internal/checksum v1.8.1/go.mod h1:bAdfrfxENre68Hh2swNaGEVuFYE74o0SaSCAlaG9E74=\ngithub.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.6 h1:nbmKXZzXPJn41CcD4HsHsGWqvKjLKz9kWu6XxvLmf1s=\ngithub.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.6/go.mod h1:SJhcisfKfAawsdNQoZMBEjg+vyN2lH6rO6fP+T94z5Y=\ngithub.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.1 h1:ky79ysLMxhwk5rxJtS+ILd3Mc8kC5fhsLBrP27r6h4I=\ngithub.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.1/go.mod h1:+2MmkvFvPYM1vsozBWduoLJUi5maxFk5B7KJFECujhY=\ngithub.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.1 h1:MdVYlN5pcQu1t1OYx4Ajo3fKl1IEhzgdPQbYFCRjYS8=\ngithub.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.1/go.mod h1:iikmNLrvHm2p4a3/4BPeix2S9P+nW8yM1IZW73x8bFA=\ngithub.com/aws/aws-sdk-go-v2/service/kinesis v1.6.0/go.mod h1:9O7UG2pELnP0hq35+Gd7XDjOLBkg7tmgRQ0y14ZjoJI=\ngithub.com/aws/aws-sdk-go-v2/service/kinesis v1.32.7 h1:QTtbqxI+i2gaWjcTwJZtm8/xEl9kiQXXbOatGabNuXA=\ngithub.com/aws/aws-sdk-go-v2/service/kinesis v1.32.7/go.mod h1:5aKZaOb2yfdeAOvfam0/6HoUXg01pN172bn7MqpM35c=\ngithub.com/aws/aws-sdk-go-v2/service/lambda v1.69.1 h1:q1NrvoJiz0rm9ayKOJ9wsMGmStK6rZSY36BDICMrcuY=\ngithub.com/aws/aws-sdk-go-v2/service/lambda v1.69.1/go.mod h1:hDj7He9kbR9T5zugnS+T21l4z6do4SEGuno/BpJLpA0=\ngithub.com/aws/aws-sdk-go-v2/service/route53 v1.6.2 h1:OsggywXCk9iFKdu2Aopg3e1oJITIuyW36hA/B0rqupE=\ngithub.com/aws/aws-sdk-go-v2/service/route53 v1.6.2/go.mod h1:ZnAMilx42P7DgIrdjlWCkNIGSBLzeyk6T31uB8oGTwY=\ngithub.com/aws/aws-sdk-go-v2/service/s3 v1.85.1 
h1:Hsqo8+dFxSdDvv9B2PgIx1AJAnDpqgS0znVI+R+MoGY=\ngithub.com/aws/aws-sdk-go-v2/service/s3 v1.85.1/go.mod h1:8Q0TAPXD68Z8YqlcIGHs/UNIDHsxErV9H4dl4vJEpgw=\ngithub.com/aws/aws-sdk-go-v2/service/secretsmanager v1.34.7 h1:Nyfbgei75bohfmZNxgN27i528dGYVzqWJGlAO6lzXy8=\ngithub.com/aws/aws-sdk-go-v2/service/secretsmanager v1.34.7/go.mod h1:FG4p/DciRxPgjA+BEOlwRHN0iA8hX2h9g5buSy3cTDA=\ngithub.com/aws/aws-sdk-go-v2/service/sns v1.33.7 h1:N3o8mXK6/MP24BtD9sb51omEO9J9cgPM3Ughc293dZc=\ngithub.com/aws/aws-sdk-go-v2/service/sns v1.33.7/go.mod h1:AAHZydTB8/V2zn3WNwjLXBK1RAcSEpDNmFfrmjvrJQg=\ngithub.com/aws/aws-sdk-go-v2/service/sqs v1.37.2 h1:mFLfxLZB/TVQwNJAYox4WaxpIu+dFVIcExrmRmRCOhw=\ngithub.com/aws/aws-sdk-go-v2/service/sqs v1.37.2/go.mod h1:GnvfTdlvcpD+or3oslHPOn4Mu6KaCwlCp+0p0oqWnrM=\ngithub.com/aws/aws-sdk-go-v2/service/sso v1.24.7 h1:rLnYAfXQ3YAccocshIH5mzNNwZBkBo+bP6EhIxak6Hw=\ngithub.com/aws/aws-sdk-go-v2/service/sso v1.24.7/go.mod h1:ZHtuQJ6t9A/+YDuxOLnbryAmITtr8UysSny3qcyvJTc=\ngithub.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.6 h1:JnhTZR3PiYDNKlXy50/pNeix9aGMo6lLpXwJ1mw8MD4=\ngithub.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.6/go.mod h1:URronUEGfXZN1VpdktPSD1EkAL9mfrV+2F4sjH38qOY=\ngithub.com/aws/aws-sdk-go-v2/service/sts v1.33.2 h1:s4074ZO1Hk8qv65GqNXqDjmkf4HSQqJukaLuuW0TpDA=\ngithub.com/aws/aws-sdk-go-v2/service/sts v1.33.2/go.mod h1:mVggCnIWoM09jP71Wh+ea7+5gAp53q+49wDFs1SW5z8=\ngithub.com/aws/aws-xray-sdk-go/v2 v2.0.0 h1:/AkLb6rmRWjz8pQTm6BxCGcjebS+W1yFoH9rxy3ekM8=\ngithub.com/aws/aws-xray-sdk-go/v2 v2.0.0/go.mod h1:yyjiofE/pQ9u682QgBw3tkyuyvcN+6piDiQnhwWMyng=\ngithub.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=\ngithub.com/aws/smithy-go v1.22.5 h1:P9ATCXPMb2mPjYBgueqJNCA5S9UfktsW0tTxi+a7eqw=\ngithub.com/aws/smithy-go v1.22.5/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=\ngithub.com/awslabs/kinesis-aggregation/go/v2 v2.0.0-20241004223953-c2774b1ab29b 
h1:kbD/R7CFXWfsTbiL+dlBMNhUi5z/KeSMan9oFSmtbxQ=\ngithub.com/awslabs/kinesis-aggregation/go/v2 v2.0.0-20241004223953-c2774b1ab29b/go.mod h1:0Qr1uMHFmHsIYMcG4T7BJ9yrJtWadhOmpABCX69dwuc=\ngithub.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=\ngithub.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=\ngithub.com/cloudevents/sdk-go/v2 v2.15.2 h1:54+I5xQEnI73RBhWHxbI1XJcqOFOVJN85vb41+8mHUc=\ngithub.com/cloudevents/sdk-go/v2 v2.15.2/go.mod h1:lL7kSWAE/V8VI4Wh0jbL2v/jvqsm6tjmaQBSvxcv4uE=\ngithub.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk=\ngithub.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=\ngithub.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=\ngithub.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=\ngithub.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=\ngithub.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=\ngithub.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M=\ngithub.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA=\ngithub.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A=\ngithub.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw=\ngithub.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI=\ngithub.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4=\ngithub.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=\ngithub.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=\ngithub.com/fatih/color v1.16.0 
h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=\ngithub.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=\ngithub.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=\ngithub.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=\ngithub.com/go-jose/go-jose/v4 v4.1.2 h1:TK/7NqRQZfgAh+Td8AlsrvtPoUyiHh0LqVvokh+1vHI=\ngithub.com/go-jose/go-jose/v4 v4.1.2/go.mod h1:22cg9HWM1pOlnRiY+9cQYJ9XHmya1bYW8OeDM6Ku6Oo=\ngithub.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=\ngithub.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=\ngithub.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=\ngithub.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=\ngithub.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=\ngithub.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=\ngithub.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=\ngithub.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=\ngithub.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=\ngithub.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=\ngithub.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=\ngithub.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=\ngithub.com/google/go-jsonnet v0.20.0 h1:WG4TTSARuV7bSm4PMB4ohjxe33IHT5WVTrJSU33uT4g=\ngithub.com/google/go-jsonnet v0.20.0/go.mod h1:VbgWF9JX7ztlv770x/TolZNGGFfiHEVx9G6ca2eUmeA=\ngithub.com/google/gofuzz 
v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=\ngithub.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc=\ngithub.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0=\ngithub.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=\ngithub.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=\ngithub.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=\ngithub.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4=\ngithub.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=\ngithub.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q=\ngithub.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA=\ngithub.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0 h1:pRhl55Yx1eC7BZ1N+BBWwnKaMyD8uC+34TLdndZMAKk=\ngithub.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0/go.mod h1:XKMd7iuf/RGPSMJ/U4HP0zS2Z9Fh8Ps9a+6X26m/tmI=\ngithub.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=\ngithub.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=\ngithub.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=\ngithub.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=\ngithub.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=\ngithub.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=\ngithub.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=\ngithub.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=\ngithub.com/inconshreveable/mousetrap v1.1.0 
h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=\ngithub.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=\ngithub.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg=\ngithub.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY=\ngithub.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=\ngithub.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=\ngithub.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=\ngithub.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=\ngithub.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=\ngithub.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=\ngithub.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=\ngithub.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=\ngithub.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=\ngithub.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=\ngithub.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=\ngithub.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=\ngithub.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=\ngithub.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=\ngithub.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=\ngithub.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=\ngithub.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=\ngithub.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=\ngithub.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 
h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=\ngithub.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=\ngithub.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=\ngithub.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=\ngithub.com/oschwald/maxminddb-golang v1.13.1 h1:G3wwjdN9JmIK2o/ermkHM+98oX5fS+k5MbwsmL4MRQE=\ngithub.com/oschwald/maxminddb-golang v1.13.1/go.mod h1:K4pgV9N/GcK694KSTmVSDTODk4IsCNThNdTmnaBZ/F8=\ngithub.com/parquet-go/parquet-go v0.25.1 h1:l7jJwNM0xrk0cnIIptWMtnSnuxRkwq53S+Po3KG8Xgo=\ngithub.com/parquet-go/parquet-go v0.25.1/go.mod h1:AXBuotO1XiBtcqJb/FKFyjBG4aqa3aQAAWF3ZPzCanY=\ngithub.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=\ngithub.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=\ngithub.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=\ngithub.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=\ngithub.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=\ngithub.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=\ngithub.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=\ngithub.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=\ngithub.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=\ngithub.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=\ngithub.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=\ngithub.com/sergi/go-diff v1.1.0/go.mod 
h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=\ngithub.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=\ngithub.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=\ngithub.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=\ngithub.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=\ngithub.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=\ngithub.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=\ngithub.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=\ngithub.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=\ngithub.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=\ngithub.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=\ngithub.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=\ngithub.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=\ngithub.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=\ngithub.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=\ngithub.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=\ngithub.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=\ngithub.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=\ngithub.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=\ngithub.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=\ngithub.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=\ngithub.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=\ngithub.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=\ngithub.com/tidwall/sjson v1.2.5/go.mod 
h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=\ngithub.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=\ngithub.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=\ngithub.com/valyala/fasthttp v1.58.0 h1:GGB2dWxSbEprU9j0iMJHgdKYJVDyjrOwF9RE59PbRuE=\ngithub.com/valyala/fasthttp v1.58.0/go.mod h1:SYXvHHaFp7QZHGKSHmoMipInhrI5StHrhDTYVEjK/Kw=\ngithub.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=\ngithub.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=\ngithub.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=\ngithub.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4=\ngo.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=\ngo.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=\ngo.opentelemetry.io/contrib/detectors/gcp v1.35.0 h1:bGvFt68+KTiAKFlacHW6AhA56GF2rS0bdD3aJYEnmzA=\ngo.opentelemetry.io/contrib/detectors/gcp v1.35.0/go.mod h1:qGWP8/+ILwMRIUf9uIVLloR1uo5ZYAslM4O6OqUi1DA=\ngo.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw=\ngo.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM=\ngo.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU=\ngo.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ=\ngo.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=\ngo.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=\ngo.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0 h1:PB3Zrjs1sG1GBX51SXyTSoOTqcDglmsk7nT6tkKPb/k=\ngo.opentelemetry.io/otel/exporters/stdout/stdoutmetric 
v1.35.0/go.mod h1:U2R3XyVPzn0WX7wOIypPuptulsMcPDPs/oiSVOMVnHY=\ngo.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=\ngo.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=\ngo.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY=\ngo.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg=\ngo.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o=\ngo.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w=\ngo.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=\ngo.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=\ngo.uber.org/atomic v1.4.0 h1:cxzIVoETapQEqDhQu3QfnvXAV4AlzcvUCxkVUFw3+EU=\ngo.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=\ngo.uber.org/multierr v1.1.0 h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI=\ngo.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=\ngo.uber.org/zap v1.10.0 h1:ORx85nbTijNz8ljznvCMR1ZBIPKFn3jQrag10X2AsuM=\ngo.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=\ngolang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=\ngolang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=\ngolang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY=\ngolang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8=\ngolang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=\ngolang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=\ngolang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=\ngolang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=\ngolang.org/x/sync v0.16.0 
h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=\ngolang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=\ngolang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=\ngolang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=\ngolang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=\ngolang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=\ngolang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0=\ngolang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=\ngolang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngoogle.golang.org/api v0.232.0 h1:qGnmaIMf7KcuwHOlF3mERVzChloDYwRfOJOrHt8YC3I=\ngoogle.golang.org/api v0.232.0/go.mod h1:p9QCfBWZk1IJETUdbTKloR5ToFdKbYh2fkjsUL6vNoY=\ngoogle.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb h1:ITgPrl429bc6+2ZraNSzMDk3I95nmQln2fuPstKwFDE=\ngoogle.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:sAo5UzpjUwgFBCzupwhcLcxHVDK7vG5IqI30YnwX2eE=\ngoogle.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 h1:vPV0tzlsK6EzEDHNNH5sa7Hs9bd7iXR7B1tSiPepkV0=\ngoogle.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:pKLAc5OolXC3ViWGI62vvC0n10CpwAtRcTNCFwTKBEw=\ngoogle.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2 h1:IqsN8hx+lWLqlN+Sc3DoMy/watjofWiU8sRFgQ8fhKM=\ngoogle.golang.org/genproto/googleapis/rpc v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=\ngoogle.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM=\ngoogle.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM=\ngoogle.golang.org/protobuf v1.26.0-rc.1/go.mod 
h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=\ngoogle.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=\ngoogle.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=\ngoogle.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=\ngopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=\ngopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=\ngopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=\ngopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=\ngopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\ngopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=\ngopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\nsigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=\nsigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=\n"
  },
  {
    "path": "substation.go",
    "content": "package substation\n\nimport (\n\t\"context\"\n\t_ \"embed\"\n\t\"encoding/json\"\n\t\"fmt\"\n\n\t\"github.com/brexhq/substation/v2/config\"\n\t\"github.com/brexhq/substation/v2/message\"\n\t\"github.com/brexhq/substation/v2/transform\"\n)\n\n//go:embed substation.libsonnet\nvar Library string\n\nvar errNoTransforms = fmt.Errorf(\"no transforms configured\")\n\n// Config is the core configuration for the application. Custom applications\n// should embed this and add additional configuration options.\ntype Config struct {\n\t// Transforms contains a list of data transformatons that are executed.\n\tTransforms []config.Config `json:\"transforms\"`\n}\n\n// Substation provides access to data transformation functions.\ntype Substation struct {\n\tcfg Config\n\n\tfactory transform.Factory\n\ttforms  []transform.Transformer\n}\n\n// New returns a new Substation instance.\nfunc New(ctx context.Context, cfg Config, opts ...func(*Substation)) (*Substation, error) {\n\tif cfg.Transforms == nil {\n\t\treturn nil, errNoTransforms\n\t}\n\n\tsub := &Substation{\n\t\tcfg:     cfg,\n\t\tfactory: transform.New,\n\t}\n\n\tfor _, o := range opts {\n\t\to(sub)\n\t}\n\n\t// Create transforms from the configuration.\n\tfor _, c := range cfg.Transforms {\n\t\tt, err := sub.factory(ctx, c)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tsub.tforms = append(sub.tforms, t)\n\t}\n\n\treturn sub, nil\n}\n\n// WithTransformFactory implements a custom transform factory.\nfunc WithTransformFactory(fac transform.Factory) func(*Substation) {\n\treturn func(s *Substation) {\n\t\ts.factory = fac\n\t}\n}\n\n// Transform runs the configured data transformation functions on the\n// provided messages.\n//\n// This is safe to use concurrently.\nfunc (s *Substation) Transform(ctx context.Context, msg ...*message.Message) ([]*message.Message, error) {\n\treturn transform.Apply(ctx, s.tforms, msg...)\n}\n\n// String returns a JSON representation of the configuration.\nfunc (s 
*Substation) String() string {\n\tb, err := json.Marshal(s.cfg)\n\tif err != nil {\n\t\treturn fmt.Sprintf(\"substation: %v\", err)\n\t}\n\n\treturn string(b)\n}\n"
  },
  {
    "path": "substation.libsonnet",
    "content": "local helpers = {\n  // If the input is not an array, then this returns it as an array.\n  make_array(i): if !std.isArray(i) then [i] else i,\n  abbv(settings): std.mergePatch(settings, {\n    object: if std.objectHas(settings, 'object') then $.abbv_obj(settings.object) else if std.objectHas(settings, 'obj') then $.abbv_obj(settings.obj) else null,\n    obj: null,\n  }),\n  abbv_obj(s): {\n    source_key: if std.objectHas(s, 'src') then s.src else if std.objectHas(s, 'source_key') then s.source_key else null,\n    src: null,\n    target_key: if std.objectHas(s, 'trg') then s.trg else if std.objectHas(s, 'target_key') then s.target_key else null,\n    trg: null,\n    batch_key: if std.objectHas(s, 'btch') then s.batch else if std.objectHas(s, 'batch_key') then s.batch_key else null,\n  },\n  id(type, settings): std.join('-', [std.md5(type)[:8], std.md5(std.toString(settings))[:8]]),\n};\n\n{\n  // Mirrors interfaces from the condition package.\n  cnd: $.condition,\n  condition: {\n    all(i): $.condition.meta.all({ conditions: helpers.make_array(i) }),\n    any(i): $.condition.meta.any({ conditions: helpers.make_array(i) }),\n    none(i): $.condition.meta.none({ conditions: helpers.make_array(i) }),\n    meta: {\n      all(settings={}): {\n        local default = {\n          object: $.config.object,\n          conditions: [],\n        },\n\n        type: 'meta_all',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      any(settings={}): {\n        local default = {\n          object: $.config.object,\n          conditions: [],\n        },\n\n        type: 'meta_any',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      none(settings={}): {\n        local default = {\n          object: $.config.object,\n          conditions: [],\n        },\n\n        type: 'meta_none',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n 
   fmt: $.condition.format,\n    format: {\n      json(settings={}): {\n        type: 'format_json',\n      },\n      mime(settings={}): {\n        local default = {\n          object: $.config.object,\n          type: null,\n        },\n\n        type: 'format_mime',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    num: $.condition.number,\n    number: {\n      default: {\n        object: $.config.object,\n        value: null,\n      },\n      eq(settings={}): $.condition.number.equal_to(settings=settings),\n      equal_to(settings={}): {\n        local default = $.condition.number.default,\n\n        type: 'number_equal_to',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      lt(settings={}): $.condition.number.less_than(settings=settings),\n      less_than(settings={}): {\n        local default = $.condition.number.default,\n\n        type: 'number_less_than',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      gt(settings={}): $.condition.number.greater_than(settings=settings),\n      greater_than(settings={}): {\n        local default = $.condition.number.default,\n\n        type: 'number_greater_than',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      bitwise: {\n        and(settings={}): {\n          local default = {\n            object: $.config.object,\n            value: null,\n          },\n\n          type: 'number_bitwise_and',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        not(settings={}): {\n          local default = {\n            object: $.config.object,\n          },\n\n          type: 'number_bitwise_not',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        or(settings={}): {\n          local default = {\n            object: $.config.object,\n            
value: null,\n          },\n\n          type: 'number_bitwise_or',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        xor(settings={}): {\n          local default = {\n            object: $.config.object,\n            value: null,\n          },\n\n          type: 'number_bitwise_xor',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      len: $.condition.number.length,\n      length: {\n        default: {\n          object: $.config.object,\n          value: null,\n          measurement: 'byte',\n        },\n        eq(settings={}): $.condition.number.length.equal_to(settings=settings),\n        equal_to(settings={}): {\n          local default = $.condition.number.length.default,\n\n          type: 'number_length_equal_to',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        gt(settings={}): $.condition.number.length.greater_than(settings=settings),\n        greater_than(settings={}): {\n          local default = $.condition.number.length.default,\n\n          type: 'number_length_greater_than',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        lt(settings={}): $.condition.number.length.less_than(settings=settings),\n        less_than(settings={}): {\n          local default = $.condition.number.length.default,\n\n          type: 'number_length_less_than',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    net: $.condition.network,\n    network: {\n      ip: {\n        default: {\n          object: $.config.object,\n        },\n        global_unicast(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_global_unicast',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        
link_local_multicast(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_link_local_multicast',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        link_local_unicast(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_link_local_unicast',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        loopback(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_loopback',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        multicast(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_multicast',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        private(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_private',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        unicast(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_unicast',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        unspecified(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_unspecified',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        valid(settings={}): {\n          local default = $.condition.network.ip.default,\n\n          type: 'network_ip_valid',\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    str: $.condition.string,\n    string: {\n      default: {\n        object: $.config.object,\n        value: null,\n     
 },\n      has(settings={}): $.condition.string.contains(settings=settings),\n      contains(settings={}): {\n        local default = $.condition.string.default,\n\n        type: 'string_contains',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      eq(settings={}): $.condition.string.equal_to(settings=settings),\n      equal_to(settings={}): {\n        local default = $.condition.string.default,\n\n        type: 'string_equal_to',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      gt(settings={}): $.condition.string.greater_than(settings=settings),\n      greater_than(settings={}): {\n        local default = $.condition.string.default,\n\n        type: 'string_greater_than',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      lt(settings={}): $.condition.string.less_than(settings=settings),\n      less_than(settings={}): {\n        local default = $.condition.string.default,\n\n        type: 'string_less_than',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      prefix(settings={}): $.condition.string.starts_with(settings=settings),\n      starts_with(settings={}): {\n        local default = $.condition.string.default,\n\n        type: 'string_starts_with',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      suffix(settings={}): $.condition.string.ends_with(settings=settings),\n      ends_with(settings={}): {\n        local default = $.condition.string.default,\n\n        type: 'string_ends_with',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      match(settings={}): {\n        local default = {\n          object: $.config.object,\n          pattern: null,\n        },\n\n        type: 'string_match',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    
util: $.condition.utility,\n    utility: {\n      random(settings={}): {\n        type: 'utility_random',\n      },\n    },\n  },\n  // Mirrors interfaces from the transform package.\n  tf: $.transform,\n  transform: {\n    agg: $.transform.aggregate,\n    aggregate: {\n      from: {\n        arr(settings={}): $.transform.aggregate.from.array(settings=settings),\n        array(settings={}): {\n          local type = 'aggregate_from_array',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        str(settings={}): $.transform.aggregate.from.string(settings=settings),\n        string(settings={}): {\n          local type = 'aggregate_from_string',\n          local default = {\n            id: helpers.id(type, settings),\n            separator: null,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      to: {\n        arr(settings={}): $.transform.aggregate.to.array(settings=settings),\n        array(settings={}): {\n          local type = 'aggregate_to_array',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n            batch: $.config.batch,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        str(settings={}): $.transform.aggregate.to.string(settings=settings),\n        string(settings={}): {\n          local type = 'aggregate_to_string',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            separator: null,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    
arr: $.transform.array,\n    array: {\n      join(settings={}): {\n        local type = 'array_join',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          separator: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      zip(settings={}): {\n        local type = 'array_zip',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    enrich: {\n      aws: {\n        dynamodb: {\n          query(settings={}): {\n            local type = 'enrich_aws_dynamodb_query',\n            local default = {\n              id: helpers.id(type, settings),\n              object: $.config.object,\n              aws: $.config.aws,\n              attributes: { partition_key: null, sort_key: null },\n              limit: 1,\n              scan_index_forward: false,\n            },\n\n            type: type,\n            settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n          },\n        },\n        lambda(settings={}): {\n          local type = 'enrich_aws_lambda',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n            aws: $.config.aws,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      dns: {\n        default: {\n          object: $.config.object,\n          request: $.config.request,\n        },\n        domain_lookup(settings={}): {\n          local type = 'enrich_dns_domain_lookup',\n          local default = $.transform.enrich.dns.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, 
helpers.abbv(settings))),\n        },\n        ip_lookup(settings={}): {\n          local type = 'enrich_dns_ip_lookup',\n          local default = $.transform.enrich.dns.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        txt_lookup(settings={}): {\n          local type = 'enrich_dns_txt_lookup',\n          local default = $.transform.enrich.dns.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      http: {\n        default: {\n          object: $.config.object,\n          request: $.config.request,\n          url: null,\n          headers: null,\n        },\n        get(settings={}): {\n          local type = 'enrich_http_get',\n          local default = $.transform.enrich.http.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        post(settings={}): {\n          local type = 'enrich_http_post',\n          local default = $.transform.enrich.http.default { body_key: null, id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      kv_store: {\n        default: {\n          object: $.config.object,\n          prefix: null,\n          kv_store: null,\n          close_kv_store: false,\n        },\n        iget: $.transform.enrich.kv_store.item.get,\n        iset: $.transform.enrich.kv_store.item.set,\n        item: {\n          get(settings={}): {\n            local type = 'enrich_kv_store_get',\n            local default = $.transform.enrich.kv_store.default { id: helpers.id(type, settings) },\n\n            type: type,\n            settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n     
     },\n          set(settings={}): {\n            local type = 'enrich_kv_store_set',\n            local default = $.transform.enrich.kv_store.default { ttl_key: null, ttl_offset: '0s', id: helpers.id(type, settings) },\n\n            type: type,\n            settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n          },\n        },\n        sadd: $.transform.enrich.kv_store.set.add,\n        set: {\n          add(settings={}): {\n            local type = 'enrich_kv_store_set_add',\n            local default = $.transform.enrich.kv_store.default { ttl_key: null, ttl_offset: '0s', id: helpers.id(type, settings) },\n\n            type: type,\n            settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n          },\n        },\n      },\n    },\n    fmt: $.transform.format,\n    format: {\n      default: {\n        object: $.config.object,\n      },\n      from: {\n        b64(settings={}): $.transform.format.from.base64(settings=settings),\n        base64(settings={}): {\n          local type = 'format_from_base64',\n          local default = $.transform.format.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        gz(settings={}): $.transform.format.from.gzip(settings=settings),\n        gzip(settings={}): {\n          local type = 'format_from_gzip',\n          local default = { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        parquet(settings={}): {\n          local type = 'format_from_parquet',\n          local default = { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        pretty_print(settings={}): {\n          local type = 'format_from_pretty_print',\n          local default = { id: 
helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        zip(settings={}): {\n          local type = 'format_from_zip',\n          local default = { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      to: {\n        b64(settings={}): $.transform.format.to.base64(settings=settings),\n        base64(settings={}): {\n          local type = 'format_to_base64',\n          local default = $.transform.format.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        gz(settings={}): $.transform.format.to.gzip(settings=settings),\n        gzip(settings={}): {\n          local type = 'format_to_gzip',\n          local default = { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    hash: {\n      default: {\n        object: $.config.object,\n      },\n      md5(settings={}): {\n        local type = 'hash_md5',\n        local default = $.transform.hash.default { id: helpers.id(type, settings) },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      sha256(settings={}): {\n        local type = 'hash_sha256',\n        local default = $.transform.hash.default { id: helpers.id(type, settings) },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    num: $.transform.number,\n    number: {\n      max(settings={}): $.transform.number.maximum(settings=settings),\n      maximum(settings={}): {\n        local type = 'number_maximum',\n        local default = {\n          id: helpers.id(type, settings),\n      
    object: $.config.object,\n          value: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      min(settings={}): $.transform.number.minimum(settings=settings),\n      minimum(settings={}): {\n        local type = 'number_minimum',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          value: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      math: {\n        default: {\n          object: $.config.object,\n        },\n        add(settings={}): $.transform.number.math.addition(settings=settings),\n        addition(settings={}): {\n          local type = 'number_math_addition',\n          local default = $.transform.number.math.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        sub(settings={}): $.transform.number.math.subtraction(settings=settings),\n        subtraction(settings={}): {\n          local type = 'number_math_subtraction',\n          local default = $.transform.number.math.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        mul(settings={}): $.transform.number.math.multiplication(settings=settings),\n        multiplication(settings={}): {\n          local type = 'number_math_multiplication',\n          local default = $.transform.number.math.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        div(settings={}): $.transform.number.math.division(settings=settings),\n        division(settings={}): {\n          local type = 'number_math_division',\n          local default = 
$.transform.number.math.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    meta: {\n      err(settings={}): {\n        local type = 'meta_err',\n        local default = {\n          id: helpers.id(type, settings),\n          transforms: null,\n          error_messages: ['.*'],\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      for_each(settings={}): {\n        local type = 'meta_for_each',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          transforms: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      kv_store: {\n        lock(settings={}): {\n          local type = 'meta_kv_store_lock',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object { ttl_key: null },\n            transforms: null,\n            kv_store: null,\n            prefix: null,\n            ttl_offset: '0s',\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      metric: {\n        duration(settings={}): {\n          local type = 'meta_metric_duration',\n          local default = {\n            id: helpers.id(type, settings),\n            metric: $.config.metric,\n            transforms: null,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      retry(settings={}): {\n        local type = 'meta_retry',\n        local default = {\n          id: helpers.id(type, settings),\n          retry: $.config.retry,\n          transforms: null,\n          condition: null,\n          error_messages: ['.*'],\n   
     },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      switch(settings={}): {\n        local type = 'meta_switch',\n        local default = {\n          id: helpers.id(type, settings),\n          cases: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    net: $.transform.network,\n    network: {\n      domain: {\n        default: {\n          object: $.config.object,\n        },\n        registered_domain(settings={}): {\n          local type = 'network_domain_registered_domain',\n          local default = $.transform.network.domain.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        subdomain(settings={}): {\n          local type = 'network_domain_subdomain',\n          local default = $.transform.network.domain.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        tld(settings={}): $.transform.network.domain.top_level_domain(settings=settings),\n        top_level_domain(settings={}): {\n          local type = 'network_domain_top_level_domain',\n          local default = $.transform.network.domain.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    obj: $.transform.object,\n    object: {\n      default: {\n        object: $.config.object,\n      },\n      cp(settings={}): $.transform.object.copy(settings=settings),\n      copy(settings={}): {\n        local type = 'object_copy',\n        local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, 
helpers.abbv(settings))),\n      },\n      del(settings={}): $.transform.object.delete(settings=settings),\n      delete(settings={}): {\n        local type = 'object_delete',\n        local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      insert(settings={}): {\n        local type = 'object_insert',\n        local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      jq(settings={}): {\n        local type = 'object_jq',\n        local default = {\n          id: helpers.id(type, settings),\n          filter: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      to: {\n        bool(settings={}): $.transform.object.to.boolean(settings=settings),\n        boolean(settings={}): {\n          local type = 'object_to_boolean',\n          local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        float(settings={}): {\n          local type = 'object_to_float',\n          local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        int(settings={}): $.transform.object.to.integer(settings=settings),\n        integer(settings={}): {\n          local type = 'object_to_integer',\n          local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        str(settings={}): 
$.transform.object.to.string(settings=settings),\n        string(settings={}): {\n          local type = 'object_to_string',\n          local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        uint(settings={}): $.transform.object.to.unsigned_integer(settings=settings),\n        unsigned_integer(settings={}): {\n          local type = 'object_to_unsigned_integer',\n          local default = $.transform.object.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n    },\n    send: {\n      aws: {\n        dynamodb: {\n          put(settings={}): {\n            local type = 'send_aws_dynamodb_put',\n            local default = {\n              id: helpers.id(type, settings),\n              batch: $.config.batch,\n              aws: $.config.aws,\n              auxiliary_transforms: null,\n            },\n\n            local s = std.mergePatch(settings, {\n              auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n              aux_tforms: null,\n            }),\n\n            type: type,\n            settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n          },\n        },\n        firehose(settings={}): $.transform.send.aws.data_firehose(settings=settings),\n        data_firehose(settings={}): {\n          local type = 'send_aws_data_firehose',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            auxiliary_transforms: null,\n          },\n\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if 
std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n        eventbridge(settings={}): {\n          local type = 'send_aws_eventbridge',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            auxiliary_transforms: null,\n            description: null,\n          },\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n        kinesis_data_stream(settings={}): {\n          local type = 'send_aws_kinesis_data_stream',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            auxiliary_transforms: null,\n            use_batch_key_as_partition_key: false,\n            enable_record_aggregation: false,\n          },\n\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n        lambda(settings={}): {\n          local type = 'send_aws_lambda',\n          local default = {\n            id: helpers.id(type, 
settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            auxiliary_transforms: null,\n          },\n\n          type: type,\n          settings: std.mergePatch(default, helpers.abbv(settings)),\n        },\n        s3(settings={}): {\n          local type = 'send_aws_s3',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            file_path: $.file_path,\n            auxiliary_transforms: null,\n            storage_class: 'STANDARD',\n          },\n\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n        sns(settings={}): {\n          local type = 'send_aws_sns',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            auxiliary_transforms: null,\n          },\n\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n        sqs(settings={}): {\n          local type = 'send_aws_sqs',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            aws: $.config.aws,\n            auxiliary_transforms: null,\n          },\n\n          local s = std.mergePatch(settings, {\n            
auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n      },\n      gcp: {\n        storage(settings={}): {\n          local type = 'send_gcp_storage',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            gcp: $.config.gcp,\n            auxiliary_transforms: null,\n          },\n\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n      },\n      file(settings={}): {\n        local type = 'send_file',\n        local default = {\n          id: helpers.id(type, settings),\n          batch: $.config.batch,\n          auxiliary_transforms: null,\n          file_path: $.file_path,\n        },\n\n        local s = std.mergePatch(settings, {\n          auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n          aux_tforms: null,\n        }),\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n      },\n      http: {\n        post(settings={}): {\n          local type = 'send_http_post',\n          local default = {\n            id: helpers.id(type, settings),\n            batch: $.config.batch,\n            auxiliary_transforms: null,\n            url: null,\n            headers: 
null,\n          },\n\n          local s = std.mergePatch(settings, {\n            auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n            aux_tforms: null,\n            headers: if std.objectHas(settings, 'headers') then settings.headers else if std.objectHas(settings, 'hdr') then settings.hdr else null,\n            hdr: null,\n          }),\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n        },\n      },\n      stdout(settings={}): {\n        local type = 'send_stdout',\n        local default = {\n          id: helpers.id(type, settings),\n          batch: $.config.batch,\n          auxiliary_transforms: null,\n        },\n\n        local s = std.mergePatch(settings, {\n          auxiliary_transforms: if std.objectHas(settings, 'auxiliary_transforms') then settings.auxiliary_transforms else if std.objectHas(settings, 'aux_tforms') then settings.aux_tforms else null,\n          aux_tforms: null,\n        }),\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n      },\n    },\n    str: $.transform.string,\n    string: {\n      append(settings={}): {\n        local type = 'string_append',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          suffix: null,\n          suffix_key: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      capture(settings={}): {\n        local type = 'string_capture',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          pattern: null,\n          count: 0,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      repl: 
$.transform.string.replace,\n      replace(settings={}): {\n        local type = 'string_replace',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          pattern: null,\n          replacement: null,\n        },\n\n        local s = std.mergePatch(settings, {\n          pattern: settings.pattern,\n          replacement: if std.objectHas(settings, 'replacement') then settings.replacement else if std.objectHas(settings, 'repl') then settings.repl else null,\n          repl: null,\n        }),\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n      },\n      split(settings={}): {\n        local type = 'string_split',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n          separator: null,\n        },\n\n        local s = std.mergePatch(settings, {\n          separator: if std.objectHas(settings, 'separator') then settings.separator else if std.objectHas(settings, 'sep') then settings.sep else null,\n          sep: null,\n        }),\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(s))),\n      },\n      to: {\n        default: {\n          object: $.config.object,\n        },\n        lower(settings={}): {\n          local type = 'string_to_lower',\n          local default = $.transform.string.to.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        upper(settings={}): {\n          local type = 'string_to_upper',\n          local default = $.transform.string.to.default { id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        snake(settings={}): {\n          local type = 'string_to_snake',\n          local default = $.transform.string.to.default 
{ id: helpers.id(type, settings) },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      uuid(settings={}): {\n        local type = 'string_uuid',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    test: {\n      message(settings={}): {\n        local type = 'test_message',\n        local default = {\n          id: helpers.id(type, settings),\n          value: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    time: {\n      from: {\n        str(settings={}): $.transform.time.from.string(settings=settings),\n        string(settings={}): {\n          local type = 'time_from_string',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n            format: null,\n            location: 'UTC',\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        unix(settings={}): {\n          local type = 'time_from_unix',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        unix_milli(settings={}): {\n          local type = 'time_from_unix_milli',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      now(settings={}): {\n        local type = 'time_now',\n        local default = {\n 
         id: helpers.id(type, settings),\n          object: $.config.object,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      to: {\n        str(settings={}): $.transform.time.to.string(settings=settings),\n        string(settings={}): {\n          local type = 'time_to_string',\n          local default = {\n            id: helpers.id(type, settings),\n            object: $.config.object,\n            format: null,\n            location: 'UTC',\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      unix(settings={}): {\n        local type = 'time_to_unix',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      unix_milli(settings={}): {\n        local type = 'time_to_unix_milli',\n        local default = {\n          id: helpers.id(type, settings),\n          object: $.config.object,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    util: $.transform.utility,\n    utility: {\n      control(settings={}): {\n        local type = 'utility_control',\n        local default = {\n          id: helpers.id(type, settings),\n          batch: $.config.batch,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      delay(settings={}): {\n        local type = 'utility_delay',\n        local default = {\n          id: helpers.id(type, settings),\n          duration: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      drop(settings={}): {\n        local type = 'utility_drop',\n        
local default = {\n          id: helpers.id(type, settings),\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      err(settings={}): {\n        local type = 'utility_err',\n        local default = {\n          id: helpers.id(type, settings),\n          message: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n      metric: {\n        bytes(settings={}): {\n          local type = 'utility_metric_bytes',\n          local default = {\n            id: helpers.id(type, settings),\n            metric: $.config.metric,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        count(settings={}): {\n          local type = 'utility_metric_count',\n          local default = {\n            id: helpers.id(type, settings),\n            metric: $.config.metric,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n        freshness(settings={}): {\n          local type = 'utility_metric_freshness',\n          local default = {\n            id: helpers.id(type, settings),\n            threshold: null,\n            metric: $.config.metric,\n            object: $.config.object,\n          },\n\n          type: type,\n          settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n        },\n      },\n      secret(settings={}): {\n        local type = 'utility_secret',\n        local default = {\n          id: helpers.id(type, settings),\n          secret: null,\n        },\n\n        type: type,\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n  },\n  // Mirrors interfaces from the internal/kv_store package.\n  kv_store: {\n    aws: {\n      dynamodb(settings={}): {\n        local default = {\n        
  aws: $.config.aws,\n          attributes: { partition_key: null, sort_key: null, value: null, ttl: null },\n          consistent_read: false,\n        },\n\n        type: 'aws_dynamodb',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    csv_file(settings={}): {\n      local default = { file: null, column: null, delimiter: ',', header: null },\n\n      type: 'csv_file',\n      settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n    },\n    json_file(settings=$.defaults.kv_store.json_file.settings): {\n      local default = { file: null, is_lines: false },\n\n      type: 'json_file',\n      settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n    },\n    memory(settings={}): {\n      local default = { capacity: 1024 },\n\n      type: 'memory',\n      settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n    },\n    mmdb(settings={}): {\n      local default = { file: null },\n\n      type: 'mmdb',\n      settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n    },\n    text_file(settings={}): {\n      local default = { file: null },\n\n      type: 'text_file',\n      settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n    },\n  },\n  // Mirrors interfaces from the internal/secrets package.\n  secrets: {\n    default: { id: null, ttl: null },\n    aws: {\n      secrets_manager(settings={}): {\n        local default = {\n          aws: $.config.aws,\n          id: null,\n          ttl_offset: null,\n        },\n\n        type: 'aws_secrets_manager',\n        settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n      },\n    },\n    environment_variable(settings={}): {\n      local default = { id: null, name: null, ttl_offset: null },\n\n      type: 'environment_variable',\n      settings: std.prune(std.mergePatch(default, helpers.abbv(settings))),\n    },\n  },\n  // Mirrors structs from the internal/config 
package.\n  config: {\n    aws: { arn: null, assume_role_arn: null },\n    gcp: { resource: null },\n    batch: { count: 1000, size: 1000 * 1000, duration: '1m' },\n    metric: { name: null, attributes: null, destination: null },\n    object: { source_key: null, target_key: null, batch_key: null },\n    request: { timeout: '1s' },\n    retry: { count: 3, delay: '1s' },\n  },\n  // Mirrors config from the internal/file package.\n  file_path: { prefix: null, time_format: '2006/01/02', uuid: true, suffix: null },\n}\n"
  },
  {
    "path": "substation_test.go",
    "content": "package substation_test\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"testing\"\n\n\t\"github.com/brexhq/substation/v2\"\n\t\"github.com/brexhq/substation/v2/config\"\n\t\"github.com/brexhq/substation/v2/message\"\n\t\"github.com/brexhq/substation/v2/transform\"\n)\n\nfunc ExampleSubstation() {\n\t// Substation applications rely on a context for cancellation and timeouts.\n\tctx := context.Background()\n\n\t// Define a configuration. For native Substation applications, this is managed by Jsonnet.\n\t//\n\t// This example copies an object's value and prints the data to stdout.\n\tconf := []byte(`\n\t\t{\n\t\t\t\"transforms\":[\n\t\t\t\t{\"type\":\"object_copy\",\"settings\":{\"object\":{\"source_key\":\"a\",\"target_key\":\"c\"}}},\n\t\t\t\t{\"type\":\"send_stdout\"}\n\t\t\t]\n\t\t}\n\t`)\n\n\tcfg := substation.Config{}\n\tif err := json.Unmarshal(conf, &cfg); err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Create a new Substation instance.\n\tsub, err := substation.New(ctx, cfg)\n\tif err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Print the Substation configuration.\n\tfmt.Println(sub)\n\n\t// Substation instances process data defined as a Message. Messages can be processed\n\t// individually or in groups. This example processes multiple messages as a group.\n\tmsg := []*message.Message{\n\t\t// The first message is a data message. Only data messages are transformed.\n\t\tmessage.New().SetData([]byte(`{\"a\":\"b\"}`)),\n\t\t// The second message is a ctrl message. ctrl messages flush the pipeline.\n\t\tmessage.New().AsControl(),\n\t}\n\n\t// Transform the group of messages. 
In this example, results are not used.\n\tif _, err := sub.Transform(ctx, msg...); err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Output:\n\t// {\"transforms\":[{\"type\":\"object_copy\",\"settings\":{\"object\":{\"source_key\":\"a\",\"target_key\":\"c\"}}},{\"type\":\"send_stdout\",\"settings\":null}]}\n\t// {\"a\":\"b\",\"c\":\"b\"}\n}\n\n// Custom applications should embed the Substation configuration and\n// add additional configuration options.\ntype customConfig struct {\n\tsubstation.Config\n\n\tAuth struct {\n\t\tUsername string `json:\"username\"`\n\t\t// Please don't store passwords in configuration files, this is only an example!\n\t\tPassword string `json:\"password\"`\n\t} `json:\"auth\"`\n}\n\n// String returns an example string representation of the custom configuration.\nfunc (c customConfig) String() string {\n\treturn fmt.Sprintf(\"%s:%s\", c.Auth.Username, c.Auth.Password)\n}\n\nfunc Example_substationCustomConfig() {\n\t// Substation applications rely on a context for cancellation and timeouts.\n\tctx := context.Background()\n\n\t// Define and load the custom configuration. 
This config includes a username\n\t// and password for authentication.\n\tconf := []byte(`\n\t\t{\n\t\t\t\"transforms\":[\n\t\t\t\t{\"type\":\"object_copy\",\"settings\":{\"object\":{\"source_key\":\"a\",\"target_key\":\"c\"}}},\n\t\t\t\t{\"type\":\"send_stdout\"}\n\t\t\t],\n\t\t\t\"auth\":{\n\t\t\t\t\"username\":\"foo\",\n\t\t\t\t\"password\":\"bar\"\n\t\t\t}\n\t\t}\n\t`)\n\n\tcfg := customConfig{}\n\tif err := json.Unmarshal(conf, &cfg); err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Create a new Substation instance from the embedded configuration.\n\tsub, err := substation.New(ctx, cfg.Config)\n\tif err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Print the Substation configuration.\n\tfmt.Println(sub)\n\n\t// Print the custom configuration.\n\tfmt.Println(cfg)\n\n\t// Output:\n\t// {\"transforms\":[{\"type\":\"object_copy\",\"settings\":{\"object\":{\"source_key\":\"a\",\"target_key\":\"c\"}}},{\"type\":\"send_stdout\",\"settings\":null}]}\n\t// foo:bar\n}\n\nfunc Example_substationCustomTransforms() {\n\t// Substation applications rely on a context for cancellation and timeouts.\n\tctx := context.Background()\n\n\t// Define and load the configuration. 
This config includes a transform that\n\t// is not part of the standard Substation package.\n\tconf := []byte(`\n\t\t{\n\t\t\t\"transforms\":[\n\t\t\t\t{\"type\":\"utility_duplicate\"},\n\t\t\t\t{\"type\":\"send_stdout\"}\n\t\t\t]\n\t\t}\n\t`)\n\n\tcfg := substation.Config{}\n\tif err := json.Unmarshal(conf, &cfg); err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Create a new Substation instance with a custom transform factory for loading\n\t// the custom transform.\n\tsub, err := substation.New(ctx, cfg, substation.WithTransformFactory(customFactory))\n\tif err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\tmsg := []*message.Message{\n\t\tmessage.New().SetData([]byte(`{\"a\":\"b\"}`)),\n\t\tmessage.New().AsControl(),\n\t}\n\n\t// Transform the group of messages. In this example, results are not used.\n\tif _, err := sub.Transform(ctx, msg...); err != nil {\n\t\t// Handle error.\n\t\tpanic(err)\n\t}\n\n\t// Output:\n\t// {\"a\":\"b\"}\n\t// {\"a\":\"b\"}\n}\n\n// customFactory is used in the custom transform example to load the custom transform.\nfunc customFactory(ctx context.Context, cfg config.Config) (transform.Transformer, error) {\n\tswitch cfg.Type {\n\t// Usually a custom transform requires configuration, but this\n\t// is a toy example. 
Customizable transforms should have a new\n\t// function that returns a new instance of the configured transform.\n\tcase \"utility_duplicate\":\n\t\treturn &utilityDuplicate{Count: 1}, nil\n\t}\n\n\treturn transform.New(ctx, cfg)\n}\n\n// Duplicates a message.\ntype utilityDuplicate struct {\n\t// Count is the number of times to duplicate the message.\n\tCount int `json:\"count\"`\n}\n\nfunc (t *utilityDuplicate) Transform(ctx context.Context, msg *message.Message) ([]*message.Message, error) {\n\t// Always return control messages.\n\tif msg.IsControl() {\n\t\treturn []*message.Message{msg}, nil\n\t}\n\n\toutput := []*message.Message{msg}\n\tfor i := 0; i < t.Count; i++ {\n\t\toutput = append(output, msg)\n\t}\n\n\treturn output, nil\n}\n\nfunc FuzzTestSubstation(f *testing.F) {\n\ttestcases := [][]byte{\n\t\t[]byte(`{\"transforms\":[{\"type\":\"utility_duplicate\"}]}`),\n\t\t[]byte(`{\"transforms\":[{\"type\":\"utility_duplicate\", \"count\":2}]}`),\n\t\t[]byte(`{\"transforms\":[{\"type\":\"unknown_type\"}]}`),\n\t\t[]byte(`{\"transforms\":[{\"type\":\"utility_duplicate\", \"count\":\"invalid\"}]}`),\n\t\t[]byte(``),\n\t}\n\n\tfor _, tc := range testcases {\n\t\tf.Add(tc)\n\t}\n\n\tf.Fuzz(func(t *testing.T, data []byte) {\n\t\tctx := context.TODO()\n\t\tvar cfg substation.Config\n\t\terr := json.Unmarshal(data, &cfg)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\n\t\tsub, err := substation.New(ctx, cfg)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\n\t\tmsg := message.New().SetData(data)\n\t\t_, err = sub.Transform(ctx, msg)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t})\n}\n"
  },
  {
    "path": "substation_test.jsonnet",
    "content": "local sub = std.extVar('sub');\n\nlocal src = 'source';\n\n{\n  condition: {\n    all: sub.condition.all([$.condition.string.contains, $.condition.string.match]),\n    any: sub.condition.any([$.condition.string.contains, $.condition.string.match]),\n    none: sub.condition.none([$.condition.string.contains, $.condition.string.match]),\n    meta: {\n      all: sub.condition.meta.all({ inspectors: [$.condition.string.contains, $.condition.string.match] }),\n      any: sub.condition.meta.any({ inspectors: [$.condition.string.contains, $.condition.string.match] }),\n      none: sub.condition.meta.none({ inspectors: [$.condition.string.contains, $.condition.string.match] }),\n    },\n    string: {\n      contains: sub.condition.string.contains({ obj: { src: src }, value: 'z' }),\n      match: sub.condition.string.match({ obj: { src: src }, pattern: 'z' }),\n    },\n  },\n}\n"
  }
]