[
  {
    "path": ".github/workflows/linters.yaml",
    "content": "name: Linters\n\non:\n  pull_request:\n    branches: [\"main\"]\n\njobs:\n  cargo-fmt-check:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@stable\n      - name: Check formatting\n        run: cargo fmt --check\n\n  cargo-clippy-check:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v4\n      - uses: dtolnay/rust-toolchain@stable\n      - name: Check Clippy\n        run: cargo clippy -- -W clippy::perf -W clippy::correctness -W clippy::suspicious --deny=warnings\n"
  },
  {
    "path": ".github/workflows/release.yaml",
    "content": "name: Release\n\npermissions:\n  contents: write\n\non:\n  workflow_dispatch:\n  push:\n    tags:\n      - v*\n\njobs:\n  create-release:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v4\n      - uses: taiki-e/create-gh-release-action@v1\n        with:\n          changelog: CHANGELOG.md\n          token: ${{ secrets.GITHUB_TOKEN }}\n\n  upload-assets:\n    needs: create-release\n    strategy:\n      matrix:\n        include:\n          - target: x86_64-apple-darwin\n            os: macos-latest\n          - target: aarch64-apple-darwin\n            os: macos-latest\n    runs-on: ${{ matrix.os }}\n    steps:\n      - uses: actions/checkout@v4\n      - name: Install cross-compilation tools\n        uses: taiki-e/setup-cross-toolchain-action@v1\n        with:\n          target: ${{ matrix.target }}\n        if: startsWith(matrix.os, 'ubuntu')\n      - name: Upload Rust binary\n        uses: taiki-e/upload-rust-binary-action@v1\n        with:\n          bin: container-compose\n          target: ${{ matrix.target }}\n          archive: $bin-$tag-$target\n          token: ${{ secrets.GITHUB_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": "/target\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n## [0.0.1-alpha.5] - 2025-06-19\n\n### 🚀 Features\n\n- Support `cpu_count` and `mem_limit` (#20)\n\n### 🐛 Bug Fixes\n\n- Allow empty version argument (#21)\n\n### 💼 Other\n\n- *(release)* Update homebrew file to v0.0.1.alpha-4 (#17)\n- Update cargo.toml version (#18)\n\n### ⚙️ Miscellaneous Tasks\n\n- Add instructions to run locally (#19)\n\n## [0.0.1-alpha.4] - 2025-06-18\n\n### 🚀 Features\n\n- *(homebrew)* Bump brewfile to v0.0.1-alpha.3 (#7)\n\n### 🐛 Bug Fixes\n\n- *(homebrew)* Change sha256 to v0.0.1-alpha.3 (#10)\n- Allow empty ports, env vars, and volumes (#12)\n\n### 💼 Other\n\n- *(release)* Update CHANGELOG for v0.0.1-alpha.4 (#16)\n\n## [0.0.1-alpha.3] - 2025-06-17\n\n### 🐛 Bug Fixes\n\n- *(utils)* Read .yml file extension for docker-compose config files\n\n### 💼 Other\n\n- *(release)* Update CHANGELOG for v0.0.1-alpha.3\n\n### 📚 Documentation\n\n- Update README to add container-compose installation via crates.io\n\n### ⚙️ Miscellaneous Tasks\n\n- Add Formula with brewfile to install CLI with brew\n- Add initial README file\n- Update README\n- Update Cargo.toml\n- Add excludes field on Cargo.toml\n\n## [0.0.1-alpha.2] - 2025-06-15\n\n### 💼 Other\n\n- Add manual dispatch option\n- Add CHANGELOG.md\n- Trigger v0.0.1-alpha.2 release\n\n### ⚙️ Miscellaneous Tasks\n\n- Fix triggers\n\n## [0.0.1-alpha.1] - 2025-06-15\n\n### 🚀 Features\n\n- Add poc implementation for container run command\n- Add initial cli setup with up command\n- Improve code readability with structs\n- Add support for mount binding\n- Add support for command argument\n- Add support for down command\n\n### 💼 Other\n\n- Add initial github action workflow to build cli\n\n### ⚙️ Miscellaneous Tasks\n\n- Add nix flake file\n- Add MIT LICENSE file\n- Remove dbg!\n- Remove comment\n\n<!-- generated by git-cliff -->\n"
  },
  {
    "path": "Cargo.toml",
    "content": "[package]\nname = \"container-compose\"\nversion = \"0.0.1-alpha.5\"\nedition = \"2024\"\nlicense-file = \"LICENSE\"\ndescription = \"A docker-compose like tool for Apple Containers\"\nhomepage = \"https://github.com/noghartt/container-compose\"\nrepository = \"https://github.com/noghartt/container-compose\"\nreadme = \"README.md\"\nkeywords = [\"devops\"]\ncategories = [\"command-line-utilities\"]\nexclude = [\"scripts/*\", \"examples/\"]\n\n[dependencies]\nclap = { version = \"4.5.40\", features = [\"derive\"] }\nclap_mangen = \"0.2.27\"\nserde = { version = \"1.0.219\", features = [\"derive\"] }\nserde_json = \"1.0.140\"\nserde_yaml = \"0.9.34\"\n"
  },
  {
    "path": "Formula/container-compose.rb",
    "content": "class ContainerCompose < Formula\n  desc \"A docker-compose like tool for Apple Containers\"\n  homepage \"https://github.com/noghartt/container-compose\"\n\n  version \"0.0.1-alpha.5\"\n\n  depends_on \"socat\"\n\n  on_macos do\n    on_arm do\n      url \"https://github.com/noghartt/container-compose/releases/download/v#{version}/container-compose-v#{version}-aarch64-apple-darwin.tar.gz\"\n      sha256 \"8b6f9d35b8a03eb14c059b347aa2b9e705e6c75c10496ba462a8cf7d58545973\"\n    end\n\n    on_intel do\n      url \"https://github.com/noghartt/container-compose/releases/download/v#{version}/container-compose-v#{version}-x86_64-apple-darwin.tar.gz\"\n      sha256 \"a409ca44b8cec7ac540c0db453f6f8e3d22f29c6cbb4f8cb21ecfe946011610f\"\n    end\n  end\n\n  def install\n    bin.install \"container-compose\"\n  end\nend\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2025 Guilherme \"noghartt\" Ananias\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "README.md",
    "content": "> [!NOTE]\n> \n> ⚠️ container-compose is being put in archive-mode due to me not having time to maintain this project.\n> If you want to use a maintained tool, I recommend: [container-compose](https://github.com/mcrich23/container-compose).\n\n# container-compose\n\nA CLI to let you use the Apple [container](https://github.com/apple/container) like a `docker-compose`\nconfig file.\n\n> [!WARNING]\n> This tool is on \"alpha\" mode, which means that you can find a lot of bugs or missing features.\n> If you find anything, feel free to open an issue. Thank you! :)\n\n## How to install\n\n### Installing via Homebrew\n\n```sh\nbrew tap noghartt/container-compose https://github.com/noghartt/container-compose.git\nbrew install noghartt/container-compose/container-compose\n```\n\n### Installing via cargo\n\n```sh\ncargo install container-compose\n```\n\n### Downloading binary\n\nWe have made a `.tar.gz` containing the binary available on the release page. \nIf you prefer to have your binary from there.\n\n### Running container-compose\n\nTo run `container-compose` from the source code, first ensure you have [Rust](https://www.rust-lang.org/tools/install) installed.\n\n1. Clone the repository:\n   ```sh\n   git clone https://github.com/noghartt/container-compose.git\n   cd container-compose\n   ```\n\n2. Build the project:\n   ```sh\n   cargo build --release\n   ```\n\n3. Run `container-compose` using Cargo:\n   ```sh\n   cargo run -- <command> [options]\n   ```\n   For example, to start services:\n   ```sh\n   cargo run -- up\n   ```\n\nYou can also run the compiled binary directly:\n```sh\n./target/release/container-compose <command> [options]\n```\n\nReplace `<command>` and `[options]` with the desired subcommand and arguments.\n"
  },
  {
    "path": "cliff.toml",
    "content": "# git-cliff ~ configuration file\n# https://git-cliff.org/docs/configuration\n\n\n[changelog]\n# A Tera template to be rendered as the changelog's footer.\n# See https://keats.github.io/tera/docs/#introduction\nheader = \"\"\"\n# Changelog\\n\nAll notable changes to this project will be documented in this file.\\n\n\"\"\"\n# A Tera template to be rendered for each release in the changelog.\n# See https://keats.github.io/tera/docs/#introduction\nbody = \"\"\"\n{% if version %}\\\n    ## [{{ version | trim_start_matches(pat=\"v\") }}] - {{ timestamp | date(format=\"%Y-%m-%d\") }}\n{% else %}\\\n    ## [unreleased]\n{% endif %}\\\n{% for group, commits in commits | group_by(attribute=\"group\") %}\n    ### {{ group | striptags | trim | upper_first }}\n    {% for commit in commits %}\n        - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\\\n            {% if commit.breaking %}[**breaking**] {% endif %}\\\n            {{ commit.message | upper_first }}\\\n    {% endfor %}\n{% endfor %}\\n\n\"\"\"\n# A Tera template to be rendered as the changelog's footer.\n# See https://keats.github.io/tera/docs/#introduction\nfooter = \"\"\"\n<!-- generated by git-cliff -->\n\"\"\"\n# Remove leading and trailing whitespaces from the changelog's body.\ntrim = true\n# Render body even when there are no releases to process.\nrender_always = true\n# An array of regex based postprocessors to modify the changelog.\npostprocessors = [\n    # Replace the placeholder <REPO> with a URL.\n    #{ pattern = '<REPO>', replace = \"https://github.com/orhun/git-cliff\" },\n]\n# render body even when there are no releases to process\n# render_always = true\n# output file path\n# output = \"test.md\"\n\n[git]\n# Parse commits according to the conventional commits specification.\n# See https://www.conventionalcommits.org\nconventional_commits = true\n# Exclude commits that do not match the conventional commits specification.\nfilter_unconventional = true\n# Require all commits 
to be conventional.\n# Takes precedence over filter_unconventional.\nrequire_conventional = false\n# Split commits on newlines, treating each line as an individual commit.\nsplit_commits = false\n# An array of regex based parsers to modify commit messages prior to further processing.\ncommit_preprocessors = [\n    # Replace issue numbers with link templates to be updated in `changelog.postprocessors`.\n    #{ pattern = '\\((\\w+\\s)?#([0-9]+)\\)', replace = \"([#${2}](<REPO>/issues/${2}))\"},\n    # Check spelling of the commit message using https://github.com/crate-ci/typos.\n    # If the spelling is incorrect, it will be fixed automatically.\n    #{ pattern = '.*', replace_command = 'typos --write-changes -' },\n]\n# Prevent commits that are breaking from being excluded by commit parsers.\nprotect_breaking_commits = false\n# An array of regex based parsers for extracting data from the commit message.\n# Assigns commits to groups.\n# Optionally sets the commit's scope and can decide to exclude commits from further processing.\ncommit_parsers = [\n    { message = \"^feat\", group = \"<!-- 0 -->🚀 Features\" },\n    { message = \"^fix\", group = \"<!-- 1 -->🐛 Bug Fixes\" },\n    { message = \"^doc\", group = \"<!-- 3 -->📚 Documentation\" },\n    { message = \"^perf\", group = \"<!-- 4 -->⚡ Performance\" },\n    { message = \"^refactor\", group = \"<!-- 2 -->🚜 Refactor\" },\n    { message = \"^style\", group = \"<!-- 5 -->🎨 Styling\" },\n    { message = \"^test\", group = \"<!-- 6 -->🧪 Testing\" },\n    { message = \"^chore\\\\(release\\\\): prepare for\", skip = true },\n    { message = \"^chore\\\\(deps.*\\\\)\", skip = true },\n    { message = \"^chore\\\\(pr\\\\)\", skip = true },\n    { message = \"^chore\\\\(pull\\\\)\", skip = true },\n    { message = \"^chore|^ci\", group = \"<!-- 7 -->⚙️ Miscellaneous Tasks\" },\n    { body = \".*security\", group = \"<!-- 8 -->🛡️ Security\" },\n    { message = \"^revert\", group = \"<!-- 9 -->◀️ Revert\" },\n    { message = 
\".*\", group = \"<!-- 10 -->💼 Other\" },\n]\n# Exclude commits that are not matched by any commit parser.\nfilter_commits = false\n# An array of link parsers for extracting external references, and turning them into URLs, using regex.\nlink_parsers = []\n# Include only the tags that belong to the current branch.\nuse_branch_tags = false\n# Order releases topologically instead of chronologically.\ntopo_order = false\n# Order releases topologically instead of chronologically.\ntopo_order_commits = true\n# Order of commits in each group/release within the changelog.\n# Allowed values: newest, oldest\nsort_commits = \"oldest\"\n# Process submodules commits\nrecurse_submodules = false\n"
  },
  {
    "path": "examples/docker-compose.yaml",
    "content": "version: '3.8'\n\nservices:\n  postgres:\n    image: postgres:15\n    ports:\n      - 5432:5432\n    command: postgres\n    environment:\n      - POSTGRES_PASSWORD=postgres\n    cpu_count: 1\n    mem_limit: 2gb\n"
  },
  {
    "path": "flake.nix",
    "content": "{\n  inputs = {\n    nixpkgs.url = \"github:nixos/nixpkgs/release-23.11\";\n    rust-overlay.url = \"github:oxalica/rust-overlay\";\n    rust-overlay.inputs.nixpkgs.follows = \"nixpkgs\";\n    flake-utils.url = \"github:numtide/flake-utils\";\n  };\n\n  outputs = {\n    self,\n    nixpkgs,\n    rust-overlay,\n    flake-utils,\n  }:\n    flake-utils.lib.eachDefaultSystem (system: let\n      overlays = [(import rust-overlay)];\n      pkgs = import nixpkgs {\n        inherit system overlays;\n      };\n    in {\n      devShells.default = pkgs.mkShell {\n        name = \"container-compose\";\n\n        buildInputs = with pkgs; [\n          darwin.apple_sdk.frameworks.SystemConfiguration\n          darwin.apple_sdk.frameworks.CoreServices\n          darwin.apple_sdk.frameworks.CoreFoundation\n        ];\n\n        nativeBuildInputs = with pkgs; [\n          (rust-bin.stable.\"1.85.0\".default.override { extensions = [ \"rust-src\" \"rust-analyzer\" ]; } )\n        ];\n      };\n    });\n}\n"
  },
  {
    "path": "src/cli.rs",
    "content": "use clap::{Parser, Subcommand};\n\n#[derive(Parser)]\n#[command(author, version, about, long_about = None)]\npub struct Cli {\n    #[arg(short, long)]\n    pub file: Option<String>,\n\n    #[command(subcommand)]\n    pub command: Option<Command>,\n}\n\n#[derive(Subcommand)]\npub enum Command {\n    #[command(about = \"Create and run containers\")]\n    // Usage:  docker compose up [OPTIONS] [SERVICE...]\n    // Follow this structure for the Up command\n    Up {\n        #[arg(short, long)]\n        detach: bool,\n\n        #[arg()]\n        service: Vec<String>,\n    },\n\n    #[command(about = \"Stop and remove containers\")]\n    Down,\n}\n"
  },
  {
    "path": "src/container.rs",
    "content": "use serde::Deserialize;\nuse std::{collections::HashMap, process::Command};\n\n#[allow(dead_code, unused_variables)]\npub fn get_containers_list() -> Result<Vec<Container>, ()> {\n    let mut command = Command::new(\"container\");\n    command.arg(\"ls\").arg(\"--format\").arg(\"json\");\n\n    let Ok(output) = command.output() else {\n        eprintln!(\"Failed to get containers list\");\n        return Err(());\n    };\n\n    let Ok(containers) = serde_json::from_slice(&output.stdout) else {\n        eprintln!(\"Failed to parse containers list\");\n        return Err(());\n    };\n\n    Ok(containers)\n}\n\npub fn stop_container(container_ids: Vec<String>) -> Result<(), ()> {\n    let mut command = Command::new(\"container\");\n    command.arg(\"stop\");\n\n    for id in container_ids {\n        command.arg(id);\n    }\n\n    let Ok(_) = command.output() else {\n        eprintln!(\"Failed to stop containers\");\n        return Err(());\n    };\n\n    Ok(())\n}\n\npub fn remove_container(container_ids: Vec<String>) -> Result<(), ()> {\n    let mut command = Command::new(\"container\");\n    command.arg(\"rm\");\n\n    for id in container_ids {\n        command.arg(id);\n    }\n\n    let Ok(_) = command.output() else {\n        eprintln!(\"Failed to remove containers\");\n        return Err(());\n    };\n\n    Ok(())\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Container {\n    pub networks: Vec<Network>,\n    pub status: String,\n    pub configuration: Configuration,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Network {\n    pub address: String,\n    pub gateway: String,\n    pub network: String,\n    pub hostname: String,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Configuration {\n    pub resources: Resources,\n    pub labels: HashMap<String, String>,\n    pub hostname: String,\n    pub sysctls: 
HashMap<String, String>,\n    pub networks: Vec<String>,\n    // pub initProcess: InitProcess,\n    pub id: String,\n    pub rosetta: bool,\n    // pub runtimeHandler: String,\n    pub platform: Platform,\n    pub mounts: Vec<Mount>,\n    pub image: Image,\n    pub dns: Dns,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Resources {\n    pub cpus: u32,\n    // pub memoryInBytes: u64,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct InitProcess {\n    pub environment: Vec<String>,\n    pub arguments: Vec<String>,\n    pub executable: String,\n    // pub workingDirectory: String,\n    pub terminal: bool,\n    pub user: User,\n    // pub supplementalGroups: Vec<u32>,\n    pub rlimits: Vec<String>,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct User {\n    pub id: Id,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Id {\n    pub uid: u32,\n    pub gid: u32,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Platform {\n    pub os: String,\n    pub architecture: String,\n}\n\n#[derive(Debug, Deserialize, Clone)]\npub struct Mount {\n    // Fill in fields as needed\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Image {\n    pub reference: String,\n    pub descriptor: Descriptor,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Descriptor {\n    pub size: u64,\n    pub digest: String,\n    // pub annotations: HashMap<String, String>,\n    // pub media_type: String,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize, Clone)]\npub struct Dns {\n    pub nameservers: Vec<String>,\n    // pub search_domains: Vec<String>,\n    pub options: Vec<String>,\n}\n"
  },
  {
    "path": "src/deserializer.rs",
    "content": "use std::collections::HashMap;\n\nuse serde::Deserialize;\nuse serde::de::{self, Deserializer, MapAccess, SeqAccess, Visitor};\nuse std::fmt;\n\n#[derive(Debug, Deserialize)]\npub struct Service {\n    pub name: Option<String>,\n    pub image: String,\n    #[serde(default)]\n    pub ports: Vec<String>,\n    #[serde(default, deserialize_with = \"deserialize_environment_variables\")]\n    pub environment: HashMap<String, String>,\n    #[serde(default, deserialize_with = \"deserialize_array_key_value\")]\n    pub volumes: HashMap<String, String>,\n    #[serde(default, deserialize_with = \"deserialize_command\")]\n    pub command: Option<Vec<String>>,\n    #[serde(default)]\n    pub cpu_count: Option<u32>,\n    #[serde(default)]\n    pub mem_limit: Option<String>,\n}\n\n#[allow(dead_code, unused_variables)]\n#[derive(Debug, Deserialize)]\npub struct Compose {\n    #[serde(default)]\n    pub version: String,\n    pub services: HashMap<String, Service>,\n}\n\npub fn deserialize_yaml(yaml: &str) -> Result<Compose, serde_yaml::Error> {\n    serde_yaml::from_str(yaml)\n}\n\nfn deserialize_environment_variables<'a, D>(\n    deserializer: D,\n) -> Result<HashMap<String, String>, D::Error>\nwhere\n    D: Deserializer<'a>,\n{\n    struct EnvVisitor;\n\n    impl<'a> Visitor<'a> for EnvVisitor {\n        type Value = HashMap<String, String>;\n\n        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n            formatter.write_str(\"a map or a list of key=value strings\")\n        }\n\n        fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>\n        where\n            M: MapAccess<'a>,\n        {\n            let mut map = HashMap::new();\n            while let Some((key, value)) = access.next_entry()? 
{\n                map.insert(key, value);\n            }\n            Ok(map)\n        }\n\n        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>\n        where\n            A: SeqAccess<'a>,\n        {\n            let mut map = HashMap::new();\n            while let Some(entry) = seq.next_element::<String>()? {\n                let parts: Vec<&str> = entry.splitn(2, '=').collect();\n                if parts.len() == 2 {\n                    map.insert(parts[0].to_string(), parts[1].to_string());\n                } else {\n                    return Err(de::Error::custom(format!(\n                        \"Invalid environment variable: {}\",\n                        entry\n                    )));\n                }\n            }\n            Ok(map)\n        }\n    }\n\n    deserializer.deserialize_any(EnvVisitor)\n}\n\nfn deserialize_array_key_value<'a, D>(deserializer: D) -> Result<HashMap<String, String>, D::Error>\nwhere\n    D: Deserializer<'a>,\n{\n    struct ArrayKeyValueVisitor;\n\n    impl<'a> Visitor<'a> for ArrayKeyValueVisitor {\n        type Value = HashMap<String, String>;\n\n        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n            formatter.write_str(\"a list of strings which follows this format: key:value\")\n        }\n\n        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>\n        where\n            A: SeqAccess<'a>,\n        {\n            let mut map = HashMap::new();\n            while let Some(entry) = seq.next_element::<String>()? 
{\n                let parts: Vec<&str> = entry.splitn(2, ':').collect();\n                if parts.len() == 2 {\n                    map.insert(parts[0].to_string(), parts[1].to_string());\n                } else {\n                    return Err(de::Error::custom(format!(\"Invalid volume: {}\", entry)));\n                }\n            }\n            Ok(map)\n        }\n    }\n\n    deserializer.deserialize_seq(ArrayKeyValueVisitor)\n}\n\nfn deserialize_command<'a, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>\nwhere\n    D: Deserializer<'a>,\n{\n    struct CommandVisitor;\n\n    impl<'a> Visitor<'a> for CommandVisitor {\n        type Value = Option<Vec<String>>;\n\n        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n            formatter.write_str(\"a string or a list of strings\")\n        }\n\n        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n        where\n            E: de::Error,\n        {\n            let parts: Vec<&str> = v.splitn(2, ' ').collect();\n            if parts.len() == 2 {\n                Ok(Some(vec![parts[0].to_string(), parts[1].to_string()]))\n            } else {\n                Ok(Some(vec![v.to_string()]))\n            }\n        }\n\n        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>\n        where\n            A: SeqAccess<'a>,\n        {\n            let mut vec = Vec::new();\n            while let Some(entry) = seq.next_element::<String>()? {\n                vec.push(entry);\n            }\n            Ok(Some(vec))\n        }\n    }\n\n    deserializer.deserialize_any(CommandVisitor)\n}\n"
  },
  {
    "path": "src/main.rs",
    "content": "use clap::{CommandFactory, Parser};\n\nmod cli;\nmod container;\nmod deserializer;\nmod runner;\nmod utils;\n\nuse crate::cli::{Cli, Command};\n\nfn main() {\n    let cli = Cli::parse();\n\n    match cli.command {\n        // TODO: Add error handling + exit code\n        Some(Command::Up { .. }) => runner::run_services(cli.file),\n        Some(Command::Down) => runner::stop_and_remove_services(cli.file),\n        None => Cli::command().print_help().unwrap(),\n    }\n}\n"
  },
  {
    "path": "src/runner.rs",
    "content": "use std::{collections::HashMap, path::Path, process::Command};\n\nuse crate::{container, deserializer, utils::deserialize_compose_file};\n\npub fn run_services(path: Option<String>) {\n    let compose = deserialize_compose_file(path).unwrap();\n    for (name, service) in compose.services.iter() {\n        let container_name = service.name.clone().unwrap_or(name.clone());\n        let service_container = ServiceContainer::new(container_name, service);\n        match service_container.run() {\n            Ok(_) => println!(\"{}\", name),\n            Err(e) => println!(\"Error while running {}: {:?}\", name, e),\n        };\n    }\n}\n\npub fn stop_and_remove_services(path: Option<String>) {\n    let containers = container::get_containers_list().unwrap();\n    let container_ids = containers\n        .iter()\n        .map(|c| c.configuration.id.clone())\n        .collect::<Vec<String>>();\n\n    container::stop_container(container_ids.clone()).unwrap();\n    container::remove_container(container_ids.clone()).unwrap();\n\n    let compose = deserialize_compose_file(path).unwrap();\n\n    let ports = compose\n        .services\n        .iter()\n        .flat_map(|(_, service)| service.ports.clone())\n        .collect::<Vec<String>>();\n\n    for port in ports {\n        let port = port.split(\":\").collect::<Vec<&str>>();\n        let host_port = port[0].parse::<u16>().unwrap();\n        let output = Command::new(\"lsof\")\n            .arg(\"-ti\")\n            .arg(format!(\":{host_port}\"))\n            .output()\n            .expect(\"Failed to execute process\");\n\n        if output.status.success() {\n            let pid = String::from_utf8(output.stdout).unwrap();\n            Command::new(\"kill\")\n                .arg(pid.trim())\n                .output()\n                .expect(\"Failed to execute process\");\n        }\n    }\n}\n\nstruct ServiceContainer {\n    name: String,\n    ports: Vec<String>,\n    environment: HashMap<String, 
String>,\n    image: String,\n    volumes: HashMap<String, String>,\n    command: Option<Vec<String>>,\n    cpu_count: Option<u32>,\n    mem_limit: Option<String>,\n}\n\nimpl ServiceContainer {\n    pub fn new(name: String, service: &deserializer::Service) -> Self {\n        Self {\n            name,\n            ports: service.ports.clone(),\n            environment: service.environment.clone(),\n            image: service.image.clone(),\n            volumes: service.volumes.clone(),\n            command: service.command.clone(),\n            cpu_count: service.cpu_count,\n            mem_limit: service.mem_limit.clone(),\n        }\n    }\n\n    pub fn run(&self) -> Result<(), String> {\n        let mut output = Command::new(\"container\");\n        output.arg(\"run\").arg(\"--name\").arg(self.name.clone());\n\n        for (key, value) in self.environment.iter() {\n            let env_var = format!(\"{}={}\", key, value);\n            output.arg(\"-e\");\n            output.arg(env_var);\n        }\n\n        for (key, value) in self.volumes.iter() {\n            output.arg(\"--mount\");\n\n            if !Path::new(key).exists() {\n                std::fs::create_dir_all(key).unwrap();\n            }\n\n            let abs_source =\n                std::fs::canonicalize(key).expect(\"failed to canonicalize mount source path\");\n            let abs_source_str = abs_source.to_str().expect(\"non-UTF8 path\");\n\n            output.arg(format!(\n                \"type=bind,source={},target={}\",\n                abs_source_str, value\n            ));\n        }\n\n        // Add CPU and memory limits if specified\n        if let Some(cpu_count) = self.cpu_count {\n            output.arg(\"--cpus\").arg(cpu_count.to_string());\n        }\n        if let Some(mem_limit) = &self.mem_limit {\n            output.arg(\"--memory\").arg(mem_limit);\n        }\n\n        let output = output.arg(\"-d\").arg(self.image.clone());\n\n        if let Some(command) = &self.command 
{\n            if command.len() == 1 && command[0].is_empty() {\n                output.arg(\"echo\").arg(\"No command provided\");\n            } else {\n                for arg in command {\n                    output.arg(arg);\n                }\n            }\n        }\n\n        match output.output() {\n            Ok(output) => {\n                if !output.status.success() {\n                    return Err(format!(\n                        \"Failed to run container: {:?}\",\n                        String::from_utf8(output.stderr).unwrap()\n                    ));\n                }\n            }\n            Err(e) => {\n                return Err(e.kind().to_string());\n            }\n        }\n\n        self.expose_service_ports();\n\n        Ok(())\n    }\n\n    fn expose_service_ports(&self) {\n        if !self.ports.is_empty() {\n            println!(\n                \"Found ports in service, container does not support mapping port yet. Running socat fallback.\"\n            );\n            let command = Command::new(\"container\")\n                .arg(\"inspect\")\n                .arg(self.name.clone())\n                .output()\n                .expect(\"Failed to execute process\");\n\n            let value = String::from_utf8(command.stdout).unwrap();\n            let container =\n                serde_json::from_str::<Vec<container::Container>>(&value).unwrap()[0].clone();\n\n            for port in self.ports.iter() {\n                let port = port.split(\":\").collect::<Vec<&str>>();\n                let host_port = port[0].parse::<u16>().unwrap();\n                let container_port = port[1].parse::<u16>().unwrap();\n\n                let container_ip =\n                    format!(\"{}:{}\", container.configuration.networks[0], container_port);\n\n                let output = Command::new(\"socat\")\n                    .arg(format!(\"TCP-LISTEN:{},fork\", host_port))\n                    .arg(format!(\"TCP:{}\", container_ip))\n     
               .spawn();\n\n                match output {\n                    Ok(output) => {\n                        println!(\"socat running on pid {}\", output.id());\n                    }\n                    Err(e) => {\n                        eprintln!(\"Failed to run socat: {}\", e);\n                    }\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "src/utils.rs",
    "content": "use std::{fs, path::Path};\n\nuse crate::deserializer::{Compose, deserialize_yaml};\n\npub fn deserialize_compose_file(path: Option<String>) -> Result<Compose, serde_yaml::Error> {\n    if let Some(path) = path {\n        let yaml = fs::read_to_string(path).expect(\"Failed to read the provided compose file\");\n        return deserialize_yaml(&yaml);\n    };\n\n    let default_candidates = [\"docker-compose.yaml\", \"docker-compose.yml\"];\n\n    for candidate in default_candidates.iter() {\n        let path = Path::new(\".\").join(candidate);\n        if path.exists() {\n            let yaml = fs::read_to_string(path).expect(\"Failed to read compose file\");\n            return deserialize_yaml(&yaml);\n        }\n    }\n\n    panic!(\"No docker-compose.yaml or docker-compose.yml file found\");\n}\n"
  }
]