Repository: noghartt/container-compose Branch: main Commit: f6bcc62a63b3 Files: 17 Total size: 28.9 KB Directory structure: gitextract_jqe7xy8h/ ├── .github/ │ └── workflows/ │ ├── linters.yaml │ └── release.yaml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── Formula/ │ └── container-compose.rb ├── LICENSE ├── README.md ├── cliff.toml ├── examples/ │ └── docker-compose.yaml ├── flake.nix └── src/ ├── cli.rs ├── container.rs ├── deserializer.rs ├── main.rs ├── runner.rs └── utils.rs ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/workflows/linters.yaml ================================================ name: Linters on: pull_request: branches: ["main"] jobs: cargo-fmt-check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@stable - name: Check formatting run: cargo fmt --check cargo-clippy-check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@stable - name: Check Clippy run: cargo clippy -- -W clippy::perf -W clippy::correctness -W clippy::suspicious --deny=warnings ================================================ FILE: .github/workflows/release.yaml ================================================ name: Release permissions: contents: write on: workflow_dispatch: push: tags: - v* jobs: create-release: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: taiki-e/create-gh-release-action@v1 with: changelog: CHANGELOG.md token: ${{ secrets.GITHUB_TOKEN }} upload-assets: strategy: matrix: include: - target: x86_64-apple-darwin os: macos-latest - target: aarch64-apple-darwin os: macos-latest runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - name: Install cross-compilation tools uses: taiki-e/setup-cross-toolchain-action@v1 with: target: ${{ matrix.target }} if: startsWith(matrix.os, 'ubuntu') - name: Upload Rust binary uses: taiki-e/upload-rust-binary-action@v1 with: bin: container-compose target: ${{ matrix.target }} archive: $bin-$tag-$target token: ${{ secrets.GITHUB_TOKEN }} ================================================ FILE: .gitignore ================================================ /target ================================================ FILE: CHANGELOG.md ================================================ # Changelog All notable changes to this project will be documented in this file. ## [0.0.1-alpha.5] - 2025-06-19 ### 🚀 Features - Support `cpu_count` and `mem_limit` (#20) ### 🐛 Bug Fixes - Allow empty version argument (#21) ### 💼 Other - *(release)* Update homebrew file to v0.0.1.alpha-4 (#17) - Update cargo.toml version (#18) ### ⚙️ Miscellaneous Tasks - Add instructions to run locally (#19) ## [0.0.1-alpha.4] - 2025-06-18 ### 🚀 Features - *(homebrew)* Bump brewfile to v0.0.1-alpha.3 (#7) ### 🐛 Bug Fixes - *(homebrew)* Change sha256 to v0.0.1-alpha.3 (#10) - Allow empty ports, env vars, and volumes (#12) ### 💼 Other - *(release)* Update CHANGELOG for v0.0.1-alpha.4 (#16) ## [0.0.1-alpha.3] - 2025-06-17 ### 🐛 Bug Fixes - *(utils)* Read .yml file extension for docker-compose config files ### 💼 Other - *(release)* Update CHANGELOG for v0.0.1-alpha.3 ### 📚 Documentation - Update README to add container-compose installation via crates.io ### ⚙️ Miscellaneous Tasks - Add Formula with brewfile to install CLI with brew - Add initial README file - Update README - Update Cargo.toml - Add excludes field on Cargo.toml ## [0.0.1-alpha.2] - 2025-06-15 ### 💼 Other - Add manual dispatch option - Add CHANGELOG.md - Trigger v0.0.1-alpha.2 release ### ⚙️ Miscellaneous Tasks - Fix triggers ## [0.0.1-alpha.1] - 2025-06-15 ### 🚀 Features - Add poc implementation for container run command - Add initial cli setup with up command - Improve code readability with structs - Add support for mount binding - Add support for command argument - Add support for down command ### 💼 Other - Add initial github action workflow to build cli ### ⚙️ Miscellaneous Tasks - Add nix flake file - Add MIT LICENSE file - Remove dbg! - Remove comment ================================================ FILE: Cargo.toml ================================================ [package] name = "container-compose" version = "0.0.1-alpha.5" edition = "2024" license-file = "LICENSE" description = " A docker-compose like tool for Apple Containers" homepage = "https://github.com/noghartt/container-compose" repository = "https://github.com/noghartt/container-compose" readme = "README.md" keywords = ["devops"] categories = ["command-line-utilities"] exclude = ["scripts/*", "examples/"] [dependencies] clap = { version = "4.5.40", features = ["derive"] } clap_mangen = "0.2.27" serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.140" serde_yaml = "0.9.34" ================================================ FILE: Formula/container-compose.rb ================================================ class ContainerCompose < Formula desc "A docker-compose like tool for Apple Containers " homepage "https://github.com/noghartt/container-compose" version "0.0.1-alpha.5" depends_on "socat" on_macos do on_arm do url "https://github.com/noghartt/container-compose/releases/download/v#{version}/container-compose-v#{version}-aarch64-apple-darwin.tar.gz" sha256 "8b6f9d35b8a03eb14c059b347aa2b9e705e6c75c10496ba462a8cf7d58545973" end on_intel do url "https://github.com/noghartt/container-compose/releases/download/v#{version}/container-compose-v#{version}-x86_64-apple-darwin.tar.gz" sha256 "a409ca44b8cec7ac540c0db453f6f8e3d22f29c6cbb4f8cb21ecfe946011610f" end end def install bin.install "container-compose" end end ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2025 Guilherme "noghartt" Ananias Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ > [!NOTE] > > ⚠️ container-compose is being put in archive-mode due to me not having time to maintain this project. > If you want to use a maintained tool, I recommend: [container-compose](https://github.com/mcrich23/container-compose). # container-compose A CLI to let you use the Apple [container](https://github.com/apple/container) like a `docker-compose` config file. > [!WARNING] > This tool is on "alpha" mode, which means that you can find a lot of bugs or missing features. > If you find anything, feel free to open an issue. Thank you! :) ## How to install ### Installing via Homebrew ```sh brew tap noghartt/container-compose https://github.com/noghartt/container-compose.git brew install noghartt/container-compose/container-compose ``` ### Installing via cargo ```sh cargo install container-compose ``` ### Downloading binary We have made a `.tar.gz` containing the binary available on the release page. If you prefer to have your binary from there. ### Running container-compose To run `container-compose` from the source code, first ensure you have [Rust](https://www.rust-lang.org/tools/install) installed. 1. Clone the repository: ```sh git clone https://github.com/noghartt/container-compose.git cd container-compose ``` 2. Build the project: ```sh cargo build --release ``` 3. Run `container-compose` using Cargo: ```sh cargo run -- [options] ``` For example, to start services: ```sh cargo run -- up ``` You can also run the compiled binary directly: ```sh ./target/release/container-compose [options] ``` Replace `` and `[options]` with the desired subcommand and arguments. ================================================ FILE: cliff.toml ================================================ # git-cliff ~ configuration file # https://git-cliff.org/docs/configuration [changelog] # A Tera template to be rendered as the changelog's footer. # See https://keats.github.io/tera/docs/#introduction header = """ # Changelog\n All notable changes to this project will be documented in this file.\n """ # A Tera template to be rendered for each release in the changelog. # See https://keats.github.io/tera/docs/#introduction body = """ {% if version %}\ ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} {% else %}\ ## [unreleased] {% endif %}\ {% for group, commits in commits | group_by(attribute="group") %} ### {{ group | striptags | trim | upper_first }} {% for commit in commits %} - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ {% if commit.breaking %}[**breaking**] {% endif %}\ {{ commit.message | upper_first }}\ {% endfor %} {% endfor %}\n """ # A Tera template to be rendered as the changelog's footer. # See https://keats.github.io/tera/docs/#introduction footer = """ """ # Remove leading and trailing whitespaces from the changelog's body. trim = true # Render body even when there are no releases to process. render_always = true # An array of regex based postprocessors to modify the changelog. postprocessors = [ # Replace the placeholder with a URL. #{ pattern = '', replace = "https://github.com/orhun/git-cliff" }, ] # render body even when there are no releases to process # render_always = true # output file path # output = "test.md" [git] # Parse commits according to the conventional commits specification. # See https://www.conventionalcommits.org conventional_commits = true # Exclude commits that do not match the conventional commits specification. filter_unconventional = true # Require all commits to be conventional. # Takes precedence over filter_unconventional. require_conventional = false # Split commits on newlines, treating each line as an individual commit. split_commits = false # An array of regex based parsers to modify commit messages prior to further processing. commit_preprocessors = [ # Replace issue numbers with link templates to be updated in `changelog.postprocessors`. #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, # Check spelling of the commit message using https://github.com/crate-ci/typos. # If the spelling is incorrect, it will be fixed automatically. #{ pattern = '.*', replace_command = 'typos --write-changes -' }, ] # Prevent commits that are breaking from being excluded by commit parsers. protect_breaking_commits = false # An array of regex based parsers for extracting data from the commit message. # Assigns commits to groups. # Optionally sets the commit's scope and can decide to exclude commits from further processing. commit_parsers = [ { message = "^feat", group = "🚀 Features" }, { message = "^fix", group = "🐛 Bug Fixes" }, { message = "^doc", group = "📚 Documentation" }, { message = "^perf", group = "⚡ Performance" }, { message = "^refactor", group = "🚜 Refactor" }, { message = "^style", group = "🎨 Styling" }, { message = "^test", group = "🧪 Testing" }, { message = "^chore\\(release\\): prepare for", skip = true }, { message = "^chore\\(deps.*\\)", skip = true }, { message = "^chore\\(pr\\)", skip = true }, { message = "^chore\\(pull\\)", skip = true }, { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, { body = ".*security", group = "🛡️ Security" }, { message = "^revert", group = "◀️ Revert" }, { message = ".*", group = "💼 Other" }, ] # Exclude commits that are not matched by any commit parser. filter_commits = false # An array of link parsers for extracting external references, and turning them into URLs, using regex. link_parsers = [] # Include only the tags that belong to the current branch. use_branch_tags = false # Order releases topologically instead of chronologically. topo_order = false # Order releases topologically instead of chronologically. topo_order_commits = true # Order of commits in each group/release within the changelog. # Allowed values: newest, oldest sort_commits = "oldest" # Process submodules commits recurse_submodules = false ================================================ FILE: examples/docker-compose.yaml ================================================ version: '3.8' services: postgres: image: postgres:15 ports: - 5432:5432 command: postgres environment: - POSTGRES_PASSWORD=postgres cpu_count: 1 mem_limit: 2gb ================================================ FILE: flake.nix ================================================ { inputs = { nixpkgs.url = "github:nixos/nixpkgs/release-23.11"; rust-overlay.url = "github:oxalica/rust-overlay"; rust-overlay.inputs.nixpkgs.follows = "nixpkgs"; flake-utils.url = "github:numtide/flake-utils"; }; outputs = { self, nixpkgs, rust-overlay, flake-utils, }: flake-utils.lib.eachDefaultSystem (system: let overlays = [(import rust-overlay)]; pkgs = import nixpkgs { inherit system overlays; }; in { devShells.default = pkgs.mkShell { name = "container-compose"; buildInputs = with pkgs; [ darwin.apple_sdk.frameworks.SystemConfiguration darwin.apple_sdk.frameworks.CoreServices darwin.apple_sdk.frameworks.CoreFoundation ]; nativeBuildInputs = with pkgs; [ (rust-bin.stable."1.85.0".default.override { extensions = [ "rust-src" "rust-analyzer" ]; } ) ]; }; }); } ================================================ FILE: src/cli.rs ================================================ use clap::{Parser, Subcommand}; #[derive(Parser)] #[command(author, version, about, long_about = None)] pub struct Cli { #[arg(short, long)] pub file: Option, #[command(subcommand)] pub command: Option, } #[derive(Subcommand)] pub enum Command { #[command(about = "Create and run containers")] // Usage: docker compose up [OPTIONS] [SERVICE...] // Follow this structure for the Up command Up { #[arg(short, long)] detach: bool, #[arg()] service: Vec, }, #[command(about = "Stop and remove containers")] Down, } ================================================ FILE: src/container.rs ================================================ use serde::Deserialize; use std::{collections::HashMap, process::Command}; #[allow(dead_code, unused_variables)] pub fn get_containers_list() -> Result, ()> { let mut command = Command::new("container"); command.arg("ls").arg("--format").arg("json"); let Ok(output) = command.output() else { eprintln!("Failed to get containers list"); return Err(()); }; let Ok(containers) = serde_json::from_slice(&output.stdout) else { eprintln!("Failed to parse containers list"); return Err(()); }; Ok(containers) } pub fn stop_container(container_ids: Vec) -> Result<(), ()> { let mut command = Command::new("container"); command.arg("stop"); for id in container_ids { command.arg(id); } let Ok(_) = command.output() else { eprintln!("Failed to stop containers"); return Err(()); }; Ok(()) } pub fn remove_container(container_ids: Vec) -> Result<(), ()> { let mut command = Command::new("container"); command.arg("rm"); for id in container_ids { command.arg(id); } let Ok(_) = command.output() else { eprintln!("Failed to remove containers"); return Err(()); }; Ok(()) } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Container { pub networks: Vec, pub status: String, pub configuration: Configuration, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Network { pub address: String, pub gateway: String, pub network: String, pub hostname: String, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Configuration { pub resources: Resources, pub labels: HashMap, pub hostname: String, pub sysctls: HashMap, pub networks: Vec, // pub initProcess: InitProcess, pub id: String, pub rosetta: bool, // pub runtimeHandler: String, pub platform: Platform, pub mounts: Vec, pub image: Image, pub dns: Dns, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Resources { pub cpus: u32, // pub memoryInBytes: u64, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct InitProcess { pub environment: Vec, pub arguments: Vec, pub executable: String, // pub workingDirectory: String, pub terminal: bool, pub user: User, // pub supplementalGroups: Vec, pub rlimits: Vec, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct User { pub id: Id, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Id { pub uid: u32, pub gid: u32, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Platform { pub os: String, pub architecture: String, } #[derive(Debug, Deserialize, Clone)] pub struct Mount { // Fill in fields as needed } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Image { pub reference: String, pub descriptor: Descriptor, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Descriptor { pub size: u64, pub digest: String, // pub annotations: HashMap, // pub media_type: String, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize, Clone)] pub struct Dns { pub nameservers: Vec, // pub search_domains: Vec, pub options: Vec, } ================================================ FILE: src/deserializer.rs ================================================ use std::collections::HashMap; use serde::Deserialize; use serde::de::{self, Deserializer, MapAccess, SeqAccess, Visitor}; use std::fmt; #[derive(Debug, Deserialize)] pub struct Service { pub name: Option, pub image: String, #[serde(default)] pub ports: Vec, #[serde(default, deserialize_with = "deserialize_environment_variables")] pub environment: HashMap, #[serde(default, deserialize_with = "deserialize_array_key_value")] pub volumes: HashMap, #[serde(default, deserialize_with = "deserialize_command")] pub command: Option>, #[serde(default)] pub cpu_count: Option, #[serde(default)] pub mem_limit: Option, } #[allow(dead_code, unused_variables)] #[derive(Debug, Deserialize)] pub struct Compose { #[serde(default)] pub version: String, pub services: HashMap, } pub fn deserialize_yaml(yaml: &str) -> Result { serde_yaml::from_str(yaml) } fn deserialize_environment_variables<'a, D>( deserializer: D, ) -> Result, D::Error> where D: Deserializer<'a>, { struct EnvVisitor; impl<'a> Visitor<'a> for EnvVisitor { type Value = HashMap; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a map or a list of key=value strings") } fn visit_map(self, mut access: M) -> Result where M: MapAccess<'a>, { let mut map = HashMap::new(); while let Some((key, value)) = access.next_entry()? { map.insert(key, value); } Ok(map) } fn visit_seq(self, mut seq: A) -> Result where A: SeqAccess<'a>, { let mut map = HashMap::new(); while let Some(entry) = seq.next_element::()? { let parts: Vec<&str> = entry.splitn(2, '=').collect(); if parts.len() == 2 { map.insert(parts[0].to_string(), parts[1].to_string()); } else { return Err(de::Error::custom(format!( "Invalid environment variable: {}", entry ))); } } Ok(map) } } deserializer.deserialize_any(EnvVisitor) } fn deserialize_array_key_value<'a, D>(deserializer: D) -> Result, D::Error> where D: Deserializer<'a>, { struct ArrayKeyValueVisitor; impl<'a> Visitor<'a> for ArrayKeyValueVisitor { type Value = HashMap; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a list of strings which follows this format: key:value") } fn visit_seq(self, mut seq: A) -> Result where A: SeqAccess<'a>, { let mut map = HashMap::new(); while let Some(entry) = seq.next_element::()? { let parts: Vec<&str> = entry.splitn(2, ':').collect(); if parts.len() == 2 { map.insert(parts[0].to_string(), parts[1].to_string()); } else { return Err(de::Error::custom(format!("Invalid volume: {}", entry))); } } Ok(map) } } deserializer.deserialize_seq(ArrayKeyValueVisitor) } fn deserialize_command<'a, D>(deserializer: D) -> Result>, D::Error> where D: Deserializer<'a>, { struct CommandVisitor; impl<'a> Visitor<'a> for CommandVisitor { type Value = Option>; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("a string or a list of strings") } fn visit_str(self, v: &str) -> Result where E: de::Error, { let parts: Vec<&str> = v.splitn(2, ' ').collect(); if parts.len() == 2 { Ok(Some(vec![parts[0].to_string(), parts[1].to_string()])) } else { Ok(Some(vec![v.to_string()])) } } fn visit_seq(self, mut seq: A) -> Result where A: SeqAccess<'a>, { let mut vec = Vec::new(); while let Some(entry) = seq.next_element::()? { vec.push(entry); } Ok(Some(vec)) } } deserializer.deserialize_any(CommandVisitor) } ================================================ FILE: src/main.rs ================================================ use clap::{CommandFactory, Parser}; mod cli; mod container; mod deserializer; mod runner; mod utils; use crate::cli::{Cli, Command}; fn main() { let cli = Cli::parse(); match cli.command { // TODO: Add error handling + exit code Some(Command::Up { .. }) => runner::run_services(cli.file), Some(Command::Down) => runner::stop_and_remove_services(cli.file), None => Cli::command().print_help().unwrap(), } } ================================================ FILE: src/runner.rs ================================================ use std::{collections::HashMap, path::Path, process::Command}; use crate::{container, deserializer, utils::deserialize_compose_file}; pub fn run_services(path: Option) { let compose = deserialize_compose_file(path).unwrap(); for (name, service) in compose.services.iter() { let container_name = service.name.clone().unwrap_or(name.clone()); let service_container = ServiceContainer::new(container_name, service); match service_container.run() { Ok(_) => println!("{}", name), Err(e) => println!("Error while running {}: {:?}", name, e), }; } } pub fn stop_and_remove_services(path: Option) { let containers = container::get_containers_list().unwrap(); let container_ids = containers .iter() .map(|c| c.configuration.id.clone()) .collect::>(); container::stop_container(container_ids.clone()).unwrap(); container::remove_container(container_ids.clone()).unwrap(); let compose = deserialize_compose_file(path).unwrap(); let ports = compose .services .iter() .flat_map(|(_, service)| service.ports.clone()) .collect::>(); for port in ports { let port = port.split(":").collect::>(); let host_port = port[0].parse::().unwrap(); let output = Command::new("lsof") .arg("-ti") .arg(format!(":{host_port}")) .output() .expect("Failed to execute process"); if output.status.success() { let pid = String::from_utf8(output.stdout).unwrap(); Command::new("kill") .arg(pid.trim()) .output() .expect("Failed to execute process"); } } } struct ServiceContainer { name: String, ports: Vec, environment: HashMap, image: String, volumes: HashMap, command: Option>, cpu_count: Option, mem_limit: Option, } impl ServiceContainer { pub fn new(name: String, service: &deserializer::Service) -> Self { Self { name, ports: service.ports.clone(), environment: service.environment.clone(), image: service.image.clone(), volumes: service.volumes.clone(), command: service.command.clone(), cpu_count: service.cpu_count, mem_limit: service.mem_limit.clone(), } } pub fn run(&self) -> Result<(), String> { let mut output = Command::new("container"); output.arg("run").arg("--name").arg(self.name.clone()); for (key, value) in self.environment.iter() { let env_var = format!("{}={}", key, value); output.arg("-e"); output.arg(env_var); } for (key, value) in self.volumes.iter() { output.arg("--mount"); if !Path::new(key).exists() { std::fs::create_dir_all(key).unwrap(); } let abs_source = std::fs::canonicalize(key).expect("failed to canonicalize mount source path"); let abs_source_str = abs_source.to_str().expect("non-UTF8 path"); output.arg(format!( "type=bind,source={},target={}", abs_source_str, value )); } // Add CPU and memory limits if specified if let Some(cpu_count) = self.cpu_count { output.arg("--cpus").arg(cpu_count.to_string()); } if let Some(mem_limit) = &self.mem_limit { output.arg("--memory").arg(mem_limit); } let output = output.arg("-d").arg(self.image.clone()); if let Some(command) = &self.command { if command.len() == 1 && command[0].is_empty() { output.arg("echo").arg("No command provided"); } else { for arg in command { output.arg(arg); } } } match output.output() { Ok(output) => { if !output.status.success() { return Err(format!( "Failed to run container: {:?}", String::from_utf8(output.stderr).unwrap() )); } } Err(e) => { return Err(e.kind().to_string()); } } self.expose_service_ports(); Ok(()) } fn expose_service_ports(&self) { if !self.ports.is_empty() { println!( "Found ports in service, container does not support mapping port yet. Running socat fallback." ); let command = Command::new("container") .arg("inspect") .arg(self.name.clone()) .output() .expect("Failed to execute process"); let value = String::from_utf8(command.stdout).unwrap(); let container = serde_json::from_str::>(&value).unwrap()[0].clone(); for port in self.ports.iter() { let port = port.split(":").collect::>(); let host_port = port[0].parse::().unwrap(); let container_port = port[1].parse::().unwrap(); let container_ip = format!("{}:{}", container.configuration.networks[0], container_port); let output = Command::new("socat") .arg(format!("TCP-LISTEN:{},fork", host_port)) .arg(format!("TCP:{}", container_ip)) .spawn(); match output { Ok(output) => { println!("socat running on pid {}", output.id()); } Err(e) => { eprintln!("Failed to run socat: {}", e); } } } } } } ================================================ FILE: src/utils.rs ================================================ use std::{fs, path::Path}; use crate::deserializer::{Compose, deserialize_yaml}; pub fn deserialize_compose_file(path: Option) -> Result { if let Some(path) = path { let yaml = fs::read_to_string(path).expect("Failed to read the provided compose file"); return deserialize_yaml(&yaml); }; let default_candidates = ["docker-compose.yaml", "docker-compose.yml"]; for candidate in default_candidates.iter() { let path = Path::new(".").join(candidate); if path.exists() { let yaml = fs::read_to_string(path).expect("Failed to read compose file"); return deserialize_yaml(&yaml); } } panic!("No docker-compose.yaml or docker-compose.yml file found"); }