Full Code of malus-security/sandblaster for AI

master d417bf90c9b2 cached
31 files
242.8 KB
60.3k tokens
313 symbols
1 requests
Download .txt
Showing preview only (254K chars total). Download the full file or copy to clipboard to get everything.
Repository: malus-security/sandblaster
Branch: master
Commit: d417bf90c9b2
Files: 31
Total size: 242.8 KB

Directory structure:
gitextract_1fgfkynp/

├── .github/
│   └── workflows/
│       ├── config/
│       │   └── config.json
│       ├── linter.yml
│       └── rules/
│           ├── common/
│           │   ├── inlineTokenChildren.js
│           │   └── wordPattern.js
│           ├── md101.js
│           ├── md102.js
│           ├── md103.js
│           ├── md104.js
│           └── rules.js
├── .gitignore
├── .gitmodules
├── LICENSE
├── README.md
├── helpers/
│   └── extract_sandbox_data.py
└── reverse-sandbox/
    ├── filters/
    │   ├── filters_ios11.json
    │   ├── filters_ios12.json
    │   ├── filters_ios13.json
    │   ├── filters_ios14.json
    │   ├── filters_ios4.json
    │   ├── filters_ios5.json
    │   └── filters_ios6.json
    ├── filters.py
    ├── logger.config
    ├── operation_node.py
    ├── regex_parser_v1.py
    ├── regex_parser_v2.py
    ├── regex_parser_v3.py
    ├── reverse_sandbox.py
    ├── reverse_string.py
    ├── sandbox_filter.py
    └── sandbox_regex.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/workflows/config/config.json
================================================
{
	"default": true,
	"MD048": { "style": "backtick" },
	"MD046": { "style": "fenced" },
	"MD029": { "style": "one" },
	"line-length": false,
	"no-hard-tabs": false
}


================================================
FILE: .github/workflows/linter.yml
================================================
name: Linter

on: [push, pull_request]

jobs:
  superlinter:
    name: Super Linter
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
        with:
          # Full git history is needed to get a proper list of changed files within `super-linter`
          fetch-depth: 0

      - name: Lint Code Base
        uses: github/super-linter@v4
        env:
          # Don't check already existent files
          VALIDATE_ALL_CODEBASE: false
          VALIDATE_GITHUB_ACTIONS: false
          LINTER_RULES_PATH: /.github/workflows/
          MARKDOWN_CONFIG_FILE: config/config.json
          MARKDOWN_CUSTOM_RULE_GLOBS: rules/rules.js
          DEFAULT_BRANCH: main
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


================================================
FILE: .github/workflows/rules/common/inlineTokenChildren.js
================================================
 class InlineTokenChildren {
    /**
     * Iterates over the children of an "inline" markdown-it token while
     * tracking the column at which each child's content appears on its
     * source line (so repeated content on one line is located correctly).
     *
     * @param {Object} token - A markdown-it token; must have type "inline".
     * @throws {TypeError} When the token is not of type "inline".
     */
    constructor(token) {
        if (token.type !== "inline") {
            throw new TypeError("wrong argument token type");
        }
        this.root = token;
        this.column = -1;
        this.lineNumber = token.map[0];
    }

    /**
     * Yields { token, column, lineNumber } for every child token.
     * `column` is 1-based. The search for each child's content resumes just
     * past the previous match on the same line.
     */
    *[Symbol.iterator]() {
        for (const child of this.root.children) {
            const { line, lineNumber } = child;
            // A new source line restarts the column search from the start.
            if (lineNumber !== this.lineNumber) {
                this.column = -1;
                this.lineNumber = lineNumber;
            }
            this.column = line.indexOf(child.content, this.column + 1);
            yield { token: child, column: this.column + 1, lineNumber };
        }
    }
}

module.exports = { InlineTokenChildren };


================================================
FILE: .github/workflows/rules/common/wordPattern.js
================================================
class WordPattern {
    /**
     * Compiles a keyword into a case-aware regular expression.
     *
     * @param {string} pattern - The keyword; bare dots are escaped.
     * @param {Object} [parameters] - Optional settings:
     *   noWordBoundary  - when present, do not wrap the pattern in \b...\b
     *                     (used for file extensions such as ".png");
     *   caseSensitive   - when present and truthy, match case exactly;
     *   suggestion      - canonical replacement text (defaults to pattern);
     *   skipForUseCases - when present, mark this pattern as skippable.
     */
    constructor(pattern, parameters) {
        const escaped = pattern.replace(/\\?\./g, "\\.");
        const noBoundary = parameters && parameters.hasOwnProperty('noWordBoundary');
        this.pattern = noBoundary ? escaped : `\\b${escaped}\\b`;
        const caseSensitive = parameters && parameters.hasOwnProperty('caseSensitive') && parameters.caseSensitive;
        const flags = caseSensitive ? "" : "i";
        this.regex = new RegExp(this.pattern, flags);
        this.suggestion = parameters && parameters.hasOwnProperty('suggestion') ? parameters.suggestion : pattern;
        // To match "Category" column words in changelogs, see case-sensitive.js
        this.stringRegex = new RegExp(`^${escaped}$`, flags);
        this.skipForUseCases = !!(parameters && parameters.hasOwnProperty('skipForUseCases'));
    }

    /**
     * Tests a line of text against the keyword regex.
     * @param {string} line
     * @returns {Match} Wrapper around the native match result.
     */
    test(line) {
        return new Match(line.match(this.regex));
    }
}

class Match {
    /**
     * Thin wrapper over a native String.prototype.match result.
     * @param {?Array} match - The native match array, or null.
     */
    constructor(match) {
        this.match = match;
    }

    /**
     * Computes the 1-based [column, length] of the matched text, or null
     * when there was no match. When capture group 2 matched, group 1 is
     * treated as a prefix and excluded from the reported range.
     * @returns {?Array<number>}
     */
    range() {
        if (!this.match) {
            return null;
        }
        let column = this.match.index + 1;
        let length = this.match[0].length;
        if (this.match[2]) {
            const prefixLength = this.match[1].length;
            column += prefixLength;
            length -= prefixLength;
        }
        return [column, length];
    }

    toString() {
        return this.match ? this.match.toString() : "null";
    }
}

module.exports = { WordPattern };


================================================
FILE: .github/workflows/rules/md101.js
================================================
const { InlineTokenChildren } = require("./common/inlineTokenChildren");
const { WordPattern } = require("./common/wordPattern");

// Keywords that must be fenced in backticks and written with this exact
// casing. Each WordPattern compiles to a word-boundary regex (unless
// noWordBoundary is set); the MD101 rule below reports any occurrence that
// is unfenced or wrongly cased.
const keywords = [
    new WordPattern("iExtractor-manager"),
    new WordPattern("device-info"),
    new WordPattern("device-name"),
    new WordPattern("list_apps"),
    new WordPattern("decrypt_kcache"),
    new WordPattern("decrypt_fs"),
    new WordPattern("curl"),
    new WordPattern("wget"),
    new WordPattern("crontab"),
    new WordPattern("cron"),
    new WordPattern("netcat"),
    new WordPattern("ping"),
    new WordPattern("traceroute"),
    new WordPattern("sudo"),
    new WordPattern("(?<!(system |ISRG ))root(?! ca)", { suggestion: "root" }),// match "root", but not "root CA", "MacOS System Root" and "ISRG Root X1"
    new WordPattern("true"),
    new WordPattern("false"),
    new WordPattern("jps"),
    new WordPattern("name=value"),
    new WordPattern("key=value"),
    new WordPattern("time:value"),
    new WordPattern("atsd.log"),
    new WordPattern("start.log"),
    new WordPattern("logback.xml"),
    new WordPattern("graphite.conf"),
    new WordPattern("command_malformed.log"),
    new WordPattern("stdout"),
    new WordPattern("stderr"),
    new WordPattern("SIGTERM"),
    new WordPattern("NaN"),
    // File extensions are matched without the surrounding \b word
    // boundaries (noWordBoundary), since they start with a dot.
    new WordPattern(".png", { noWordBoundary: true }),
    new WordPattern(".xml", { noWordBoundary: true }),
    new WordPattern(".jar", { noWordBoundary: true }),
    new WordPattern(".gz", { noWordBoundary: true }),
    new WordPattern(".tar.gz", { noWordBoundary: true }),
    new WordPattern(".zip", { noWordBoundary: true }),
    new WordPattern(".txt", { noWordBoundary: true }),
    new WordPattern(".csv", { noWordBoundary: true }),
    new WordPattern(".json", { noWordBoundary: true }),
    new WordPattern(".pdf", { noWordBoundary: true }),
    new WordPattern(".html", { noWordBoundary: true })

];

// markdownlint custom rule MD101: every keyword from the list above must be
// fenced in backticks and use its canonical casing. Headings and link text
// are exempt from the fencing requirement (but not from the casing check).
module.exports = {
    names: ["MD101", "backtick-keywords"],
    description: "Keywords must be fenced and must be in appropriate case.",
    tags: ["backtick", "code", "bash"],
    "function": (params, onError) => {
        var inHeading = false;
        var inLink = false;
        for (let token of params.tokens) {
            switch (token.type) {
                case "heading_open":
                    inHeading = true; break;
                case "heading_close":
                    inHeading = false; break;
                case "inline":
                    // Walk the inline token's children with their computed
                    // 1-based column positions (see InlineTokenChildren).
                    let children = new InlineTokenChildren(token);
                    for (let { token: child, column, lineNumber } of children) {
                        let isText = child.type === "text";
                        switch (child.type) {
                            case "link_open":
                                inLink = true; break;
                            case "link_close":
                                inLink = false; break;
                        }
                        for (let k of keywords) {
                            let anyCaseMatch = child.content.match(k.regex);
                            if (anyCaseMatch != null) {
                                let match = anyCaseMatch[0];
                                let correct = k.suggestion;
                                // Report when the keyword appears as plain text
                                // outside headings/links (i.e. not fenced), or
                                // when its casing differs from the suggestion.
                                if ((!inHeading && !inLink && isText) || // Bad not fenced
                                    (match !== correct)) { // Right fencing, wrong case
                                    onError({
                                        lineNumber,
                                        detail: `Expected \`${correct}\`. Actual ${match}.`,
                                        range: [column + anyCaseMatch.index, match.length]
                                    })
                                }
                            }
                        }
                    }
            }
        }
    }
};


================================================
FILE: .github/workflows/rules/md102.js
================================================
// HTTP methods, headers, status lines and API role names that must always
// appear fenced in backticks. Matching is case-sensitive, so only exact
// upper-case occurrences are flagged.
const http_keywords = [
    "GET",
    "POST",
    "PUT",
    "PATCH",
    "DELETE",
    "Content-Type",
    "Content-Encoding",
    "User-Agent",
    "200 OK",
    "401 Unauthorized",
    "403 Forbidden",
    "API_DATA_READ",
    "API_DATA_WRITE",
    "API_META_READ",
    "API_META_WRITE",
    "USER",
    "EDITOR",
    "ENTITY_GROUP_ADMIN",
    "ADMIN"
];
// A single alternation regex: each keyword is anchored on word boundaries
// so e.g. "GET" does not match inside "TARGETED".
const keywordsRegex = new RegExp(
    http_keywords.map((word) => `\\b${word}\\b`).join("|")
);

const { InlineTokenChildren } = require("./common/inlineTokenChildren");

// markdownlint custom rule MD102: HTTP keywords (see http_keywords above)
// must be fenced in backticks. Headings are exempt; only bare "text"
// children of inline tokens are checked.
module.exports = {
    names: ["MD102", "backtick-http"],
    description: "HTTP keywords must be fenced.",
    tags: ["backtick", "HTTP", "HTTPS"],
    "function": (params, onError) => {
        var inHeading = false;
        for (let token of params.tokens) {
            switch (token.type) {
                case "heading_open":
                    inHeading = true; break;
                case "heading_close":
                    inHeading = false; break;
                case "inline":
                    if (!inHeading) {
                        // Walk children with computed column positions.
                        let children = new InlineTokenChildren(token);
                        for (let { token: child, column, lineNumber } of children) {
                            // Only plain text can contain an unfenced keyword.
                            if (child.type === "text") {
                                let exactCaseMatch = child.content.match(keywordsRegex);
                                if (exactCaseMatch != null) {
                                    let match = exactCaseMatch[0];
                                    // "Expected" is the fenced form, "Actual"
                                    // the same text unfenced.
                                    onError({
                                        lineNumber,
                                        detail: `Expected \`${match}\`. Actual ${match}.`,
                                        range: [column + exactCaseMatch.index, match.length]
                                    })
                                }
                            }
                        }
                    }
            }
        }
    }
};


================================================
FILE: .github/workflows/rules/md103.js
================================================
"use strict";

module.exports = {
  "names": [ "MD103", "inline triple backticks" ],
  "description": "inline triple backticks",
  "tags": [ "backticks" ],
  "function": function rule(params, onError) {
    for (const inline of params.tokens.filter(function filterToken(token) {
      return token.type === "inline";
    })) {
        const index = inline.content.toLowerCase().indexOf("```");
        if (index !== -1) {
          onError({
            "lineNumber": inline.lineNumber,
            "context": inline.content.substr(index - 1, 4),
            "detail": "Expected `. Actual ```"
          });
        }
      }
  }
};


================================================
FILE: .github/workflows/rules/md104.js
================================================
"use strict";

module.exports = {
  names: ["MD104", "one line per sentence"],
  description: "one line (and only one line) per sentence",
  tags: ["sentences"],
  function: function rule(params, onError) {
    for (const inline of params.tokens.filter(function filterToken(token) {
      return token.type === "inline";
    })) {
      var actual_lines = inline.content.split("\n");
      actual_lines.forEach((line, index, arr) => {
		let outside = true;
		let count = 0;
		Array.from(line).forEach((char) => {
			if ((char == "." || char == "?" || char == "!" || char == ";" || char == ":") && outside) {
				count++;
			}
			if (char == "`") outside = !outside;
			if (char == "[") outside = false;
			if (char == "(") outside = false;
			if (char == "]") outside = true;
			if (char == ")") outside = true;
		});
        if (count > 1) {
          onError({
            lineNumber: inline.lineNumber + index,
            detail:
              "Expected one sentence per line. Multiple end of sentence punctuation signs found on one line!",
          });
        }
      });
    }
  },
};


================================================
FILE: .github/workflows/rules/rules.js
================================================
"use strict";

const rules = [
	require("./md101.js"),
	require("./md102.js"),
	require("./md103.js"),
	require("./md104.js"),
];
module.exports = rules;


================================================
FILE: .gitignore
================================================
*~
*.o
*.zip
*.rar
*.tar
*gz
*bz2
*.obj
*.a
*.so
*.lib
*.dll
*.swp
*.swo
tags
TAGS
*.exe
*.class
*.jar
*.pyc
*.log
*.bin
core
.DS_STORE


================================================
FILE: .gitmodules
================================================


================================================
FILE: LICENSE
================================================
BSD 3-Clause License

Copyright (c) 2016, North Carolina State University and University POLITEHNICA
of Bucharest.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its
   contributors may be used to endorse or promote products derived from
   this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


================================================
FILE: README.md
================================================
# SandBlaster: Reversing the Apple Sandbox

SandBlaster is a tool for reversing (decompiling) binary Apple sandbox profiles. Apple sandbox profiles are written in SBPL (*Sandbox Profile Language*), a Scheme-like language, and are then compiled into an undocumented binary format and shipped. Primarily used on iOS, sandbox profiles are present on macOS as well. SandBlaster is, to our knowledge, the first tool that reverses binary sandbox profiles to their original SBPL format. SandBlaster works on iOS from version 4 onwards, including iOS 12.

The technical report [SandBlaster: Reversing the Apple Sandbox](https://arxiv.org/abs/1608.04303) presents extensive (though a bit outdated) information on SandBlaster internals.

SandBlaster relied on previous work by [Dionysus Blazakis](https://github.com/dionthegod/XNUSandbox) and Stefan Esser's [code](https://github.com/sektioneins/sandbox_toolkit) and [slides](https://www.slideshare.net/i0n1c/ruxcon-2014-stefan-esser-ios8-containers-sandboxes-and-entitlements).

The reverser (in the `reverse-sandbox/` folder) and the helper tool (in the `helpers/` folder) run on any Python running platform.

SandBlaster may be installed and run standalone, though we recommend installing and running it from within [iExtractor](https://github.com/malus-security/iExtractor). Check the [iExtractor documentation](https://github.com/malus-security/iExtractor/blob/master/README.md) for information.

iExtractor is open source software released under the 3-clause BSD license.

## Installation

SandBlaster requires Python2 for the reverser (in `reverse-sandbox/`), Python3 with `lief` library for helper script (in `helpers/`).

After cloning the SandBlaster repository, you have to install `lief` for Python3:
```
pip3 install lief
```

If the installation of `lief` fails, you need to compile it yourself. More information about how to compile it can be found on the [wiki page](https://lief.quarkslab.com/doc/stable/compilation.html).

## Usage

In order to use SandBlaster you need access to the binary sandbox profiles and the sandbox operations, a set of strings that define sandbox-specific actions. Sandbox operations and sandbox profiles are extracted using the `helpers/extract_sandbox_data.py` script. Sandbox profiles are extracted from the kernel sandbox extension (as a bundle for iOS 4 and 9-11) or from kernel cache (as a bundle for iOS 12) or from the `sandboxd` file in the iOS filesystem (for iOS 5-8). Sandbox operations are extracted either from kernel extension (for iOS 4-11) or from kernel cache (for iOS 12).

So, as input data, SandBlaster requires the kernelcache, the kernel sandbox extension and the `sandboxd` file. Information and scripts on extracting them from a publicly available IPSW (*iPhone Software*) file is presented by [iExtractor](https://github.com/malus-security/iExtractor).

Below are the steps and commands to reverse the sandbox profiles for iOS 8.4.1, assuming the sandbox kernel extension (`com.apple.security.sandbox.kext`) and the `sandboxd` file are available:

```
# Extract sandbox operations from kernelcache.
cd helpers/
./extract_sandbox_data.py -o iPad2,1_8.4.1_12H321.sb_ops iPad2,1_8.4.1_12H321.com.apple.security.sandox.kext 8.4.1
# Extract binary sandbox profile files from sandboxd.
mkdir iPad2,1_8.4.1_12H321.sandbox_profiles
./extract_sandbox_data.py -O iPad2,1_8.4.1_12H321.sandbox_profiles/ iPad2,1_8.4.1_12H321.sandboxd 8.4.1
# Reverse all binary sandbox profiles.
cd ../reverse-sandbox/
mkdir iPad2,1_8.4.1_12H321.reversed_profiles
for i in ../helpers/iPad2,1_8.4.1_12H321.sandbox_profiles/*; do python reverse_sandbox.py -r 8.4.1 -o ../helpers/iPad2,1_8.4.1_12H321.sb_ops -d iPad2,1_8.4.1_12H321.reversed_profiles/ "$i"; done
```

Below are the steps and commands to reverse the sandbox profiles for iOS 9.3, assuming the sandbox kernel extension (`com.apple.security.sandbox.kext`) is available:

```
# Extract sandbox operations from kernelcache.
cd helpers/
./extract_sandbox_data.py -o iPhone5,1_9.3_13E237.sb_ops iPhone5,1_9.3_13E237.com.apple.security.sandox.kext 9.3
# Extract sandbox profile bundle from kernel sandbox extension.
./extract_sandbox_data.py -O . iPhone5,1_9.3_13E237.com.apple.security.sandox.kext 9.3
cd ../reverse-sandbox/
# Reverse all binary sandbox profiles in sandbox bundle.
mkdir iPhone5,1_9.3_13E237.reversed_profiles
# Print all sandbox profiles in bundle.
python reverse_sandbox.py -r 9.3 -o ../helpers/iPhone5,1_9.3_13E237.sb_ops -d iPhone5,1_9.3_13E237.reversed_profiles/ ../helpers/sandbox_bundle -psb
# Do actual reversing.
python reverse_sandbox.py -r 9.3 -o ../helpers/iPhone5,1_9.3_13E237.sb_ops -d iPhone5,1_9.3_13E237.reversed_profiles/ ../helpers/sandbox_bundle
```

The extraction of the binary sandbox profiles differs between iOS <= 8 and iOS >= 9. Since iOS >= 9 the binary sandbox profiles are stored in a sandbox bundle in the kernel sandbox extension. The `helpers/extract_sandbox_data.py` script extracts them appropriately depending on the iOS version.

The `-psb` option for `reverse_sandbox.py` prints out the sandbox profiles part of a sandbox bundle without doing the actual reversing.

The `reverse_sandbox.py` script needs to be run in its directory (`reverse-sandbox/`) since it needs the other Python modules and the `logger.config` file.

## Internals

The `helpers/` subfolder contains helper scripts that provide a nicer interface for the external tools.

The actual reverser is part of the `reverse-sandbox/` folder. Files here can be categorized as follows:

  * The main script is `reverse_sandbox.py`. It parses the command line arguments, does basic parsing of the input binary file (extracts sections) and calls the appropriate functions from the other modules.
  * The core of the implementation is `operation_node.py`. It provides functions to build the rules graph corresponding to the sandbox profile and to convert the graph to SBPL. It is called by `reverse_sandbox.py`.
  * Sandbox filters (i.e. match rules inside sandbox profiles) are handled by the implementation in `sandbox_filter.py` and the configuration in `filters.json`, `filter_list.py` and `filters.py`. Filter specific functions are called by `operation_node.py`.
  * Regular expression reversing is handled by `sandbox_regex.py` and `regex_parse.py`. `regex_parse.py` is the back end parser that converts the binary representation to a basic graph. `sandbox_regex.py` converts the graph representation (an automaton) to an actual regular expression (i.e. a string of characters and metacharacters). It is called by `reverse_sandbox.py` for parsing regular expressions, with the resulting regular expression list being passed to the functions exposed by `operation_node.py`; `operation_node.py` passes them on to sandbox filter handling files.
  * The new format for storing strings since iOS 10 is handled by `reverse_string.py`. The primary `SandboxString` class in `reverse_string.py` is used in `sandbox_filter.py`.
  * Logging is configured in the `logger.config` file. By default, `INFO` and higher level messages are printed to the console, while `DEBUG` and higher level messages are printed to the `reverse.log` file.

## Supported iOS Versions

SandBlaster works for iOS version 4 onwards including iOS 12. Apple has been making updates to the binary format of the sandbox profiles: since iOS 9 sandbox profiles are stored in a bundle, since iOS 10 strings are aggregated together in a specialized binary format. iOS 11 didn't bring any change to the format.

## Community

Join us on [Discord](https://discord.gg/m3gjuyHYw9) for live discussions.


================================================
FILE: helpers/extract_sandbox_data.py
================================================
#!/usr/bin/env python3

import sys
import argparse
import struct
import lief

# Mach-O section names used when locating sandbox profile data:
#   __cstring - C string literals, e.g. profile names (in the __TEXT segment)
#   __const   - constant tables (in the __DATA segment)
#   __data    - structures describing profile content (in the __DATA segment)
CSTRING_SECTION = '__cstring'
CONST_SECTION = '__const'
DATA_SECTION = '__data'


def binary_get_word_size(binary: lief.MachO.Binary):
    """Gets the word size of the given binary

    The Mach-O binary has 'magic' bytes. These bytes can be used for checking
    whether the binary is 32bit or 64bit.
    Note: iOS 4 and 5 are different to the other sandbox profiles as they have
    no magic values.

    Args:
        binary: A sandbox profile in its binary form.

    Returns:
        4: for 32bit MachO binaries
        8: for 64bit MachO binaries

    Raises:
        AssertionError: If the magic value is neither MAGIC nor MAGIC_64.
    """

    # Reject anything that is not a plain 32-bit or 64-bit Mach-O image.
    assert (binary.header.magic in
            [lief.MachO.MACHO_TYPES.MAGIC, lief.MachO.MACHO_TYPES.MAGIC_64])

    return 4 if binary.header.magic == lief.MachO.MACHO_TYPES.MAGIC else 8


def unpack(bytes_list):
    """Unpacks a little-endian byte sequence into an integer.

    The data is stored little endian, hence the '<' prefix. Four-byte
    input is decoded as an unsigned 32-bit value ('I'); anything else is
    decoded as an unsigned 64-bit value ('Q').

    Args:
        bytes_list: A packed list of bytes (length 4 or 8).

    Returns:
        The unpacked 'higher-order' integer equivalent.
    """

    fmt = '<I' if len(bytes_list) == 4 else '<Q'
    return struct.unpack(fmt, bytes(bytes_list))[0]


def binary_get_string_from_address(binary: lief.MachO.Binary, vaddr: int):
    """Returns the NUL-terminated string at a virtual address of a MachO binary.

    Note: The virtual address must be in the CSTRING section.

    Args:
        binary: A sandbox profile in its binary form.
        vaddr: An address.

    Returns:
        A string with the content stored at the given virtual address, or
        None when the address is outside the __cstring section or a byte
        cannot be read.

    Raises:
        Nothing: read failures inside lief are caught and reported as None.
    """

    section = get_section_from_segment(binary, "__TEXT", CSTRING_SECTION)
    if not is_vaddr_in_section(vaddr, section):
        return None

    # Read one byte at a time until the terminating NUL. Collect characters
    # in a list and join once: avoids both shadowing the builtin `str`
    # (the previous local name) and quadratic string concatenation.
    chars = []
    while True:
        try:
            byte = binary.get_content_from_virtual_address(vaddr, 1)
        except (Exception,):
            return None

        if byte is None or len(byte) == 0:
            return None

        byte = byte[0]
        if byte == 0:
            break

        vaddr += 1
        chars.append(chr(byte))

    return ''.join(chars)


def untag_pointer(tagged_pointer):
    """Strips the iOS 12 pointer tag and restores the canonical 0xffff prefix.

    On iOS 12 the first 16 bits (MSB) of a kernelcache pointer store extra
    information ("tagged pointers"). Clearing those bits and forcing them
    to 0xffff yields the traditional pointer form.
    More information can be found here:
    https://bazad.github.io/2018/06/ios-12-kernelcache-tagged-pointers/

    Args:
        tagged_pointer: a pointer with the first 16 bits used to store extra
                        information.

    Returns:
        A pointer with the 'tag' removed and starting with 0xffff
        (the traditional way).
    """

    low_48_bits = tagged_pointer & ((1 << 48) - 1)
    return low_48_bits | (0xffff << 48)


# NOTE(review): annotation changed from lief.MachO.FatBinary to
# lief.MachO.Binary — every caller in this file passes a MachO.Binary;
# confirm no FatBinary caller exists elsewhere.
def get_section_from_segment(binary: lief.MachO.Binary,
                             segment_name: str, section_name: str):
    """This can be used for retrieving const, cstring and data sections.
    Const section contains two tables: one with the names of the sandbox
    profile and one with the content of the sandbox profile.
    This section is in the __DATA segment.

    Constant string section (cstring) contains the names of the profiles.
    This section is in the __TEXT segment.

    Data section contains the structures describing the content of the
    profiles and the content itself.
    This section is in the __DATA segment.

    Args:
        binary: A sandbox profile in its binary form.
        segment_name: The segment name (can be __DATA or __TEXT).
        section_name: The section name (can be CSTRING_SECTION, CONST_SECTION,
                      DATA_SECTION, all of them are macros)

    Returns:
        A binary section with the name given, or None when the segment does
        not exist.

    Raises:
        AssertionError: If the segment exists but does not contain exactly
            one section with the given name.
    """

    seg = binary.get_segment(segment_name)

    if seg:
        # Exactly one section with this name is expected inside the segment.
        sects = [s for s in seg.sections if s.name == section_name]
        assert len(sects) == 1
        return sects[0]

    return None


def get_xref(binary: lief.MachO.Binary, vaddr: int):
    """Custom cross reference implementation which supports tagged pointers
    from iOS 12. Searches for pointers in the given MachO binary to the given
    virtual address.

    Args:
        binary: A sandbox profile in its binary form.
        vaddr: An address.

    Returns:
        A list with all the virtual addresses of pointers to the given
        virtual address.
    """

    ans = []
    word_size = binary_get_word_size(binary)

    for sect in binary.sections:
        # Truncate to a whole number of words, then decode each word.
        content = sect.content[:len(sect.content) - len(sect.content) % word_size]
        content = [unpack(content[i:i + word_size])
                   for i in range(0, len(content), word_size)]

        # 64-bit kernelcaches (iOS 12) may store tagged pointers; normalize
        # them before comparing against the target address.
        if word_size == 8:
            content = [untag_pointer(p) for p in content]

        ans.extend((sect.virtual_address + i * word_size
                    for i, p in enumerate(content) if p == vaddr))

    return ans


def get_tables_section(binary: lief.MachO.Binary):
    """Searches for the section containing the sandbox operations table and
    the sandbox binary profiles for older versions of iOS.

    Strategy, in order:
      1. locate the 'default' C string and follow cross references to it;
         the const-like section holding those references is the answer;
      2. otherwise fall back to the __const section of the __DATA segment;
      3. finally fall back to the first __const section found anywhere.

    Args:
        binary: A sandbox profile in its binary form.

    Returns:
        A binary section.
    """

    str_sect = get_section_from_segment(binary, "__TEXT", CSTRING_SECTION)
    strs = str_sect.search_all('default\x00')

    if len(strs) > 0:
        vaddr_str = str_sect.virtual_address + strs[0]
        xref_vaddrs = get_xref(binary, vaddr_str)

        if len(xref_vaddrs) > 0:
            # All references are expected to live in one const-like section.
            sects = [binary.section_from_virtual_address(x) for x in xref_vaddrs]
            sects = [s for s in sects if 'const' in s.name.lower()]
            assert len(sects) >= 1 and all([sects[0] == s for s in sects])
            return sects[0]

    seg = binary.get_segment('__DATA')
    if seg:
        sects = [s for s in seg.sections if s.name == CONST_SECTION]
        assert len(sects) <= 1

        if len(sects) == 1:
            return sects[0]

    return binary.get_section(CONST_SECTION)


def is_vaddr_in_section(vaddr, section):
    """Checks if given virtual address is inside given section.

    Args:
        vaddr: A virtual address.
        section: A section of the binary (must expose `virtual_address`
            and `size` attributes).

    Returns:
        True: if the address is inside the section
        False: Otherwise
    """

    start = section.virtual_address
    return start <= vaddr < start + section.size


def unpack_pointer(addr_size, binary, vaddr):
    """Unpacks a pointer and untags it if it is necessary.

    Args:
        addr_size: The size of an address in bytes (4 or 8).
        binary: A sandbox profile in its binary form.
        vaddr: The virtual address to read the pointer from.

    Returns:
        A pointer.
    """

    ptr = unpack(
        binary.get_content_from_virtual_address(vaddr, addr_size))
    # 64-bit pointers may be tagged (iOS 12); normalize to the 0xffff form.
    if addr_size == 8:
        ptr = untag_pointer(ptr)
    return ptr


def extract_data_tables_from_section(binary: lief.MachO.Binary, to_data, section):
    """ Generic implementation of table search. A table is formed of adjacent
    pointers to data.

    The section is scanned word by word; a word whose `to_data` result is
    not None starts a table, which then extends over every immediately
    following word that also decodes to valid data.

    Args:
        binary: A sandbox profile in its binary form.
        to_data: Function that checks if the data is valid. This function
                 returns None for invalid data and anything else otherwise.
        section: A section of the binary.

    Returns:
            An array of tables (arrays of data).
    """

    addr_size = binary_get_word_size(binary)
    start_addr = section.virtual_address
    end_addr = section.virtual_address + section.size
    tables = []
    vaddr = start_addr

    while vaddr <= end_addr - addr_size:
        ptr = unpack_pointer(addr_size, binary, vaddr)

        data = to_data(binary, ptr)
        if data is None:
            vaddr += addr_size
            continue

        # Found the first entry of a table; collect the immediately
        # following valid entries.
        table = [data]
        vaddr += addr_size

        while vaddr <= end_addr - addr_size:
            ptr = unpack_pointer(addr_size, binary, vaddr)

            data = to_data(binary, ptr)
            if data is None:
                break

            table.append(data)
            vaddr += addr_size

        # Keep only distinct tables.
        if table not in tables:
            tables.append(table)

        # Skip past the word that terminated this table.
        vaddr += addr_size

    return tables


def extract_string_tables(binary: lief.MachO.Binary):
    """Extracts string tables from the given MachO binary.

    Args:
        binary: A sandbox profile in its binary form.

    Returns:
        The string tables.
    """

    section = get_tables_section(binary)
    return extract_data_tables_from_section(
        binary, binary_get_string_from_address, section)


def extract_separated_profiles(binary, string_tables):
    """Extract separated profiles from given MachO binary. It requires all
    string tables. This function is intended to be used for older version
    of iOS(<=7) because in newer versions the sandbox profiles are bundled.

    Args:
        binary: A sandbox profile in its binary form.
        string_tables: The extracted string tables.

    Returns:
        A zip object with (name, content) pairs of the profiles.
    """

    def get_profile_names():
        """Extracts the profile names.

            Returns:
                A list with the names of the sandbox profiles.
        """

        def transform(arr):
            # A genuine name table holds more than a few entries.
            if len(arr) <= 3:
                return None

            ans = []
            tmp = []
            for val in arr:
                # 'default' and the hex alphabet act as separators between
                # candidate name lists inside one string table.
                if val in ['default', '0123456789abcdef']:
                    ans.append(tmp)
                    tmp = []
                else:
                    tmp.append(val)
            ans.append(tmp)
            return ans

        def get_sol(posible):
            # The real name table always contains sandboxd's own profile.
            ans = [arr for arr in posible
                   if 'com.apple.sandboxd' in arr]
            assert len(ans) == 1
            return ans[0]

        profile_names_v = [transform(v) for v in string_tables]
        profile_names_v = [v for v in profile_names_v if v is not None]
        profile_names_v = [x for v in profile_names_v for x in v]
        return get_sol(profile_names_v)

    def get_profile_contents():
        """Extracts the profile contents.

            Returns:
                 The contents of the sandbox profiles.
        """

        def get_profile_content(binary, vaddr):
            addr_size = binary_get_word_size(binary)
            section = get_section_from_segment(binary, "__DATA", DATA_SECTION)

            if not is_vaddr_in_section(vaddr, section):
                return None

            # Each table entry is a (data pointer, size) pair.
            data = binary.get_content_from_virtual_address(vaddr, 2 * addr_size)
            if len(data) != 2 * addr_size:
                return None

            data_vaddr = unpack(data[:addr_size])
            size = unpack(data[addr_size:])
            # Bug fix: validate the pointed-to address (data_vaddr); the
            # original re-checked vaddr, already validated above.
            if not is_vaddr_in_section(data_vaddr, section):
                return None

            data = binary.get_content_from_virtual_address(data_vaddr, size)
            if len(data) != size:
                return None
            return bytes(data)

        contents_v = [v for v in
                      extract_data_tables_from_section(binary,
                                                       get_profile_content,
                                                       get_tables_section(binary))
                      if len(v) > 3]

        assert len(contents_v) == 1
        return contents_v[0]

    profile_names = get_profile_names()
    profile_contents = get_profile_contents()

    assert len(profile_names) == len(profile_contents)
    return zip(profile_names, profile_contents)


def extract_sbops(string_tables):
    """Extracts the sandbox operation names from the given string tables.
    Whether the profiles live in sandboxd or in the sandbox kernel
    extension, the operations always live in the kernel extension, laid
    out like the separated profiles but with a single (name) table.

    Args:
        string_tables: The binary's string tables.

    Returns:
        The list of sandbox operation names.
    """

    def candidates(table):
        # Operation lists are long and always begin at a 'default' entry;
        # emit one candidate suffix per occurrence of 'default'.
        if len(table) <= 3:
            return None
        return [table[pos:]
                for pos, name in enumerate(table) if name == 'default']

    def pick(options):
        assert len(options) >= 1

        result = []
        if len(options) > 1:
            # Several candidates: keep only their common prefix.
            limit = min(len(opt) for opt in options)
            for column in zip(*[opt[:limit] for opt in options]):
                if any(name != column[0] for name in column):
                    break
                result.append(column[0])
        else:
            # Single candidate: take names until a known non-operation
            # string shows up.
            result.append(options[0][0])
            for name in options[0][1:]:
                if name in ['HOME', 'default']:
                    break
                result.append(name)

        return result

    found = [candidates(table) for table in string_tables]
    found = [v for v in found if v is not None and v != []]
    flat = [item for sub in found for item in sub]

    return pick(flat)


def get_ios_major_version(version: str):
    """Extracts the major iOS version from a full version string.

    Args:
        version: A string with the 'full' version (e.g. '13.4.1').

    Returns:
        An integer with the major iOS version.
    """

    major, _, _rest = version.partition('.')
    return int(major)


def findall(searching, pattern):
    """Yields every occurrence of a pattern inside a string.

    Args:
        searching: The string (or bytes) to scan.
        pattern: The substring to look for.

    Returns:
        A generator over the start indexes of all (possibly overlapping)
        occurrences of pattern in searching.
    """

    start = 0
    while True:
        idx = searching.find(pattern, start)
        if idx == -1:
            return
        yield idx
        # Advance by one so overlapping matches are also reported.
        start = idx + 1


def check_regex(data: bytes, base_index: int, ios_version: int):
    """Checks whether the regular expression blob (from a sandbox profile)
    starting at offset base_index inside data is valid, for newer versions
    of iOS(>=8).

    Args:
        data: An array of bytes.
        base_index: The starting index.
        ios_version: An integer representing the iOS version.

    Returns:
        True: if the regular expression is valid for iOS version >= 8.
        False: otherwise.
    """

    # A full header must fit inside the data.
    if base_index + 0x10 > len(data):
        return False

    # iOS 13 shrank the size field from 4 bytes to 2; the version field
    # (big-endian) and the sub-size field follow immediately after it.
    if ios_version >= 13:
        size = int.from_bytes(data[base_index:base_index + 0x2], 'little')
        version = int.from_bytes(data[base_index + 0x2:base_index + 0x6], 'big')
        sub_off = base_index + 0x6
    else:
        size = int.from_bytes(data[base_index:base_index + 0x4], 'little')
        version = int.from_bytes(data[base_index + 0x4:base_index + 0x8], 'big')
        sub_off = base_index + 0x8

    # Sanity bounds: plausible size and the whole blob inside data.
    if size > 0x1000 or size < 0x8 or base_index + size + 4 > len(data):
        return False

    if version != 3:
        return False

    sub_size = int.from_bytes(data[sub_off:sub_off + 0x2], 'little')
    # The inner size must be consistent with the outer one.
    return size == sub_size + 6


def unpack_for_newer_ios(base_index, count, data):
    """Unpacking for newer iOS versions (>= 13).

    Args:
        base_index: The starting index of the bundle header.
        count: Bundle size.
        data: An array of bytes.
    Returns:
        The new base index (just past the operation nodes) and the offset
        of the regex-offset table.
    """

    re_offset = base_index + 12
    # Header layout: counts of op nodes, operations and profiles as
    # little-endian u16 at +2/+4/+6, then two u8 table counts at +10/+11.
    op_nodes, sb_ops, sb_profiles = struct.unpack_from(
        '<3H', data, base_index + 2)
    global_tables, debug_tables = struct.unpack_from(
        '<2B', data, base_index + 10)

    table_words = count + global_tables + debug_tables
    profile_words = (2 + sb_ops) * sb_profiles
    # Advance past the header, the u16 tables, the per-profile records
    # and the 8-byte operation nodes (plus a trailing u32).
    new_base = (base_index + 12
                + 2 * table_words
                + 2 * profile_words
                + 8 * op_nodes
                + 4)

    return new_base, re_offset


def check_bundle(data: bytes, base_index: int, ios_version: int):
    """Checks if the sandbox profile bundle at offset base_index from data
    is valid for the given ios_version. Note that sandbox profile bundles are
    used for newer versions of iOS(>=8).

    Args:
        data: An array of bytes.
        base_index: The starting index.
        ios_version: An integer representing the iOS version.

    Returns:
        True: if the sandbox profile bundle is valid.
        False: otherwise.
    """

    # A plausible bundle header needs at least 50 bytes of data.
    if len(data) - base_index < 50:
        return False
    # Two little-endian u16 fields right after the 2-byte magic; their
    # meaning depends on the iOS version (see below).
    re_offset, aux = struct.unpack('<2H', data[base_index + 2:base_index + 6])

    # Derive the regex count for each on-disk layout generation.
    if ios_version >= 13:
        count = struct.unpack('<H', data[base_index + 8:base_index + 10])[0]
        if count < 0x10:
            return False
    elif ios_version >= 12:
        # iOS 12 stores a word span; each word covers 4 regex entries.
        count = (aux - re_offset) * 4
        # bundle should be big
        if count < 0x10:
            return False
    else:
        count = aux

    # Reject implausible counts and offsets inside the header area.
    if count > 0x1000 or re_offset < 0x10:
        return False

    if ios_version >= 13:
        # iOS 13+ needs the full header decoded to locate the regex table.
        base_index, re_offset = unpack_for_newer_ios(base_index, count, data)

    else:
        # Older layouts store the table offset in 8-byte units.
        re_offset = base_index + re_offset * 8
        if len(data) - re_offset < count * 2:
            return False

    # Walk the table of u16 regex offsets and validate each referenced
    # regex blob; offsets are in 8-byte units relative to base_index.
    for off_index in range(re_offset, re_offset + 2 * count, 2):
        index = struct.unpack('<H', data[off_index:off_index + 2])[0]
        if index == 0:
            # Zero entries are only tolerated at the very end of the table.
            if off_index < re_offset + 2 * count - 4:
                return False
            continue

        index = base_index + index * 8

        if not check_regex(data, index, ios_version):
            return False

    return True


def extract_bundle_profiles(binary: lief.MachO.Binary, ios_version: int):
    """Extracts the sandbox profile bundle from the given MachO binary,
    extracted from a device running the provided iOS version.

    Args:
        binary: A sandbox profile in its binary form.
        ios_version: The major ios version.

    Returns:
        The sandbox profile bundle (bytes from its magic to section end).
    """

    matches = []
    for section in binary.sections:
        # Executable code cannot contain the bundle; skip it.
        if section.name == '__text':
            continue

        raw = bytes(section.content)
        # Candidate bundles start at the \x00\x80 magic marker.
        for start in findall(raw, b'\x00\x80'):
            if check_bundle(raw, start, ios_version):
                matches.append(raw[start:])

    # Exactly one valid bundle is expected in the whole binary.
    assert len(matches) == 1
    return matches[0]


def main(args):
    """Extracts sandbox operations and/or sandbox profiles according to
    the parsed command-line arguments.

    Args:
        args: Parsed arguments providing binary (lief MachO binary or fat
              binary), version (major iOS version), sbops_file (path, '-'
              for stdout, or None) and sbs_dir (directory path or None).

    Returns:
        0 on success, otherwise the errno of the last failed file write.
    """

    # A fat binary wrapper is accepted only if it holds a single slice.
    if isinstance(args.binary, lief.MachO.FatBinary):
        assert args.binary.size == 1
        binary = args.binary.at(0)
    else:
        binary = args.binary

    retcode = 0
    string_tables = extract_string_tables(binary)

    if args.sbops_file is not None:
        sbops = extract_sbops(string_tables)
        sbops_str = '\n'.join(sbops)
        if args.sbops_file == '-':
            print(sbops_str)
        else:
            try:
                with open(args.sbops_file, 'w') as file:
                    file.write(sbops_str + '\n')
            except IOError as exception:
                retcode = exception.errno
                print(exception, file=sys.stderr)

    if args.sbs_dir is not None:
        if args.version <= 8:
            # Older iOS ships one file per profile.
            profiles = extract_separated_profiles(binary, string_tables)
            for name, content in profiles:
                try:
                    with open(args.sbs_dir + '/' + name + '.sb.bin', 'wb') as file:
                        file.write(content)
                except IOError as exception:
                    retcode = exception.errno
                    print(exception, file=sys.stderr)
        else:
            # Newer iOS bundles every profile into one blob.
            content = extract_bundle_profiles(binary, args.version)
            try:
                with open(args.sbs_dir + '/sandbox_bundle', 'wb') as file:
                    file.write(content)
            except IOError as exception:
                retcode = exception.errno
                print(exception, file=sys.stderr)
    # Bug fix: return the exit code instead of calling exit() here; the
    # __main__ guard already does exit(main(args)), which was previously
    # dead code because main() terminated the process itself.
    return retcode


if __name__ == '__main__':
    # Command-line entry point: parse arguments and run the extractor.
    parser = argparse.ArgumentParser(
        description='Sandbox profiles and operations extraction tool(iOS <9)')
    # Fix: help text typo 'exenstion' -> 'extension'.
    parser.add_argument('binary', metavar='BINARY', type=lief.MachO.parse,
                        help='path to sandbox(seatbelt) kernel extension' +
                        '(iOS 4-12) in order to extract sandbox operations OR ' +
                        'path to sandboxd(iOS 5-8) / sandbox(seatbelt) kernel extension' +
                        '(iOS 4 and 9-12) in order to extract sandbox profiles')
    parser.add_argument('version', metavar='VERSION',
                        type=get_ios_major_version, help='iOS version for given binary')
    parser.add_argument('-o', '--output-sbops', dest='sbops_file', type=str,
                        default=None,
                        help='path to sandbox profile operations store file')
    parser.add_argument('-O', '--output-profiles', dest='sbs_dir', type=str,
                        default=None,
                        help='path to directory in which sandbox profiles should be stored')

    args = parser.parse_args()
    exit(main(args))


================================================
FILE: reverse-sandbox/filters/filters_ios11.json
================================================
{
    "0x01":{
        "name":"",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x02":{
        "name":"mount-relative",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x03":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x04":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x05":{
        "name":"ipc-posix-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x06":{
        "name":"global-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x07":{
        "name":"local-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x08":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x0a":{
        "name":"control-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x0b":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0c":{
        "name":"socket-type",
        "arg_process_fn":"get_filter_arg_socket_type"
    },
    "0x0d":{
        "name":"socket-protocol",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x0e":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0f":{
        "name":"fsctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x10":{
        "name":"ioctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x11":{
        "name":"iokit-user-client-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x12":{
        "name":"iokit-property",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x13":{
        "name":"iokit-connection",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x14":{
        "name":"device-major",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x15":{
        "name":"device-minor",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x16":{
        "name":"device-conforms-to",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x17":{
        "name":"extension",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x18":{
        "name":"extension-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x19":{
        "name":"appleevent-destination",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1a":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    },
    "0x1b":{
        "name":"right-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1c":{
        "name":"preference-domain",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1d":{
        "name":"vnode-type",
        "arg_process_fn":"get_filter_arg_vnode_type"
    },
    "0x1e":{
        "name":"require-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x1f":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x20":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x21":{
        "name":"kext-bundle-id",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x22":{
        "name":"info-type",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x23":{
        "name":"notification-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x24":{
        "name":"notification-payload",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x25":{
        "name":"semaphore-owner",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x26":{
        "name":"sysctl-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x27":{
        "name":"process-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x28":{
        "name":"rootless-boot-device-filter",
        "arg_process_fn":"get_none"
    },
    "0x29":{
        "name":"rootless-file-filter",
        "arg_process_fn":"get_none"
    },
    "0x2a":{
        "name":"rootless-disk-filter",
        "arg_process_fn":"get_none"
    },
    "0x2b":{
        "name":"rootless-proc-filter",
        "arg_process_fn":"get_none"
    },
    "0x2c":{
        "name":"privilege-id",
        "arg_process_fn":"get_filter_arg_privilege_id"
    },
    "0x2d":{
        "name":"process-attribute",
        "arg_process_fn":"get_filter_arg_process_attribute"
    },
    "0x2e":{
        "name":"uid",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x2f":{
        "name":"nvram-variable",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x30":{
        "name":"csr",
        "arg_process_fn":"get_filter_arg_csr"
    },
    "0x31":{
        "name":"host-special-port",
        "arg_process_fn":"get_filter_arg_host_port"
    },
    "0x81":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x82":{
        "name":"mount-relative-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x83":{
        "name":"xattr-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x85":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x86":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x87":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x91":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x92":{
        "name":"iokit-property-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x93":{
        "name":"iokit-connection-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x98":{
        "name":"extension-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x99":{
        "name":"appleevent-destination-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9b":{
        "name":"right-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9c":{
        "name":"preference-domain-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa0":{
        "name":"entitlement-value-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa1":{
        "name":"kext-bundle-id-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa3":{
        "name":"notification-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa6":{
        "name":"sysctl-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa7":{
        "name":"process-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    }
}

================================================
FILE: reverse-sandbox/filters/filters_ios12.json
================================================
{
    "0x01":{
        "name":"",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x02":{
        "name":"mount-relative-literal",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x03":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x04":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x05":{
        "name":"ipc-posix-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x06":{
        "name":"global-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x07":{
        "name":"local-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x08":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x0a":{
        "name":"control-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x0b":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0c":{
        "name":"socket-type",
        "arg_process_fn":"get_filter_arg_socket_type"
    },
    "0x0d":{
        "name":"socket-protocol",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x0e":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0f":{
        "name":"fsctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x10":{
        "name":"ioctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x11":{
        "name":"iokit-user-client-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x12":{
        "name":"iokit-property",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x13":{
        "name":"iokit-connection",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x14":{
        "name":"device-major",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x15":{
        "name":"device-minor",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x16":{
        "name":"device-conforms-to",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x17":{
        "name":"extension",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x18":{
        "name":"extension-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x19":{
        "name":"appleevent-destination",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1a":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    },
    "0x1b":{
        "name":"right-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1c":{
        "name":"preference-domain",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1d":{
        "name":"vnode-type",
        "arg_process_fn":"get_filter_arg_vnode_type"
    },
    "0x1e":{
        "name":"require-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x1f":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x20":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x21":{
        "name":"kext-bundle-id",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x22":{
        "name":"info-type",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x23":{
        "name":"notification-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x24":{
        "name":"notification-payload",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x25":{
        "name":"semaphore-owner",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x26":{
        "name":"sysctl-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x27":{
        "name":"process-path",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x28":{
        "name":"rootless-boot-device-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x29":{
        "name":"rootless-disk-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x2a":{
        "name":"privilege-id",
        "arg_process_fn":"get_filter_arg_privilege_id"
    },
    "0x2b":{
        "name":"process-attribute",
        "arg_process_fn":"get_filter_arg_process_attribute"
    },
    "0x2c":{
        "name":"uid",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x2d":{
        "name":"nvram-variable",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x2e":{
        "name":"csr",
        "arg_process_fn":"get_filter_arg_csr"
    },
    "0x2f":{
        "name":"host-special-port",
        "arg_process_fn":"get_filter_arg_host_port"
    },
    "0x30":{
        "name":"filesystem-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x31":{
        "name":"boot-arg",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x32":{
        "name":"xpc-service-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x33":{
        "name":"signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x34":{
        "name":"signal-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x35":{
        "name":"target-signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x36":{
        "name":"reboot-flags",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x37":{
        "name":"datavault-disk-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x38":{
        "name":"extension-path-ancestor",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x39":{
        "name":"file-attribute",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3a":{
        "name":"storage-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x3b":{
        "name":"storage-class-extension",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x3c":{
        "name":"iokit-usb-interface-class",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3d":{
        "name":"iokit-usb-interface-subclass",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3e":{
        "name":"ancestor-signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x3f":{
        "name":"frequire-ancestor-with-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x81":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x82":{
        "name":"mount-relative-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x83":{
        "name":"xattr-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x85":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x86":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x87":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x8a":{
        "name":"control-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x91":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x92":{
        "name":"iokit-property-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x93":{
        "name":"iokit-connection-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x98":{
        "name":"extension-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x99":{
        "name":"appleevent-destination-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9b":{
        "name":"right-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9c":{
        "name":"preference-domain-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa0":{
        "name":"entitlement-value-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa1":{
        "name":"kext-bundle-id-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa2":{
        "name":"info-type-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa3":{
        "name":"notification-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa6":{
        "name":"sysctl-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa7":{
        "name":"process-path-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xad":{
        "name":"nvram-variable-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb0":{
        "name":"filesystem-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb1":{
        "name":"boot-arg-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb2":{
        "name":"xpc-service-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb3":{
        "name":"signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb5":{
        "name":"target-signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xbe":{
        "name":"ancestor-signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    }
}


================================================
FILE: reverse-sandbox/filters/filters_ios13.json
================================================
{
    "0x01":{
        "name":"",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x02":{
        "name":"mount-relative-literal",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x03":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x04":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x05":{
        "name":"ipc-posix-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x06":{
        "name":"global-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x07":{
        "name":"local-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x08":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x0a":{
        "name":"control-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x0b":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0c":{
        "name":"socket-type",
        "arg_process_fn":"get_filter_arg_socket_type"
    },
    "0x0d":{
        "name":"socket-protocol",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x0e":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0f":{
        "name":"fsctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x10":{
        "name":"ioctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x11":{
        "name":"iokit-user-client-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x12":{
        "name":"iokit-property",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x13":{
        "name":"iokit-connection",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x14":{
        "name":"device-major",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x15":{
        "name":"device-minor",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x16":{
        "name":"device-conforms-to",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x17":{
        "name":"extension",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x18":{
        "name":"extension-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x19":{
        "name":"appleevent-destination",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1a":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    },
    "0x1b":{
        "name":"right-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1c":{
        "name":"preference-domain",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1d":{
        "name":"vnode-type",
        "arg_process_fn":"get_filter_arg_vnode_type"
    },
    "0x1e":{
        "name":"require-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x1f":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x20":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x21":{
        "name":"kext-bundle-id",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x22":{
        "name":"info-type",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x23":{
        "name":"notification-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x24":{
        "name":"notification-payload",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x25":{
        "name":"semaphore-owner",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x26":{
        "name":"sysctl-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x27":{
        "name":"process-path",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x28":{
        "name":"rootless-boot-device-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x29":{
        "name":"rootless-disk-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x2a":{
        "name":"privilege-id",
        "arg_process_fn":"get_filter_arg_privilege_id"
    },
    "0x2b":{
        "name":"process-attribute",
        "arg_process_fn":"get_filter_arg_process_attribute"
    },
    "0x2c":{
        "name":"uid",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x2d":{
        "name":"nvram-variable",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x2e":{
        "name":"csr",
        "arg_process_fn":"get_filter_arg_csr"
    },
    "0x2f":{
        "name":"host-special-port",
        "arg_process_fn":"get_filter_arg_host_port"
    },
    "0x30":{
        "name":"filesystem-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x31":{
        "name":"boot-arg",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x32":{
        "name":"xpc-service-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x33":{
        "name":"signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x34":{
        "name":"signal-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x35":{
        "name":"target-signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x36":{
        "name":"reboot-flags",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x37":{
        "name":"datavault-disk-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x38":{
        "name":"extension-path-ancestor",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x39":{
        "name":"file-attribute",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3a":{
        "name":"storage-class",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x3b":{
        "name":"storage-class-extension",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x3c":{
        "name":"iokit-usb-interface-class",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3d":{
        "name":"iokit-usb-interface-subclass",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3e":{
        "name":"ancestor-signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x3f":{
        "name":"require-ancestor-with-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x40":{
        "name":"persona-type",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x41":{
        "name":"syscall-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x42":{
        "name":"syscall-mask",
        "arg_process_fn":"get_none"
    },
    "0x43":{
        "name":"require-target-with-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x44":{
        "name":"iokit-registry-entry-attribute",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x45":{
        "name":"user-intent-extension",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x46":{
        "name":"snapshot-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x81":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x82":{
        "name":"mount-relative-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x83":{
        "name":"xattr-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x85":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x86":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x87":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x8a":{
        "name":"control-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x91":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x92":{
        "name":"iokit-property-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x93":{
        "name":"iokit-connection-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x98":{
        "name":"extension-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x99":{
        "name":"appleevent-destination-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9b":{
        "name":"right-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9c":{
        "name":"preference-domain-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa0":{
        "name":"entitlement-value-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa1":{
        "name":"kext-bundle-id-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa2":{
        "name":"info-type-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa3":{
        "name":"notification-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa6":{
        "name":"sysctl-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa7":{
        "name":"process-path-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xad":{
        "name":"nvram-variable-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb0":{
        "name":"filesystem-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb1":{
        "name":"boot-arg-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb2":{
        "name":"xpc-service-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb3":{
        "name":"signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb5":{
        "name":"target-signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xbe":{
        "name":"ancestor-signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xc6":{
        "name":"snapshot-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    }
}


================================================
FILE: reverse-sandbox/filters/filters_ios14.json
================================================
{
    "0x01":{
        "name":"",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x02":{
        "name":"mount-relative-literal",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x03":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x04":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x05":{
        "name":"ipc-posix-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x06":{
        "name":"global-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x07":{
        "name":"local-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x08":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x0a":{
        "name":"control-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x0b":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0c":{
        "name":"socket-type",
        "arg_process_fn":"get_filter_arg_socket_type"
    },
    "0x0d":{
        "name":"socket-protocol",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x0e":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0f":{
        "name":"fsctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x10":{
        "name":"ioctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x11":{
        "name":"iokit-user-client-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x12":{
        "name":"iokit-property",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x13":{
        "name":"iokit-connection",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x14":{
        "name":"device-major",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x15":{
        "name":"device-minor",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x16":{
        "name":"device-conforms-to",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x17":{
        "name":"extension",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x18":{
        "name":"extension-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x19":{
        "name":"appleevent-destination",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1a":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    },
    "0x1b":{
        "name":"right-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1c":{
        "name":"preference-domain",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1d":{
        "name":"vnode-type",
        "arg_process_fn":"get_filter_arg_vnode_type"
    },
    "0x1e":{
        "name":"require-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x1f":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x20":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x21":{
        "name":"kext-bundle-id",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x22":{
        "name":"info-type",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x23":{
        "name":"notification-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x24":{
        "name":"notification-payload",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x25":{
        "name":"semaphore-owner",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x26":{
        "name":"sysctl-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x27":{
        "name":"process-path",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x28":{
        "name":"rootless-boot-device-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x29":{
        "name":"rootless-disk-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x2a":{
        "name":"privilege-id",
        "arg_process_fn":"get_filter_arg_privilege_id"
    },
    "0x2b":{
        "name":"process-attribute",
        "arg_process_fn":"get_filter_arg_process_attribute"
    },
    "0x2c":{
        "name":"uid",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x2d":{
        "name":"nvram-variable",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x2e":{
        "name":"csr",
        "arg_process_fn":"get_filter_arg_csr"
    },
    "0x2f":{
        "name":"host-special-port",
        "arg_process_fn":"get_filter_arg_host_port"
    },
    "0x30":{
        "name":"filesystem-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x31":{
        "name":"boot-arg",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x32":{
        "name":"xpc-service-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x33":{
        "name":"signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x34":{
        "name":"signal-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x35":{
        "name":"target-signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x36":{
        "name":"reboot-flags",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x37":{
        "name":"datavault-disk-filter",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x38":{
        "name":"extension-path-ancestor",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x39":{
        "name":"file-attribute",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3a":{
        "name":"storage-class",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x3b":{
        "name":"storage-class-extension",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x3c":{
        "name":"iokit-usb-interface-class",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3d":{
        "name":"iokit-usb-interface-subclass",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x3e":{
        "name":"ancestor-signing-identifier",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x3f":{
        "name":"require-ancestor-with-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x40":{
        "name":"persona-type",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x41":{
        "name":"syscall-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x42":{
        "name":"syscall-mask",
        "arg_process_fn":"get_none"
    },
    "0x43":{
        "name":"require-target-with-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x44":{
        "name":"iokit-registry-entry-attribute",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x45":{
        "name":"user-intent-extension",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x46":{
        "name":"snapshot-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x47":{
        "name":"mach-derived-port-role",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x48":{
        "name":"message-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x49":{
        "name":"message-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x4a":{
        "name":"iokit-method-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x4b":{
        "name":"iokit-trap-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x4c":{
        "name":"machtrap-number",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x4d":{
        "name":"machtrap-mask",
        "arg_process_fn":"get_none"
    },
    "0x4e":{
        "name":"kernel-mig-routine",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x4f":{
        "name":"kernel-mig-routine-mask",
        "arg_process_fn":"get_none"
    },
    "0x81":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x82":{
        "name":"mount-relative-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x83":{
        "name":"xattr-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x85":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x86":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x87":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x8a":{
        "name":"control-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x91":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x92":{
        "name":"iokit-property-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x93":{
        "name":"iokit-connection-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x98":{
        "name":"extension-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x99":{
        "name":"appleevent-destination-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9b":{
        "name":"right-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9c":{
        "name":"preference-domain-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa0":{
        "name":"entitlement-value-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa1":{
        "name":"kext-bundle-id-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa2":{
        "name":"info-type-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa3":{
        "name":"notification-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa6":{
        "name":"sysctl-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xa7":{
        "name":"process-path-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xad":{
        "name":"nvram-variable-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb0":{
        "name":"filesystem-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb1":{
        "name":"boot-arg-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb2":{
        "name":"xpc-service-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb3":{
        "name":"signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xb5":{
        "name":"target-signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xbe":{
        "name":"ancestor-signing-identifier-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xc6":{
        "name":"snapshot-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xc7":{
        "name":"mach-derived-port-role-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0xc9":{
        "name":"message-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    }
}


================================================
FILE: reverse-sandbox/filters/filters_ios4.json
================================================
{
    "0x01":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x02":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x03":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x04":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x05":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x06":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x07":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x08":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0a":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0b":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x0c":{
        "name":"extension",
        "arg_process_fn":"get_none"
    },
    "0x0d":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    }
}


================================================
FILE: reverse-sandbox/filters/filters_ios5.json
================================================
{
    "0x01":{
        "name":"",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x02":{
        "name":"mount-relative",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x04":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x05":{
        "name":"ipc-posix-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x06":{
        "name":"global-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x07":{
        "name":"local-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x08":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x0a":{
        "name":"control-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x0b":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0c":{
        "name":"socket-type",
        "arg_process_fn":"get_filter_arg_socket_type"
    },
    "0x0d":{
        "name":"socket-protocol",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x0e":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0f":{
        "name":"iokit-user-client-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x10":{
        "name":"iokit-property",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x11":{
        "name":"iokit-connection",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x12":{
        "name":"extension",
        "arg_process_fn":"get_none"
    },
    "0x13":{
        "name":"mach-extension",
        "arg_process_fn":"get_none"
    },
    "0x14":{
        "name":"appleevent-destination",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x15":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    },
    "0x16":{
        "name":"right-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x81":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x82":{
        "name":"mount-relative-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x83":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x85":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x86":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x87":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x8a":{
        "name":"control-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x8f":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x90":{
        "name":"iokit-property-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x91":{
        "name":"iokit-connection-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x93":{
        "name":"extension-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x94":{
        "name":"appleevent-destination-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x96":{
        "name":"right-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    }
}


================================================
FILE: reverse-sandbox/filters/filters_ios6.json
================================================
{
    "0x01":{
        "name":"",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x02":{
        "name":"mount-relative",
        "arg_process_fn":"get_filter_arg_string_by_offset_with_type"
    },
    "0x04":{
        "name":"file-mode",
        "arg_process_fn":"get_filter_arg_octal_integer"
    },
    "0x05":{
        "name":"ipc-posix-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x06":{
        "name":"global-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x07":{
        "name":"local-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x08":{
        "name":"local",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x09":{
        "name":"remote",
        "arg_process_fn":"get_filter_arg_network_address"
    },
    "0x0a":{
        "name":"control-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x0b":{
        "name":"socket-domain",
        "arg_process_fn":"get_filter_arg_socket_domain"
    },
    "0x0c":{
        "name":"socket-type",
        "arg_process_fn":"get_filter_arg_socket_type"
    },
    "0x0d":{
        "name":"socket-protocol",
        "arg_process_fn":"get_filter_arg_integer"
    },
    "0x0e":{
        "name":"target",
        "arg_process_fn":"get_filter_arg_owner"
    },
    "0x0f":{
        "name":"fsctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x10":{
        "name":"ioctl-command",
        "arg_process_fn":"get_filter_arg_ctl"
    },
    "0x11":{
        "name":"iokit-user-client-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x12":{
        "name":"iokit-property",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x13":{
        "name":"iokit-connection",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x14":{
        "name":"extension",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x15":{
        "name":"extension-class",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x16":{
        "name":"appleevent-destination",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x17":{
        "name":"debug-mode",
        "arg_process_fn":"get_none"
    },
    "0x18":{
        "name":"right-name",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x19":{
        "name":"preference-domain",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1a":{
        "name":"tty",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x1b":{
        "name":"require-entitlement",
        "arg_process_fn":"get_filter_arg_string_by_offset_no_skip"
    },
    "0x1c":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_boolean"
    },
    "0x1d":{
        "name":"entitlement-value",
        "arg_process_fn":"get_filter_arg_string_by_offset"
    },
    "0x81":{
        "name":"regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x82":{
        "name":"mount-relative-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x83":{
        "name":"xattr",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x85":{
        "name":"ipc-posix-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x86":{
        "name":"global-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x87":{
        "name":"local-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x8a":{
        "name":"control-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x91":{
        "name":"iokit-user-client-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x92":{
        "name":"iokit-property-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x93":{
        "name":"iokit-connection-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x95":{
        "name":"extension-class-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x96":{
        "name":"appleevent-destination-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x98":{
        "name":"right-name-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x99":{
        "name":"preference-domain-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    },
    "0x9d":{
        "name":"entitlement-value-regex",
        "arg_process_fn":"get_filter_arg_regex_by_id"
    }
}


================================================
FILE: reverse-sandbox/filters.py
================================================
import json

def read_filters(file_path):
    """Load a filter-definition JSON file.

    The JSON object maps hexadecimal filter-id strings (e.g. "0x1d") to
    filter metadata dicts; the returned dict is keyed by the integer
    value of each id so callers can look up raw filter ids directly.

    file_path -- path to a filters_iosN.json file
    Returns a dict mapping int filter id -> metadata dict.
    """
    filters = {}
    with open(file_path) as data:
        # JSON keys are always strings, so json.load() gives us hex
        # strings like "0x81" directly; int(key, 16) converts them.
        for key, value in json.load(data).items():
            filters[int(key, 16)] = value

    return filters


class Filters(object):
    """Static registry of sandbox filter tables, one per iOS generation.

    Each table is loaded from its JSON file once, when the class body is
    executed at import time. get_filters() selects the table for a given
    iOS major version; versions between known generations fall back to
    the next-newer table (e.g. iOS 7-11 all use the iOS 11 table).
    """

    filters_ios4 = read_filters('filters/filters_ios4.json')
    filters_ios5 = read_filters('filters/filters_ios5.json')
    filters_ios6 = read_filters('filters/filters_ios6.json')
    filters_ios11 = read_filters('filters/filters_ios11.json')
    filters_ios12 = read_filters('filters/filters_ios12.json')
    filters_ios13 = read_filters('filters/filters_ios13.json')
    filters_ios14 = read_filters('filters/filters_ios14.json')

    @staticmethod
    def get_filters(ios_major_version):
        """Return the filter table for the given iOS major version."""
        for ceiling, table_name in ((4, 'filters_ios4'),
                                    (5, 'filters_ios5'),
                                    (6, 'filters_ios6'),
                                    (11, 'filters_ios11'),
                                    (12, 'filters_ios12'),
                                    (13, 'filters_ios13')):
            if ios_major_version <= ceiling:
                return getattr(Filters, table_name)
        return Filters.filters_ios14

    @staticmethod
    def exists(ios_major_version, id):
        """True when the filter id is defined for this iOS version."""
        return id in Filters.get_filters(ios_major_version)

    @staticmethod
    def get(ios_major_version, id):
        """Return the metadata for the filter id, or None if unknown."""
        return Filters.get_filters(ios_major_version).get(id, None)


================================================
FILE: reverse-sandbox/logger.config
================================================
[loggers]
keys=root

[logger_root]
level=NOTSET
handlers=file,screen

[formatters]
keys=simple,complex

[formatter_simple]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s

[formatter_complex]
format=%(asctime)s - %(name)s - %(levelname)s - (%(module)s:%(lineno)d) - %(message)s

[handlers]
keys=file,screen

[handler_screen]
class=StreamHandler
formatter=simple
level=INFO
args=(sys.stderr,)

[handler_file]
class=logging.FileHandler
formatter=complex
level=DEBUG
args=('reverse.log',)


================================================
FILE: reverse-sandbox/operation_node.py
================================================
#!/usr/bin/python3

import sys
import struct
import re
import logging
import logging.config

# Configure logging for this module from the shared logger.config file
# (INFO to the console, DEBUG to reverse.log per that config).
logging.config.fileConfig("logger.config")
logger = logging.getLogger(__name__)

class TerminalNode():
    """Allow or Deny end node in binary sandbox format

    A terminal node, when reached, either denies or allows the rule.
    A node has a type (allow or deny) and a set of flags. Flags are
    currently unused.
    """

    TERMINAL_NODE_TYPE_ALLOW = 0x00
    TERMINAL_NODE_TYPE_DENY = 0x01
    type = None   # TERMINAL_NODE_TYPE_ALLOW or TERMINAL_NODE_TYPE_DENY
    flags = None  # remaining bits of the decision byte; currently unused

    def __eq__(self, other):
        # Two terminal nodes are equal when both decision and flags match.
        return (self.type, self.flags) == (other.type, other.flags)

    def __str__(self):
        if self.is_allow():
            return "allow"
        if self.is_deny():
            return "deny"
        return "unknown"

    def is_allow(self):
        return self.type == self.TERMINAL_NODE_TYPE_ALLOW

    def is_deny(self):
        return self.type == self.TERMINAL_NODE_TYPE_DENY


class NonTerminalNode():
    """Intermediary node consisting of a filter to match

    The non-terminal node, when matched, points to a new node, and
    when unmatched, to another node.

    A non-terminal node consists of the filter to match, its argument and
    the match and unmatch nodes.

    The three string renderings (str_debug, __str__, str_not) share the
    same argument-normalisation rules, factored into _transform_item /
    _transform_scalar / _format_list below; they differ only in the
    per-line output format.
    """

    filter_id = None        # numeric filter type from the binary profile
    filter = None           # human-readable filter name, set by convert_filter()
    argument_id = None      # numeric argument reference from the binary profile
    argument = None         # decoded argument: a string or a list of strings
    match_offset = None     # raw offset of the node reached on match
    match = None            # OperationNode reached on match
    unmatch_offset = None   # raw offset of the node reached on unmatch
    unmatch = None          # OperationNode reached on unmatch

    def __eq__(self, other):
        return self.filter_id == other.filter_id and self.argument_id == other.argument_id and self.match_offset == other.match_offset and self.unmatch_offset == other.unmatch_offset

    def simplify_list(self, arg_list):
        """Collapse pairs like "/a" and "/a/" into a single "/a/^^^" entry.

        The "/^^^" marker is later rendered as a subpath filter by
        _transform_item(). Empty strings are dropped.
        """
        result_list = []
        for a in arg_list:
            if len(a) == 0:
                continue
            tmp_list = list(result_list)
            match_found = False
            for r in tmp_list:
                if len(r) == 0:
                    continue
                if a == r or a+"/" == r or a == r+"/":
                    match_found = True
                    result_list.remove(r)
                    if a[-1] == '/':
                        result_list.append(a + "^^^")
                    else:
                        result_list.append(a + "/^^^")
            if match_found == False:
                result_list.append(a)

        return result_list

    def _transform_item(self, s):
        """Normalise one element of a list argument.

        Returns (filter_name, value): empty strings become a catch-all
        regex, the "/^^^" marker becomes a subpath filter, regex
        metacharacters promote the filter to its -regex variant, and
        "${...}" variables promote it to its -prefix variant.
        """
        curr_filter = self.filter
        if len(s) == 0:
            # Empty argument: match anything via a regex filter.
            s = ".+"
            if self.filter == "literal":
                curr_filter = "regex"
            else:
                curr_filter += "-regex"
        else:
            if s[-4:] == "/^^^":
                # Marker added by simplify_list(): this is a subpath.
                curr_filter = "subpath"
                s = s[:-4]
            if '\\' in s or '|' in s or ('[' in s and ']' in s) or '+' in s:
                # Regex metacharacters present: switch to a regex filter.
                if curr_filter == "subpath":
                    s = s + "/?"
                if self.filter == "literal":
                    curr_filter = "regex"
                else:
                    curr_filter += "-regex"
                s = s.replace('\\\\.', '[.]')
                s = s.replace('\\.', '[.]')
            if "${" in s and "}" in s:
                # Shell-style variable reference: use the prefix variant.
                curr_filter += "-prefix"
        return (curr_filter, s)

    def _transform_scalar(self):
        """Normalise a plain string argument; returns (filter_name, value)."""
        s = self.argument
        curr_filter = self.filter
        if not "regex" in curr_filter:
            if '\\' in s or '|' in s or ('[' in s and ']' in s) or '+' in s:
                if self.filter == "literal":
                    curr_filter = "regex"
                else:
                    curr_filter += "-regex"
                s = s.replace('\\\\.', '[.]')
                s = s.replace('\\.', '[.]')
        if "${" in s and "}" in s:
            if not "prefix" in curr_filter:
                curr_filter += "-prefix"
        return (curr_filter, s)

    def _format_list(self, header, format_item):
        """Render a list argument.

        format_item(filter_name, value) must return one line ending in a
        newline. The lines are wrapped in `header ... )` unless the
        (possibly simplified) list has a single element. Note: this
        method may replace self.argument with its simplified form, as
        the original code did.
        """
        if len(self.argument) == 1:
            ret_str = ""
        else:
            self.argument = self.simplify_list(self.argument)
            if len(self.argument) == 1:
                ret_str = ""
            else:
                ret_str = header
        for item in self.argument:
            (curr_filter, s) = self._transform_item(item)
            ret_str += format_item(curr_filter, s)
        if len(self.argument) == 1:
            return ret_str[:-1]
        return ret_str[:-1] + ")"

    def str_debug(self):
        """Verbose rendering including raw match/unmatch offsets."""
        if self.filter:
            if self.argument:
                if type(self.argument) is list:
                    def format_item(curr_filter, s):
                        if "regex" in curr_filter:
                            return '(%04x, %04x) (%s #"%s")\n' % (self.match_offset, self.unmatch_offset, curr_filter, s)
                        return '(%s "%s")\n' % (curr_filter, s)
                    return self._format_list("(require-any ", format_item)
                (curr_filter, s) = self._transform_scalar()
                return "(%04x, %04x) (%s %s)" % (self.match_offset, self.unmatch_offset, curr_filter, s)
            return "(%04x, %04x) (%s)" % (self.match_offset, self.unmatch_offset, self.filter)
        return "(%02x %04x %04x %04x)" % (self.filter_id, self.argument_id, self.match_offset, self.unmatch_offset)

    def __str__(self):
        if self.filter:
            if self.argument:
                if type(self.argument) is list:
                    def format_item(curr_filter, s):
                        if "regex" in curr_filter:
                            return '(%s #"%s")\n' % (curr_filter, s)
                        return '(%s "%s")\n' % (curr_filter, s)
                    return self._format_list("(require-any ", format_item)
                (curr_filter, s) = self._transform_scalar()
                return "(%s %s)" % (curr_filter, s)
            return "(%s)" % (self.filter)
        return "(%02x %04x %04x %04x)" % (self.filter_id, self.argument_id, self.match_offset, self.unmatch_offset)

    def str_not(self):
        """Negated rendering: list items are wrapped in (require-not ...)."""
        if self.filter:
            if self.argument:
                if type(self.argument) is list:
                    def format_item(curr_filter, s):
                        if "regex" in curr_filter:
                            return '(require-not (%s #"%s"))\n' % (curr_filter, s)
                        return '(require-not (%s "%s"))\n' % (curr_filter, s)
                    return self._format_list("(require-all ", format_item)
                (curr_filter, s) = self._transform_scalar()
                # NOTE(review): the scalar case was never wrapped in
                # (require-not ...) in the original code, unlike the list
                # case; kept as-is to preserve output byte-for-byte.
                return "(%s %s)" % (curr_filter, s)
            return "(%s)" % (self.filter)
        return "(%02x %04x %04x %04x)" % (self.filter_id, self.argument_id, self.match_offset, self.unmatch_offset)

    def values(self):
        """Return (filter, argument) if converted, else hex id strings."""
        if self.filter:
            return (self.filter, self.argument)
        return ("%02x" % self.filter_id, "%04x" % (self.argument_id))

    def is_entitlement_start(self):
        return self.filter_id == 0x1e or self.filter_id == 0xa0

    def is_entitlement(self):
        return self.filter_id == 0x1e or self.filter_id == 0x1f or self.filter_id == 0x20 or self.filter_id == 0xa0

    def is_last_regular_expression(self):
        # NOTE(review): num_regex is a module-level global set elsewhere
        # in this file.
        return self.filter_id == 0x81 and self.argument_id == num_regex-1

    def convert_filter(self, convert_fn, f, regex_list, ios_major_version,
            keep_builtin_filters, global_vars, base_addr):
        """Resolve filter_id/argument_id to (filter, argument) via convert_fn."""
        (self.filter, self.argument) = convert_fn(f, ios_major_version,
            keep_builtin_filters, global_vars, regex_list, self.filter_id,
            self.argument_id, base_addr)

    def is_non_terminal_deny(self):
        # Note: returns None (not False) when the shape does not match,
        # preserving the original implicit-return behavior.
        if self.match.is_non_terminal() and self.unmatch.is_terminal():
            return self.unmatch.terminal.is_deny()

    def is_non_terminal_allow(self):
        if self.match.is_non_terminal() and self.unmatch.is_terminal():
            return self.unmatch.terminal.is_allow()

    def is_non_terminal_non_terminal(self):
        return self.match.is_non_terminal() and self.unmatch.is_non_terminal()

    def is_allow_non_terminal(self):
        if self.match.is_terminal() and self.unmatch.is_non_terminal():
            return self.match.terminal.is_allow()

    def is_deny_non_terminal(self):
        if self.match.is_terminal() and self.unmatch.is_non_terminal():
            return self.match.terminal.is_deny()

    def is_deny_allow(self):
        if self.match.is_terminal() and self.unmatch.is_terminal():
            return self.match.terminal.is_deny() and self.unmatch.terminal.is_allow()

    def is_allow_deny(self):
        if self.match.is_terminal() and self.unmatch.is_terminal():
            return self.match.terminal.is_allow() and self.unmatch.terminal.is_deny()


class OperationNode():
    """A rule item in the binary sandbox profile

    It may either be a terminal node (end node) or a non-terminal node
    (intermediary node). Each node type uses another class, as defined
    above.
    """

    OPERATION_NODE_TYPE_NON_TERMINAL = 0x00
    OPERATION_NODE_TYPE_TERMINAL = 0x01
    offset = None        # node index inside the operation-nodes area
    raw = []             # the raw 8-byte record, as a sequence of ints
    type = None          # raw[0]: one of the OPERATION_NODE_TYPE_* values
    terminal = None      # TerminalNode, set by parse_terminal()
    non_terminal = None  # NonTerminalNode, set by parse_non_terminal()

    def __init__(self, offset):
        self.offset = offset

    def is_terminal(self):
        return self.type == self.OPERATION_NODE_TYPE_TERMINAL

    def is_non_terminal(self):
        return self.type == self.OPERATION_NODE_TYPE_NON_TERMINAL

    def parse_terminal(self, ios_major_version):
        """Decode self.raw as a terminal (allow/deny) node.

        The decision byte lives at raw[2] before iOS 12 and at raw[1]
        from iOS 12 on; bit 0 is the decision, the other bits are flags.
        """
        self.terminal = TerminalNode()
        self.terminal.parent = self
        decision_byte = self.raw[2 if ios_major_version < 12 else 1]
        self.terminal.type = decision_byte & 0x01
        self.terminal.flags = decision_byte & 0xfe

    def parse_non_terminal(self):
        """Decode self.raw as a non-terminal (filter) node.

        Layout: raw[1] filter id; raw[2:4] argument id; raw[4:6] match
        offset; raw[6:8] unmatch offset (all little endian).
        """
        self.non_terminal = NonTerminalNode()
        self.non_terminal.parent = self
        self.non_terminal.filter_id = self.raw[1]
        self.non_terminal.argument_id = self.raw[2] + (self.raw[3] << 8)
        self.non_terminal.match_offset = self.raw[4] + (self.raw[5] << 8)
        self.non_terminal.unmatch_offset = self.raw[6] + (self.raw[7] << 8)

    def parse_raw(self, ios_major_version):
        """Dispatch decoding based on the node-type byte raw[0]."""
        self.type = self.raw[0]
        if self.is_terminal():
            self.parse_terminal(ios_major_version)
        elif self.is_non_terminal():
            self.parse_non_terminal()

    def convert_filter(self, convert_fn, f, regex_list, ios_major_version,
            keep_builtin_filters, global_vars, base_addr):
        """Resolve filter/argument ids to names (non-terminal nodes only)."""
        if self.is_non_terminal():
            self.non_terminal.convert_filter(convert_fn, f, regex_list,
                ios_major_version, keep_builtin_filters, global_vars, base_addr)

    def str_debug(self):
        """Verbose rendering including the node offset, for debug logs."""
        ret = "(%02x) " % (int)(self.offset)
        if self.is_terminal():
            ret += "terminal: "
            ret += str(self.terminal)
        if self.is_non_terminal():
            ret += "non-terminal: "
            ret += str(self.non_terminal)
        return ret

    def __str__(self):
        ret = ""
        if self.is_terminal():
            ret += str(self.terminal)
        if self.is_non_terminal():
            ret += str(self.non_terminal)
        return ret

    def str_not(self):
        """Like __str__, but uses the negated form for non-terminal nodes."""
        ret = ""
        if self.is_terminal():
            ret += str(self.terminal)
        if self.is_non_terminal():
            ret += self.non_terminal.str_not()
        return ret

    def values(self):
        """Return (filter, argument) for non-terminal nodes, else (None, None)."""
        if self.is_terminal():
            return (None, None)
        else:
            return self.non_terminal.values()

    def __eq__(self, other):
        return self.raw == other.raw

    def __hash__(self):
        # struct.unpack() requires a bytes-like object in Python 3; the
        # previous ''.join(chr(v) ...) produced a str and raised
        # TypeError. bytes() accepts the sequence of ints directly.
        return struct.unpack('<I', bytes(self.raw[:4]))[0]


# Operation nodes processed so far; build_operation_node_graph() appends
# to this and has_been_processed() queries it to detect jump targets.
processed_nodes = []

# Number of regular expressions.
# NOTE(review): set elsewhere in this module; consumed by
# NonTerminalNode.is_last_regular_expression().
num_regex = 0

# Operation nodes offset: byte position where the operation-node records
# begin (0 for iOS <= 12, the current file position otherwise; see
# build_operation_nodes()).
operations_offset = 0


def has_been_processed(node):
    """True when a graph was already built for node (it is a jump target)."""
    # Reading a module-level global needs no `global` declaration.
    return node in processed_nodes


def build_operation_node(raw, offset, ios_major_version):
    """Create an OperationNode from one raw 8-byte record.

    Records are 8 bytes each, so the byte offset (relative to the start
    of the operation-nodes area) is divided by 8 to get the node index;
    match/unmatch offsets in the binary format reference these indices.
    Floor division keeps the index an int (true division produced a
    float under Python 3).
    """
    global operations_offset
    node = OperationNode((offset - operations_offset) // 8)
    node.raw = raw
    node.parse_raw(ios_major_version)
    return node


def build_operation_nodes(f, num_operation_nodes, ios_major_version):
    """Read num_operation_nodes 8-byte records from f and link them.

    Sets the module-level operations_offset (0 before iOS 13, the
    current file position from iOS 13 on) so node offsets are relative
    to the start of the operation-node area. After parsing, each
    non-terminal node's match/unmatch links are resolved by offset.

    Returns the list of OperationNode objects in file order.
    """
    global operations_offset
    operation_nodes = []

    if ios_major_version <= 12:
        operations_offset = 0
    else:
        operations_offset = f.tell()
    for i in range(num_operation_nodes):
        offset = f.tell()
        raw = struct.unpack("<8B", f.read(8))
        operation_nodes.append(build_operation_node(raw, offset,
            ios_major_version))

    # Fill match and unmatch fields for each node in operation_nodes.
    # Index nodes by offset once instead of the previous quadratic scan
    # over all node pairs; unresolved offsets leave the link as None,
    # exactly as before.
    nodes_by_offset = {node.offset: node for node in operation_nodes}
    for node in operation_nodes:
        if node.is_non_terminal():
            node.non_terminal.match = nodes_by_offset.get(
                node.non_terminal.match_offset, node.non_terminal.match)
            node.non_terminal.unmatch = nodes_by_offset.get(
                node.non_terminal.unmatch_offset, node.non_terminal.unmatch)

    return operation_nodes


def find_operation_node_by_offset(operation_nodes, offset):
    """Return the first node with the given offset, or None if absent."""
    matches = (node for node in operation_nodes if node.offset == offset)
    return next(matches, None)


def ong_mark_not(g, node, parent_node, nodes_to_process):
    """Invert node: set its "not" flag and swap the match/unmatch
    branches (both the node links and the raw offsets)."""
    g[node]["not"] = True
    nt = node.non_terminal
    nt.match, nt.unmatch = nt.unmatch, nt.match
    nt.match_offset, nt.unmatch_offset = nt.unmatch_offset, nt.match_offset


def ong_end_path(g, node, parent_node, nodes_to_process):
    """Mark node as a path terminator, recording the terminal decision
    of its match branch as the path's decision."""
    entry = g[node]
    entry["decision"] = str(node.non_terminal.match.terminal)
    entry["type"].add("final")


def ong_add_to_path(g, node, parent_node, nodes_to_process):
    """Extend node's own path with its match branch, unless that branch
    was already processed by an earlier operation."""
    match_node = node.non_terminal.match
    if has_been_processed(match_node):
        return
    g[node]["list"].add(match_node)
    nodes_to_process.add((node, match_node))


def ong_add_to_parent_path(g, node, parent_node, nodes_to_process):
    """Extend the parent's path with node's unmatch branch, unless that
    branch was already processed. A missing parent still queues the
    branch (with parent None), as in the original."""
    unmatch_node = node.non_terminal.unmatch
    if has_been_processed(unmatch_node):
        return
    if parent_node:
        g[parent_node]["list"].add(unmatch_node)
    nodes_to_process.add((parent_node, unmatch_node))


def build_operation_node_graph(node, default_node):
    """Build the path graph for a single operation rooted at node.

    default_node is the profile's default decision node (a terminal
    allow or deny); the branch handling below is mirrored depending on
    that default. Returns a dict mapping each reachable node to its
    metadata ({"list", "decision", "type", "reduce", "not"}), or None
    when node is terminal, the default is non-terminal, or node was
    already processed (it is then a jump target of an earlier
    operation).
    """
    if node.is_terminal():
        return None

    if default_node.is_non_terminal():
        return None

    # If node is non-terminal and has already been processed, then it's a jump rule to a previous operation.
    if has_been_processed(node):
        return None

    # Create operation node graph.
    g = {}
    nodes_to_process = set()
    nodes_to_process.add((None, node))
    while nodes_to_process:
        (parent_node, current_node) = nodes_to_process.pop()
        if not current_node in g.keys():
            g[current_node] = {"list": set(), "decision": None,
                "type": set(["normal"]), "reduce": None, "not": False}
        if not parent_node:
            # Only the root is queued without a parent.
            g[current_node]["type"].add("start")

        if default_node.terminal.is_deny():
            # In case of non-terminal match and deny as unmatch, add match to path.
            if current_node.non_terminal.is_non_terminal_deny():
                ong_add_to_path(g, current_node, parent_node, nodes_to_process)
            # In case of non-terminal match and allow as unmatch, do a not (reverse), end match path and add unmatch to parent path.
            elif current_node.non_terminal.is_non_terminal_allow():
                ong_mark_not(g, current_node, parent_node, nodes_to_process)
                ong_end_path(g, current_node, parent_node, nodes_to_process)
                ong_add_to_parent_path(g, current_node, parent_node, nodes_to_process)
            # In case of non-terminals, add match to path and unmatch to parent path.
            elif current_node.non_terminal.is_non_terminal_non_terminal():
                ong_add_to_path(g, current_node, parent_node, nodes_to_process)
                ong_add_to_parent_path(g, current_node, parent_node, nodes_to_process)
            # In case of allow as match and non-terminal unmatch, end path and add unmatch to parent path.
            elif current_node.non_terminal.is_allow_non_terminal():
                ong_end_path(g, current_node, parent_node, nodes_to_process)
                ong_add_to_parent_path(g, current_node, parent_node, nodes_to_process)
            # In case of deny as match and non-terminal unmatch, do a not (reverse), and add match to path.
            elif current_node.non_terminal.is_deny_non_terminal():
                ong_mark_not(g, current_node, parent_node, nodes_to_process)
                ong_add_to_path(g, current_node, parent_node, nodes_to_process)
            # In case of deny as match and allow as unmatch, do a not (reverse), and end match path (completely).
            elif current_node.non_terminal.is_deny_allow():
                ong_mark_not(g, current_node, parent_node, nodes_to_process)
                ong_end_path(g, current_node, parent_node, nodes_to_process)
            # In case of allow as match and deny as unmatch, end match path (completely).
            elif current_node.non_terminal.is_allow_deny():
                ong_end_path(g, current_node, parent_node, nodes_to_process)
        elif default_node.terminal.is_allow():
            # Mirror image of the deny-default handling above.
            # In case of non-terminal match and deny as unmatch, do a not (reverse), end match path and add unmatch to parent path.
            if current_node.non_terminal.is_non_terminal_deny():
                ong_mark_not(g, current_node, parent_node, nodes_to_process)
                ong_end_path(g, current_node, parent_node, nodes_to_process)
                ong_add_to_parent_path(g, current_node, parent_node, nodes_to_process)
            # In case of non-terminal match and allow as unmatch, add match to path.
            elif current_node.non_terminal.is_non_terminal_allow():
                ong_add_to_path(g, current_node, parent_node, nodes_to_process)
            # In case of non-terminals, add match to path and unmatch to parent path.
            elif current_node.non_terminal.is_non_terminal_non_terminal():
                ong_add_to_path(g, current_node, parent_node, nodes_to_process)
                ong_add_to_parent_path(g, current_node, parent_node, nodes_to_process)
            # In case of allow as match and non-terminal unmatch, do a not (reverse), and add match to path.
            elif current_node.non_terminal.is_allow_non_terminal():
                ong_mark_not(g, current_node, parent_node, nodes_to_process)
                ong_add_to_path(g, current_node, parent_node, nodes_to_process)
            # In case of deny as match and non-terminal unmatch, end path and add unmatch to parent path.
            elif current_node.non_terminal.is_deny_non_terminal():
                ong_end_path(g, current_node, parent_node, nodes_to_process)
                ong_add_to_parent_path(g, current_node, parent_node, nodes_to_process)
            # In case of deny as match and allow as unmatch, end match path (completely).
            elif current_node.non_terminal.is_deny_allow():
                ong_end_path(g, current_node, parent_node, nodes_to_process)
            # In case of allow as match and deny as unmatch, do a not (reverse), and end match path (completely).
            elif current_node.non_terminal.is_allow_deny():
                ong_mark_not(g, current_node, parent_node, nodes_to_process)
                ong_end_path(g, current_node, parent_node, nodes_to_process)

    processed_nodes.append(node)
    # Log the raw graph, then prune redundant edges and empty nodes
    # until a fixed point is reached.
    print_operation_node_graph(g)
    g = clean_edges_in_operation_node_graph(g)
    while True:
        (g, more) = clean_nodes_in_operation_node_graph(g)
        if more == False:
            break
    logger.debug("*** after cleaning nodes:")
    print_operation_node_graph(g)

    return g


def print_operation_node_graph(g):
    """Write a one-line-per-node textual dump of graph g to the debug log."""
    if not g:
        return
    chunks = []
    for node_iter, info in g.items():
        chunks.append("0x%x (%s) (%s) (decision: %s): [ " % (
            (int)(node_iter.offset), str(node_iter), info["type"], info["decision"]))
        for edge in info["list"]:
            chunks.append("0x%x (%s) " % ((int)(edge.offset), str(edge)))
        chunks.append("]\n")
    logger.debug("".join(chunks))


def remove_edge_in_operation_node_graph(g, node_start, node_end):
    """Delete the node_start -> node_end edge from g, if present."""
    # set.discard() is a no-op when the edge is absent, matching the
    # original membership-check-then-remove.
    g[node_start]["list"].discard(node_end)
    return g


def remove_node_in_operation_node_graph(g, node_to_remove):
    """Remove node_to_remove and all its incident edges from graph g.

    Returns the (mutated) graph.
    """
    # Iterate over a snapshot: remove_edge_in_operation_node_graph()
    # mutates this very set, and mutating a set while iterating over it
    # raises RuntimeError in Python 3.
    for n in list(g[node_to_remove]["list"]):
        g = remove_edge_in_operation_node_graph(g, node_to_remove, n)
    # Drop all incoming edges from the remaining nodes.
    node_list = list(g.keys())
    for n in node_list:
        if node_to_remove in g[n]["list"]:
            g = remove_edge_in_operation_node_graph(g, n, node_to_remove)
    del g[node_to_remove]
    return g


# Accumulators for get_operation_node_graph_paths(): all complete paths
# found so far, and the DFS path currently being explored.
paths = []
current_path = []


def _get_operation_node_graph_paths(g, node):
    """DFS helper: walk from node, appending to the module-level
    current_path and recording a copy into paths every time a node
    tagged "final" is reached."""
    global paths, current_path
    logger.debug("getting path for " + node.str_debug())
    current_path.append(node)
    logger.debug("current_path: [ "
                 + "".join(n.str_debug() + ", " for n in current_path)
                 + "]")
    if "final" in g[node]["type"]:
        paths.append(list(current_path))
    else:
        for successor in g[node]["list"]:
            _get_operation_node_graph_paths(g, successor)
    current_path.pop()


def get_operation_node_graph_paths(g, start_node):
    """Collect every path in g from start_node to a "final" node."""
    global paths, current_path
    paths, current_path = [], []
    _get_operation_node_graph_paths(g, start_node)
    return paths


# Nodes already visited by _remove_duplicate_node_edges(); reset by the
# caller (clean_edges_in_operation_node_graph) before each traversal.
nodes_traversed_for_removal = []
def _remove_duplicate_node_edges(g, node, start_list):
    # Recursively drop edges reachable from `node` that point back into
    # start_list. g is mutated in place; the `g = ...` rebinding below is
    # local only (remove_edge_in_operation_node_graph returns the same
    # dict it was given).
    global nodes_traversed_for_removal
    nodes_traversed_for_removal.append(node)

    # Snapshot the edge set before mutating it.
    nexts = list(g[node]["list"])
    for n in nexts:
        if n in start_list:
            g = remove_edge_in_operation_node_graph(g, node, n)
        else:
            if not n in nodes_traversed_for_removal:
                _remove_duplicate_node_edges(g, n, start_list)


def remove_duplicate_node_edges(g, start_list):
    """Run the duplicate-edge pruning pass from every node in start_list."""
    for start in start_list:
        logger.debug("removing from node: " + start.str_debug())
        _remove_duplicate_node_edges(g, start, start_list)


def clean_edges_in_operation_node_graph(g):
    """From the initial graph remove edges that are redundant.

    Three passes: (1) drop edges pointing back into start nodes,
    (2) prune duplicate edges reachable from each start node, and
    (3) compare all start-to-final paths pairwise; where a longer path
    ends in the same node as a shorter one, cut the longer path's edge
    at the first divergence point (walking backwards from the end).
    Returns the (mutated) graph.
    """
    global nodes_traversed_for_removal
    start_nodes = []
    final_nodes = []
    for node_iter in g.keys():
        if "start" in g[node_iter]["type"]:
            start_nodes.append(node_iter)
        if "final" in g[node_iter]["type"]:
            final_nodes.append(node_iter)

    # Remove edges to start nodes.
    for snode in start_nodes:
        for node_iter in g.keys():
            g = remove_edge_in_operation_node_graph(g, node_iter, snode)

    # Pass 2: breadth-style walk from each start node, pruning edges
    # that re-enter the current node's successor set.
    for snode in start_nodes:
        nodes_bag = [ snode ]
        while True:
            node = nodes_bag.pop()
            nodes_traversed_for_removal = []
            logger.debug("%%% going through " + node.str_debug())
            remove_duplicate_node_edges(g, g[node]["list"])
            nodes_bag.extend(g[node]["list"])
            if not nodes_bag:
                break

    # Traverse graph and built all paths. If end node and start node of
    # two or more paths are similar, remove edges.
    for snode in start_nodes:
        logger.debug("traversing node " + str(snode))
        paths = get_operation_node_graph_paths(g, snode)
        debug_message = "for start node " + str(snode) + str(" paths are")
        for p in paths:
            debug_message += "[ "
            for n in p:
                debug_message += n.str_debug() + " "
            debug_message += "]\n"
        logger.debug(debug_message)

        for i in range(0, len(paths)):
            for j in range(i+1, len(paths)):
                # Step over equal length paths.
                if len(paths[i]) == len(paths[j]):
                    continue
                elif len(paths[i]) < len(paths[j]):
                    p = paths[i]
                    q = paths[j]
                else:
                    p = paths[j]
                    q = paths[i]
                # If similar final nodes, remove edge.
                # NOTE(review): debug_message below is built but never
                # passed to logger.debug() - presumably a missing call.
                debug_message = ""
                debug_message += "short path: ["
                for n in p:
                    debug_message += str(n)
                debug_message += "]\n"
                debug_message += "long path: ["
                for n in q:
                    debug_message += str(n)
                debug_message += "]"
                if p[len(p)-1] == q[len(q)-1]:
                    # Walk both paths backwards from the shared final node
                    # and cut the long path's edge at the first divergence.
                    for k in range(0, len(p)):
                        if p[len(p)-1-k] == q[len(q)-1-k]:
                            continue
                        else:
                            g = remove_edge_in_operation_node_graph(g, q[len(q)-1-k], q[len(q)-k])
                            break


    return g


def clean_nodes_in_operation_node_graph(g):
    """Remove non-final nodes that have no successors from graph `g`.

    `g` maps node -> {"type": ..., "list": [successors], ...}.  A node that
    is not a final node yet has an empty successor list is dead and gets
    removed.  Returns a tuple (g, made_change) so callers can iterate until
    the graph is stable.
    """
    made_change = False
    node_list = list(g.keys())
    for node_iter in node_list:
        # Keep final (decision) nodes even when they have no successors.
        if "final" in g[node_iter]["type"]:
            continue
        # Keep nodes that still have outgoing edges.
        if g[node_iter]["list"]:
            continue
        # logger.warn() is a deprecated alias; use warning().  Also add the
        # missing separator space before the node representation.
        logger.warning("going to remove " + str(node_iter))
        made_change = True
        g = remove_node_in_operation_node_graph(g, node_iter)
    return (g, made_change)


# Module-level flag set by the ReducedVertice/ReducedGraph replace helpers
# to report back whether an in-place replacement actually happened.
replace_occurred = False

class ReducedVertice():
    """A vertex in the reduced rule graph built from sandbox operation nodes.

    The meaning of `value` depends on `type`:
      - TYPE_SINGLE: a single filter node (any object rendering as a filter),
      - TYPE_REQUIRE_ANY / TYPE_REQUIRE_ALL: a list of ReducedVertice,
      - TYPE_REQUIRE_ENTITLEMENT: a tuple (node, integrated_vertice),
      - TYPE_START: a synthetic start metanode (value is None).
    `decision` holds the rule decision (e.g. "allow"/"deny"); `is_not`
    marks a negated filter rendered as (require-not ...).
    """
    # Vertex type tags.
    TYPE_SINGLE = "single"
    TYPE_START = "start"
    TYPE_REQUIRE_ANY = "require-any"
    TYPE_REQUIRE_ALL = "require-all"
    TYPE_REQUIRE_ENTITLEMENT = "require-entitlement"
    # Class-level defaults; instances shadow these in __init__.
    type = TYPE_SINGLE
    is_not = False
    value = None
    decision = None

    def __init__(self, type=TYPE_SINGLE, value=None, decision=None, is_not=False):
        self.type = type
        self.value = value
        self.decision = decision
        self.is_not = is_not

    def set_value(self, value):
        # Setter for the vertex payload.
        self.value = value

    def set_type(self, type):
        # Setter for the vertex type tag.
        self.type = type

    def _replace_in_list(self, lst, old, new):
        """Replace the first element equal to `old` with `new` inside `lst`,
        recursing into elements whose own value is a list.  Sets the
        module-global `replace_occurred` flag when a replacement happens."""
        global replace_occurred
        # Iterate over a shallow copy so indices stay aligned with `lst`.
        tmp_list = list(lst)
        for i, v in enumerate(tmp_list):
            if isinstance(v.value, list):
                self._replace_in_list(v.value, old, new)
            else:
                if v == old:
                    lst[i] = new
                    replace_occurred = True
                    return

    def replace_in_list(self, old, new):
        """Replace `old` with `new` inside this vertex's list value, if any."""
        if isinstance(self.value, list):
            self._replace_in_list(self.value, old, new)

    def _replace_sublist_in_list(self, lst, old, new):
        """If every vertex in `old` occurs in `lst`, remove them all and
        append `new` (setting `replace_occurred`); otherwise recurse into
        nested list values.  NOTE(review): the else-branch returns at the
        first non-list element, which stops the scan early -- confirm this
        early exit is intended."""
        global replace_occurred
        all_found = True
        for v in old:
            if v not in lst:
                all_found = False
                break
        if all_found:
            for v in old:
                lst.remove(v)
            lst.append(new)
            replace_occurred = True
            return

        for i, v in enumerate(lst):
            if isinstance(v.value, list):
                self._replace_sublist_in_list(v.value, old, new)
            else:
                return

    def replace_sublist_in_list(self, old, new):
        """Replace the group of vertices `old` with `new` inside this
        vertex's list value, if any."""
        if isinstance(self.value, list):
            self._replace_sublist_in_list(self.value, old, new)

    def set_decision(self, decision):
        # Setter for the rule decision.
        self.decision = decision

    def set_type_single(self):
        self.type = self.TYPE_SINGLE

    def set_type_start(self):
        self.type = self.TYPE_START

    def set_type_require_entitlement(self):
        self.type = self.TYPE_REQUIRE_ENTITLEMENT

    def set_type_require_any(self):
        self.type = self.TYPE_REQUIRE_ANY

    def set_type_require_all(self):
        self.type = self.TYPE_REQUIRE_ALL

    def set_integrated_vertice(self, integrated_vertice):
        """Replace the integrated part of a require-entitlement vertex,
        whose value is a (node, integrated_vertice) tuple."""
        (n, i) = self.value
        self.value = (n, integrated_vertice)

    def is_type_single(self):
        return self.type == self.TYPE_SINGLE

    def is_type_start(self):
        return self.type == self.TYPE_START

    def is_type_require_entitlement(self):
        return self.type == self.TYPE_REQUIRE_ENTITLEMENT

    def is_type_require_all(self):
        return self.type == self.TYPE_REQUIRE_ALL

    def is_type_require_any(self):
        return self.type == self.TYPE_REQUIRE_ANY

    def recursive_str(self, level, recursive_is_not):
        """Render this vertex as SBPL (s-expression) text.

        `level` controls indentation of nested require-any/require-all
        blocks; `recursive_is_not` suppresses an extra (require-not ...)
        wrapper when an enclosing vertex already emitted one."""
        result_str = ""
        if self.is_type_single():
            if self.is_not and not recursive_is_not:
                value = str(self.value)
                # Nodes that expand to a require-any provide their own
                # negated rendering via str_not().
                if "(require-any" in value:
                    result_str = self.value.str_not()
                else:
                    result_str += "(require-not " + str(self.value) + ")"
            else:
                result_str += str(self.value)
        elif self.is_type_require_entitlement():
            ent_str = ""
            (n, i) = self.value
            if i == None:
                ent_str += str(n.value)
            else:
                # Drop the closing paren of the entitlement node and splice
                # in the integrated sub-expression before re-closing.
                ent_str += str(n.value)[:-1] + " "
                ent_str += i.recursive_str(level, self.is_not)
                ent_str += ")"
            if self.is_not:
                result_str += "(require-not " + ent_str + ")"
            else:
                result_str += ent_str
        else:
            # require-any / require-all: render children one per line.
            if level == 1:
                result_str += "\n" + 13*' '
            result_str += "(" + self.type
            level += 1
            for i, v in enumerate(self.value):
                if i == 0:
                    result_str += " " + v.recursive_str(level, recursive_is_not)
                else:
                    result_str += "\n" + 13*level*' ' + v.recursive_str(level, recursive_is_not)
            result_str += ")"
        return result_str

    def recursive_str_debug(self, level, recursive_is_not):
        """Like recursive_str() but uses the nodes' str_debug() rendering."""
        result_str = ""
        if self.is_type_single():
            if self.is_not and not recursive_is_not:
                result_str += "(require-not " + self.value.str_debug() + ")"
            else:
                result_str += self.value.str_debug()
        elif self.is_type_require_entitlement():
            ent_str = ""
            (n, i) = self.value
            if i == None:
                ent_str += n.value.str_debug()
            else:
                ent_str += n.value.str_debug()[:-1] + " "
                ent_str += i.recursive_str_debug(level, self.is_not)
                ent_str += ")"
            if self.is_not:
                result_str += "(require-not " + ent_str + ")"
            else:
                result_str += ent_str
        else:
            if level == 1:
                result_str += "\n" + 13*' '
            result_str += "(" + self.type
            level += 1
            for i, v in enumerate(self.value):
                if i == 0:
                    result_str += " " + v.recursive_str_debug(level, recursive_is_not)
                else:
                    result_str += "\n" + 13*level*' ' + v.recursive_str_debug(level, recursive_is_not)
            result_str += ")"
        return result_str

    def recursive_xml_str(self, level, recursive_is_not):
        """Render this vertex as XML <filter>/<require> elements, indented
        with `level` tabs.  Attribute values are XML-escaped inline."""
        result_str = ""
        if self.is_type_single():
            if self.is_not and not recursive_is_not:
                result_str += level*"\t" + "<require type=\"require-not\">\n"
                # assumes self.value exposes values() -> (name, argument);
                # TODO confirm against the filter node class.
                (name, argument) = self.value.values()
                if argument == None:
                    result_str += (level+1)*"\t" + "<filter name=\"" + str(name) + "\" />\n"
                else:
                    arg = str(argument).replace('&', '&amp;').replace('"', '&quot;').replace('\'', '&apos;').replace('<', '&lt;').replace('>', '&gt;')
                    result_str += (level+1)*"\t" + "<filter name=\"" + str(name) + "\" argument=\"" + arg + "\" />\n"
                result_str += level*"\t" + "</require>\n"
            else:
                (name, argument) = self.value.values()
                if argument == None:
                    result_str += level*"\t" + "<filter name=\"" + str(name) + "\" />\n"
                else:
                    arg = str(argument).replace('&', '&amp;').replace('"', '&quot;').replace('\'', '&apos;').replace('<', '&lt;').replace('>', '&gt;')
                    result_str += level*"\t" + "<filter name=\"" + str(name) + "\" argument=\"" + arg + "\" />\n"
        elif self.is_type_require_entitlement():
            if self.is_not:
                result_str += level*"\t" + "<require type=\"require-not\">\n"
                level += 1
            result_str += level*"\t" + "<require type=\"require-entitlement\""
            (n, i) = self.value
            if i == None:
                # [21:-1] strips the "(require-entitlement " prefix and the
                # trailing paren to leave only the entitlement name.
                _tmp = str(n.value)[21:-1].replace('&', '&amp;').replace('"', '&quot;').replace('\'', '&apos;').replace('<', '&lt;').replace('>', '&gt;')
                result_str += " value=\"" + _tmp + "\" />\n"
            else:
                _tmp = str(n.value)[21:-1].replace('&', '&amp;').replace('"', '&quot;').replace('\'', '&apos;').replace('<', '&lt;').replace('>', '&gt;')
                result_str += " value=\"" + _tmp + "\">\n"
                result_str += i.recursive_xml_str(level+1, self.is_not)
                result_str += level*"\t" + "</require>\n"
            if self.is_not:
                level -= 1
                result_str += level*"\t" + "</require>\n"
        else:
            result_str += level*"\t" + "<require type=\"" + self.type + "\">\n"
            for i, v in enumerate(self.value):
                result_str += v.recursive_xml_str(level+1, recursive_is_not)
            result_str += level*"\t" + "</require>\n"
        return result_str

    def __str__(self):
        return self.recursive_str(1, False)

    def str_debug(self):
        """Full debug rendering of the vertex tree."""
        return self.recursive_str_debug(1, False)

    def str_simple(self):
        """One-token summary of the vertex (used for logging/matching)."""
        if self.is_type_single():
            return self.value.str_debug()
        elif self.is_type_require_any():
            return "require-any"
        elif self.is_type_require_all():
            return "require-all"
        elif self.is_type_require_entitlement():
            return self.value.str_debug()[1:-1]
        elif self.is_type_start():
            return "start"
        else:
            return "unknown-type"

    def str_print_debug(self):
        """Return (opening, closing) debug strings for iterative printing."""
        if self.is_type_single():
            return (self.value.str_debug(), None)
        elif self.is_type_require_any():
            return ("(require-any", ")")
        elif self.is_type_require_all():
            return ("(require-all", ")")
        elif self.is_type_require_entitlement():
            return (self.value.str_debug()[:-1], ")")
        elif self.is_type_start():
            return (None, None)
        else:
            return ("unknown-type", None)

    def str_print(self):
        """Return (opening, closing) SBPL strings for iterative printing.
        Metanodes open a paren that the caller must close with the second
        tuple element; start nodes print nothing."""
        if self.is_type_single():
            return (str(self.value), None)
        elif self.is_type_require_any():
            return ("(require-any", ")")
        elif self.is_type_require_all():
            return ("(require-all", ")")
        elif self.is_type_require_entitlement():
            return (str(self.value)[:-1], ")")
        elif self.is_type_start():
            return (None, None)
        else:
            return ("unknown-type", None)

    def str_print_not(self):
        """Negated rendering of a single vertex; empty string when this
        vertex is not a negated single filter."""
        result_str = ""
        if self.is_type_single():
            if self.is_not:
                value = str(self.value)
                if "(require-any" in value:
                    result_str = self.value.str_not()
                else:
                    result_str += "(require-not " + str(self.value) + ")"
        return result_str

    def xml_str(self):
        """XML rendering, starting at indent level 3 (inside <filters>)."""
        return self.recursive_xml_str(3, False)


class ReducedEdge():
    """A directed edge between two ReducedVertice endpoints."""
    # Class-level defaults; instances shadow these in __init__.
    start = None
    end = None

    def __init__(self, start=None, end=None):
        self.start = start
        self.end = end

    def str_debug(self):
        """Debug rendering: '<start debug> -> <end debug>'."""
        return "%s -> %s" % (self.start.str_debug(), self.end.str_debug())

    def str_simple(self):
        """Compact rendering built from the endpoints' str_simple()."""
        return self.start.str_simple() + " -----> " + self.end.str_simple()

    def __str__(self):
        return "%s -> %s" % (self.start, self.end)


class ReducedGraph():
    """Graph of ReducedVertice objects used to rebuild sandbox profile rules.

    Tracks `vertices`, directed `edges` and `final_vertices` (vertices with
    no outgoing edge).  The reduce_* methods collapse chains into
    require-all metanodes and fan-outs into require-any metanodes so the
    graph can be emitted as SBPL s-expressions or XML.
    """
    # Class-level defaults; instances get fresh containers in __init__.
    vertices = []
    edges = []
    final_vertices = []
    reduce_changes_occurred = False

    def __init__(self):
        self.vertices = []
        self.edges = []
        self.final_vertices = []
        self.reduce_changes_occurred = False

    def add_vertice(self, v):
        """Append vertex `v` to the graph."""
        self.vertices.append(v)

    def add_edge(self, e):
        """Append edge `e` to the graph."""
        self.edges.append(e)

    def add_edge_by_vertices(self, v_start, v_end):
        """Create and append the edge v_start -> v_end."""
        e = ReducedEdge(v_start, v_end)
        self.edges.append(e)

    def set_final_vertices(self):
        """Recompute final_vertices: vertices with no outgoing edge."""
        self.final_vertices = []
        for v in self.vertices:
            is_final = True
            for e in self.edges:
                if v == e.start:
                    is_final = False
                    break
            if is_final:
                self.final_vertices.append(v)

    def contains_vertice(self, v):
        return v in self.vertices

    def contains_edge(self, e):
        return e in self.edges

    def contains_edge_by_vertices(self, v_start, v_end):
        """True if an edge v_start -> v_end exists."""
        for e in self.edges:
            if e.start == v_start and e.end == v_end:
                return True
        return False

    def get_vertice_by_value(self, value):
        """Return the first single-type vertex whose value equals `value`,
        or None (implicitly) when no such vertex exists."""
        for v in self.vertices:
            if v.is_type_single():
                if v.value == value:
                    return v

    def get_edge_by_vertices(self, v_start, v_end):
        """Return the edge v_start -> v_end, or None."""
        for e in self.edges:
            if e.start == v_start and e.end == v_end:
                return e
        return None

    def remove_vertice(self, v):
        """Remove `v` and every edge touching it."""
        edges_copy = list(self.edges)
        for e in edges_copy:
            if e.start == v or e.end == v:
                self.edges.remove(e)
        if v in self.vertices:
            self.vertices.remove(v)

    def remove_vertice_update_decision(self, v):
        """Remove `v` and its edges; predecessors inherit v's decision."""
        edges_copy = list(self.edges)
        for e in edges_copy:
            if e.start == v:
                self.edges.remove(e)
            if e.end == v:
                e.start.decision = v.decision
                self.edges.remove(e)
        if v in self.vertices:
            self.vertices.remove(v)

    def remove_edge(self, e):
        if e in self.edges:
            self.edges.remove(e)

    def remove_edge_by_vertices(self, v_start, v_end):
        """Remove the edge v_start -> v_end if present."""
        e = self.get_edge_by_vertices(v_start, v_end)
        if e:
            self.edges.remove(e)

    def replace_vertice_in_edge_start(self, old, new):
        """Redirect edges starting at `old` to start at `new`; for other
        edges, try replacing `old` inside list-valued start vertices."""
        global replace_occurred
        for e in self.edges:
            if e.start == old:
                e.start = new
                replace_occurred = True
            else:
                if isinstance(e.start.value, list):
                    e.start.replace_in_list(old, new)
                    if replace_occurred:
                        e.start.decision = new.decision

    def replace_vertice_in_edge_end(self, old, new):
        """Redirect edges ending at `old` to end at `new`; for other edges,
        try replacing `old` inside list-valued end vertices."""
        global replace_occurred
        for e in self.edges:
            if e.end == old:
                e.end = new
                replace_occurred = True
            else:
                if isinstance(e.end.value, list):
                    e.end.replace_in_list(old, new)
                    if replace_occurred:
                        e.end.decision = new.decision

    def replace_vertice_in_single_vertices(self, old, new):
        """Replace `old` with `new` inside isolated (edge-less) vertices
        that hold a list value."""
        for v in self.vertices:
            if len(self.get_next_vertices(v)) == 0 and len(self.get_prev_vertices(v)) == 0:
                if isinstance(v.value, list):
                    v.replace_in_list(old, new)

    def replace_vertice_list(self, old, new):
        """Replace the vertex group `old` with the single vertex `new`,
        both inside nested list values and in the edge structure."""
        for v in self.vertices:
            if isinstance(v.value, list):
                v.replace_sublist_in_list(old, new)
            if set(self.get_next_vertices(v)) == set(old):
                for n in old:
                    self.remove_edge_by_vertices(v, n)
                self.add_edge_by_vertices(v, new)
            if set(self.get_prev_vertices(v)) == set(old):
                for n in old:
                    self.remove_edge_by_vertices(n, v)
                self.add_edge_by_vertices(new, v)

    def get_next_vertices(self, v):
        """Return the list of successors of `v`."""
        next_vertices = []
        for e in self.edges:
            if e.start == v:
                next_vertices.append(e.end)
        return next_vertices

    def get_prev_vertices(self, v):
        """Return the list of predecessors of `v`."""
        prev_vertices = []
        for e in self.edges:
            if e.end == v:
                prev_vertices.append(e.start)
        return prev_vertices

    def get_start_vertices(self):
        """Return vertices with no predecessors."""
        start_vertices = []
        for v in self.vertices:
            if not self.get_prev_vertices(v):
                start_vertices.append(v)
        return start_vertices

    def get_end_vertices(self):
        """Return vertices with no successors."""
        end_vertices = []
        for v in self.vertices:
            if not self.get_next_vertices(v):
                end_vertices.append(v)
        return end_vertices

    def reduce_next_vertices(self, v):
        """Collapse multiple successors of `v` into one require-any vertex."""
        next_vertices = self.get_next_vertices(v)
        if len(next_vertices) <= 1:
            return
        self.reduce_changes_occurred = True
        new_vertice = ReducedVertice("require-any", next_vertices, next_vertices[0].decision)
        add_to_final = False
        for n in next_vertices:
            self.remove_edge_by_vertices(v, n)
        self.replace_vertice_list(next_vertices, new_vertice)
        for n in next_vertices:
            if n in self.final_vertices:
                self.final_vertices.remove(n)
                add_to_final = True
            # If no more next vertices, remove vertice.
            if not self.get_next_vertices(n):
                if n in self.vertices:
                    self.vertices.remove(n)
        self.add_edge_by_vertices(v, new_vertice)
        self.add_vertice(new_vertice)
        if add_to_final:
            self.final_vertices.append(new_vertice)

    def reduce_prev_vertices(self, v):
        """Collapse multiple predecessors of `v` into one require-any vertex."""
        prev_vertices = self.get_prev_vertices(v)
        if len(prev_vertices) <= 1:
            return
        self.reduce_changes_occurred = True
        new_vertice = ReducedVertice("require-any", prev_vertices, v.decision)
        for p in prev_vertices:
            self.remove_edge_by_vertices(p, v)
        self.replace_vertice_list(prev_vertices, new_vertice)
        for p in prev_vertices:
            # If no more prev vertices, remove vertice.
            if not self.get_prev_vertices(p):
                if p in self.vertices:
                    self.vertices.remove(p)
        self.add_vertice(new_vertice)
        self.add_edge_by_vertices(new_vertice, v)

    def reduce_vertice_single_prev(self, v):
        """Merge `v` with its sole predecessor into a require-all vertex.
        Only applies when the predecessor's only successor is `v`."""
        global replace_occurred
        prev = self.get_prev_vertices(v)
        if len(prev) != 1:
            logger.debug("not a single prev for node")
            return
        p = prev[0]
        nexts = self.get_next_vertices(p)
        if len(nexts) > 1 or nexts[0] != v:
            logger.debug("multiple nexts for prev")
            return
        # Flatten existing require-all lists so the merge stays one level deep.
        require_all_vertices = []
        if p.is_type_require_all():
            require_all_vertices.extend(p.value)
        else:
            require_all_vertices.append(p)
        if v.is_type_require_all():
            require_all_vertices.extend(v.value)
        else:
            require_all_vertices.append(v)
        new_vertice = ReducedVertice("require-all", require_all_vertices, v.decision)
        self.remove_edge_by_vertices(p, v)
        replace_occurred = False
        self.replace_vertice_in_edge_start(v, new_vertice)
        self.replace_vertice_in_edge_end(p, new_vertice)
        self.replace_vertice_in_single_vertices(p, new_vertice)
        self.replace_vertice_in_single_vertices(v, new_vertice)
        self.remove_vertice(p)
        self.remove_vertice(v)
        if not replace_occurred:
            self.add_vertice(new_vertice)
        if v in self.final_vertices:
            self.final_vertices.remove(v)
            self.final_vertices.append(new_vertice)

    def reduce_vertice_single_next(self, v):
        """Merge `v` with its sole successor into a require-all vertex.
        Mirror image of reduce_vertice_single_prev()."""
        global replace_occurred
        next = self.get_next_vertices(v)
        if len(next) != 1:
            return
        n = next[0]
        prevs = self.get_prev_vertices(n)
        if len(prevs) > 1 or prevs[0] != v:
            return
        require_all_vertices = []
        if v.is_type_require_all():
            require_all_vertices.extend(v.value)
        else:
            require_all_vertices.append(v)
        if n.is_type_require_all():
            require_all_vertices.extend(n.value)
        else:
            require_all_vertices.append(n)
        new_vertice = ReducedVertice("require-all", require_all_vertices, n.decision)
        self.remove_edge_by_vertices(v, n)
        replace_occurred = False
        self.replace_vertice_in_edge_start(n, new_vertice)
        # Bug fix: the original passed the undefined name `e` here, which
        # raised NameError at runtime.  Mirroring reduce_vertice_single_prev,
        # edges ending at `v` must be redirected to the merged vertex.
        self.replace_vertice_in_edge_end(v, new_vertice)
        self.replace_vertice_in_single_vertices(v, new_vertice)
        self.replace_vertice_in_single_vertices(n, new_vertice)
        self.remove_vertice(v)
        self.remove_vertice(n)
        if not replace_occurred:
            self.add_vertice(new_vertice)
        if n in self.final_vertices:
            self.final_vertices.remove(n)
            self.final_vertices.append(new_vertice)

    def reduce_graph(self):
        """Iteratively collapse fan-outs, fan-ins and chains until stable."""
        self.set_final_vertices()

        logger.debug("before everything:\n" + self.str_simple())
        # Do until no more changes.
        while True:
            self.reduce_changes_occurred = False
            copy_vertices = list(self.vertices)
            for v in copy_vertices:
                self.reduce_next_vertices(v)
            if self.reduce_changes_occurred == False:
                break
        logger.debug("after next:\n" + self.str_simple())
        # Do until no more changes.
        while True:
            self.reduce_changes_occurred = False
            copy_vertices = list(self.vertices)
            for v in copy_vertices:
                self.reduce_prev_vertices(v)
            if self.reduce_changes_occurred == False:
                break
        logger.debug("after next/prev:\n" + self.str_simple())

        # Reduce graph starting from final vertices. Keep going until
        # final vertices don't change during an iteration.
        while True:
            copy_final_vertices = list(self.final_vertices)
            for v in copy_final_vertices:
                logger.debug("reducing single prev vertex: " + v.str_debug())
                self.reduce_vertice_single_prev(v)
                logger.debug("### new graph is:")
                logger.debug(self.str_simple())
            if set(copy_final_vertices) == set(self.final_vertices):
                break
        for e in self.edges:
            v = e.end
            logger.debug("reducing single prev vertex: " + v.str_debug())
            self.reduce_vertice_single_prev(v)
        logger.debug("after everything:\n" + self.str_simple())

    def reduce_graph_with_metanodes(self):
        """Insert start/require-any/require-all metanodes instead of merging
        vertices; used by the metanode-based printing path."""
        # Add require-any metanode if current node has multiple successors.
        copy_vertices = list(self.vertices)
        for v in copy_vertices:
            nlist = self.get_next_vertices(v)
            if len(nlist) >= 2:
                new_node = ReducedVertice("require-any", None, None)
                self.add_vertice(new_node)
                self.add_edge_by_vertices(v, new_node)
                for n in nlist:
                    self.remove_edge_by_vertices(v, n)
                    self.add_edge_by_vertices(new_node, n)

        start_list = self.get_start_vertices()
        new_node = ReducedVertice("start", None, None)
        self.add_vertice(new_node)
        for s in start_list:
            self.add_edge_by_vertices(new_node, s)

        # Add require-all metanode if current node has a require-any as a predecessor and is followed by another node.
        copy_vertices = list(self.vertices)
        for v in copy_vertices:
            prev_vertices = list(self.get_prev_vertices(v))
            next_vertices = list(self.get_next_vertices(v))
            for p in prev_vertices:
                if (p.is_type_require_any() or p.is_type_start()) and next_vertices:
                    # Except for when a require-entitlement ending block.
                    if v.is_type_require_entitlement():
                        has_next_nexts = False
                        for n in next_vertices:
                            if n.is_type_require_any():
                                for n2 in self.get_next_vertices(n):
                                    if self.get_next_vertices(n2):
                                        has_next_nexts = True
                                        break
                            else:
                                if self.get_next_vertices(n):
                                    has_next_nexts = True
                                    break
                        if not has_next_nexts:
                            continue
                    new_node = ReducedVertice("require-all", None, None)
                    self.add_vertice(new_node)
                    self.remove_edge_by_vertices(p, v)
                    self.add_edge_by_vertices(p, new_node)
                    self.add_edge_by_vertices(new_node, v)

    def str_simple_with_metanodes(self):
        """Log a compact dump of vertices and edges (debug level)."""
        logger.debug("==== vertices:\n")
        for v in self.vertices:
            logger.debug(v.str_simple())
        logger.debug("==== edges:\n")
        for e in self.edges:
            logger.debug(e.str_simple())

    def str_simple(self):
        """Return a multi-line debug dump of vertices, finals and edges."""
        message = "==== vertices:\n"
        for v in self.vertices:
            message += "decision: " + str(v.decision) + "\t" + v.str_debug() + "\n"
        message += "==== final vertices:\n"
        for v in self.final_vertices:
            message += "decision: " + str(v.decision) + "\t" + v.str_debug() + "\n"
        message += "==== edges:\n"
        for e in self.edges:
            message += "\t" + e.str_debug() + "\n"
        return message

    def __str__(self):
        result_str = ""
        for v in self.vertices:
            result_str += "(" + str(v.decision) + " "
            # Bug fix: the original tested get_next_vertices() twice; an
            # isolated final vertex has neither successors nor predecessors
            # (same check as replace_vertice_in_single_vertices).
            if len(self.get_next_vertices(v)) == 0 and len(self.get_prev_vertices(v)) == 0:
                if v in self.final_vertices:
                    result_str += str(v) + "\n"
            result_str += ")\n"
        for e in self.edges:
            result_str += str(e) + "\n"
        result_str += "\n"
        return result_str

    def remove_builtin_filters(self):
        """Drop vertices matching the builtin-filter placeholder marker."""
        copy_vertices = list(self.vertices)
        for v in copy_vertices:
            # Raw string avoids invalid \$ / \* escape warnings; the regex
            # matches the literal "###$$$***" placeholder.
            if re.search(r"###\$\$\$\*\*\*", str(v)):
                self.remove_vertice_update_decision(v)

    def reduce_integrated_vertices(self, integrated_vertices):
        """Collapse the entitlement-value chain `integrated_vertices` into a
        single vertex.  Returns (vertex_or_None, decision)."""
        if len(integrated_vertices) == 0:
            return (None, None)
        if len(integrated_vertices) > 1:
            return (ReducedVertice("require-any", integrated_vertices, integrated_vertices[0].decision), integrated_vertices[0].decision)
        require_all_vertices = []
        v = integrated_vertices[0]
        decision = None
        # Walk the chain of successors, collecting everything except the
        # trivial "entitlement-value #t" filter.
        while True:
            if not re.search("entitlement-value #t", str(v)):
                require_all_vertices.append(v)
            next_vertices = self.get_next_vertices(v)
            if decision is None and v.decision is not None:
                decision = v.decision
            self.remove_vertice(v)
            if v in self.final_vertices:
                self.final_vertices.remove(v)
            if next_vertices:
                v = next_vertices[0]
            else:
                break
        if len(require_all_vertices) == 0:
            return (None, v.decision)
        if len(require_all_vertices) == 1:
            return (ReducedVertice(value=require_all_vertices[0].value, decision=require_all_vertices[0].decision, is_not=require_all_vertices[0].is_not), v.decision)
        return (ReducedVertice("require-all", require_all_vertices, require_all_vertices[len(require_all_vertices)-1].decision), v.decision)

    def aggregate_require_entitlement(self, v):
        """Fold `v` (a require-entitlement filter) and its entitlement-value
        successors into a single require-entitlement metavertex."""
        next_vertices = []
        prev_vertices = self.get_prev_vertices(v)
        integrated_vertices = []
        for n in self.get_next_vertices(v):
            if not re.search("entitlement-value", str(n)):
                next_vertices.append(n)
                break
            integrated_vertices.append(n)
            # Depth-first walk of the entitlement-value subtree; anything
            # past it becomes a plain successor of the new metavertex.
            current_list = [ n ]
            while current_list:
                current = current_list.pop()
                for n2 in self.get_next_vertices(current):
                    if not re.search("entitlement-value", str(n2)):
                        self.remove_edge_by_vertices(current, n2)
                        next_vertices.append(n2)
                    else:
                        current_list.append(n2)
        new_vertice = ReducedVertice(type="require-entitlement", value=(v, None), decision=None, is_not=v.is_not)
        for p in prev_vertices:
            self.remove_edge_by_vertices(p, v)
            self.add_edge_by_vertices(p, new_vertice)
        for n in next_vertices:
            self.remove_edge_by_vertices(v, n)
            self.add_edge_by_vertices(new_vertice, n)
        for i in integrated_vertices:
            self.remove_edge_by_vertices(v, i)
        self.remove_vertice(v)
        self.add_vertice(new_vertice)
        if v in self.final_vertices:
            self.final_vertices.remove(v)
            self.final_vertices.append(new_vertice)
        (new_integrate, decision) = self.reduce_integrated_vertices(integrated_vertices)
        for i in integrated_vertices:
            self.remove_vertice(i)
            if i in self.final_vertices:
                self.final_vertices.remove(i)
        new_vertice.set_integrated_vertice(new_integrate)
        new_vertice.set_decision(decision)

    def aggregate_require_entitlement_nodes(self):
        """Apply aggregate_require_entitlement() to all matching vertices."""
        copy_vertices = list(self.vertices)
        idx = 0
        while idx < len(copy_vertices):
            v = copy_vertices[idx]
            if re.search("require-entitlement", str(v)):
                self.aggregate_require_entitlement(v)
            idx += 1

    def cleanup_filters(self):
        """Remove builtin placeholders, then aggregate entitlement nodes."""
        self.remove_builtin_filters()
        self.aggregate_require_entitlement_nodes()

    def remove_builtin_filters_with_metanodes(self):
        """Metanode variant of builtin-filter cleanup; also rewrites the
        catch-all regex argument #"." to #".+"."""
        copy_vertices = list(self.vertices)
        for v in copy_vertices:
            # Raw strings avoid invalid escape warnings; patterns are
            # regex-equivalent to the originals.
            if re.search(r"###\$\$\$\*\*\*", v.str_simple()):
                self.remove_vertice(v)
            elif re.search("entitlement-value #t", v.str_simple()):
                self.remove_vertice(v)
            elif re.search(r"entitlement-value-regex #\"\.\"", v.str_simple()):
                v.value.non_terminal.argument = "#\".+\""
            elif re.search(r"global-name-regex #\"\.\"", v.str_simple()):
                v.value.non_terminal.argument = "#\".+\""
            elif re.search(r"local-name-regex #\"\.\"", v.str_simple()):
                v.value.non_terminal.argument = "#\".+\""

    def replace_require_entitlement_with_metanodes(self, v):
        """Swap `v` for a require-entitlement metavertex with the same
        neighbors."""
        prev_list = self.get_prev_vertices(v)
        next_list = self.get_next_vertices(v)
        new_node = ReducedVertice(type="require-entitlement", value=v.value, decision=None, is_not=v.is_not)
        self.add_vertice(new_node)
        self.remove_vertice(v)
        for p in prev_list:
            self.add_edge_by_vertices(p, new_node)
        for n in next_list:
            self.add_edge_by_vertices(new_node, n)

    def aggregate_require_entitlement_with_metanodes(self):
        """Replace every require-entitlement filter with a metavertex."""
        copy_vertices = list(self.vertices)
        for v in copy_vertices:
            if re.search("require-entitlement", str(v)):
                self.replace_require_entitlement_with_metanodes(v)

    def cleanup_filters_with_metanodes(self):
        """Metanode variant of cleanup_filters()."""
        self.remove_builtin_filters_with_metanodes()
        self.aggregate_require_entitlement_with_metanodes()

    def print_vertices_with_operation(self, operation, out_f):
        """Write (allow ...) / (deny ...) rules for `operation` to `out_f`."""
        allow_vertices = [v for v in self.vertices if v.decision == "allow"]
        deny_vertices = [v for v in self.vertices if v.decision == "deny"]
        if allow_vertices:
            out_f.write("(allow %s " % (operation))
            if len(allow_vertices) > 1:
                for v in allow_vertices:
                    out_f.write("\n" + 8*" " + str(v))
            else:
                out_f.write(str(allow_vertices[0]))
            out_f.write(")\n")
        if deny_vertices:
            out_f.write("(deny %s " % (operation))
            if len(deny_vertices) > 1:
                for v in deny_vertices:
                    out_f.write("\n" + 8*" " + str(v))
            else:
                out_f.write(str(deny_vertices[0]))
            out_f.write(")\n")

    def print_vertices_with_operation_metanodes(self, operation, default_is_allow, out_f):
        """Write the rule for `operation` using the metanode graph, with the
        opposite decision of the profile default.  Iterative DFS over a
        worklist of (vertex, indent) pairs; a None vertex closes a paren."""
        # Return if only start node in list.
        if len(self.vertices) == 1 and self.vertices[0].is_type_start():
            return
        # Use reverse of default rule.
        if default_is_allow:
            out_f.write("(deny %s" % (operation))
        else:
            out_f.write("(allow %s" % (operation))
        vlist = []
        start_list = self.get_start_vertices()
        start_list.reverse()
        vlist.insert(0, (None, 0))
        for s in start_list:
            vlist.insert(0, (s, 1))
        while True:
            if not vlist:
                break
            (cnode, indent) = vlist.pop(0)
            if not cnode:
                out_f.write(")")
                continue
            (first, last) = cnode.str_print()
            if first:
                if cnode.is_not:
                    if cnode.str_print_not() != "":
                        out_f.write("\n" + indent * "\t" + cnode.str_print_not())
                    else:
                        out_f.write("\n" + indent * "\t" + "(require-not " + first)
                        if cnode.is_type_require_any() or cnode.is_type_require_all() or cnode.is_type_require_entitlement():
                            vlist.insert(0, (None, indent))
                        else:
                            out_f.write(")")
                else:
                    out_f.write("\n" + indent * "\t" + first)
            if last:
                vlist.insert(0, (None, indent))
            next_vertices_list = self.get_next_vertices(cnode)
            if next_vertices_list:
                if cnode.is_type_require_any() or cnode.is_type_require_all() or cnode.is_type_require_entitlement():
                    indent += 1
                next_vertices_list.reverse()
                if cnode.is_type_require_entitlement():
                    # Nodes that are not part of the entitlement body are
                    # scheduled after the closing paren, one level up.
                    pos = 0
                    for n in next_vertices_list:
                        if (n.is_type_single() and not re.search("entitlement-value", n.str_simple())) or \
                                n.is_type_require_entitlement():
                            vlist.insert(pos + 1, (n, indent-1))
                        else:
                            vlist.insert(0, (n, indent))
                            pos += 1
                else:
                    for n in next_vertices_list:
                        vlist.insert(0, (n, indent))
        out_f.write("\n")

    def dump_xml(self, operation, out_f):
        """Write the allow/deny <operation> XML elements to `out_f`."""
        allow_vertices = [v for v in self.vertices if v.decision == "allow"]
        deny_vertices = [v for v in self.vertices if v.decision == "deny"]
        if allow_vertices:
            out_f.write("\t<operation name=\"%s\" action=\"allow\">\n" % (operation))
            out_f.write("\t\t<filters>\n")
            for v in allow_vertices:
                out_f.write(v.xml_str())
            out_f.write("\t\t</filters>\n")
            out_f.write("\t</operation>\n")
        if deny_vertices:
            out_f.write("\t<operation name=\"%s\" action=\"deny\">\n" % (operation))
            out_f.write("\t\t<filters>\n")
            for v in deny_vertices:
                out_f.write(v.xml_str())
            out_f.write("\t\t</filters>\n")
            out_f.write("\t</operation>\n")


def reduce_operation_node_graph(g):
    """Convert the raw operation-node graph g into a ReducedGraph.

    g maps node -> {"decision": ..., "not": ..., "list": [successor nodes]}.
    Returns the reduced graph after metanode cleanup and reduction.
    """
    # Create reduced graph.
    rg = ReducedGraph()
    for node_iter in g.keys():
        rv = ReducedVertice(value=node_iter, decision=g[node_iter]["decision"], is_not=g[node_iter]["not"])
        rg.add_vertice(rv)

    for node_iter in g.keys():
        rv = rg.get_vertice_by_value(node_iter)
        for node_next in g[node_iter]["list"]:
            rn = rg.get_vertice_by_value(node_next)
            rg.add_edge_by_vertices(rv, rn)

    # Handle special case for require-not (require-entitlement (...)):
    # entitlement-value vertices that immediately follow a negated
    # require-entitlement vertex and share its predecessors are re-attached
    # directly beneath it.
    node_list = list(g.keys())  # hoisted: was rebuilt on every lookup
    num_nodes = len(node_list)
    for idx, node_iter in enumerate(node_list):
        rv = rg.get_vertice_by_value(node_iter)
        if not re.search("require-entitlement", str(rv)):
            continue
        if not rv.is_not:
            continue
        c_idx = idx
        while True:
            c_idx += 1
            if c_idx >= num_nodes:
                break
            rn = rg.get_vertice_by_value(node_list[c_idx])
            if not re.search("entitlement-value", str(rn)):
                break
            prevs_rv = rg.get_prev_vertices(rv)
            prevs_rn = rg.get_prev_vertices(rn)
            if sorted(prevs_rv) != sorted(prevs_rn):
                continue
            for pn in prevs_rn:
                rg.remove_edge_by_vertices(rn, pn)
            rg.add_edge_by_vertices(rv, rn)

    rg.cleanup_filters_with_metanodes()
    # NOTE: a loop here that re-fetched every vertex and discarded the
    # result had no effect and was removed.
    rg.reduce_graph_with_metanodes()
    return rg


def main():
    """Decompile the binary sandbox profile given on the command line.

    argv: binary_sandbox_file operations_file ios_version
    """
    if len(sys.argv) != 4:
        # Python 3 fix: "print >> sys.stderr, ..." was Python 2 syntax.
        print("Usage: %s binary_sandbox_file operations_file ios_version" % (sys.argv[0]), file=sys.stderr)
        sys.exit(-1)

    ios_major_version = int(sys.argv[3].split('.')[0])
    # Read sandbox operations.
    with open(sys.argv[2]) as ops_f:
        sb_ops = [l.strip() for l in ops_f]
    num_sb_ops = len(sb_ops)
    # Logging fix: use a format string instead of extra positional data.
    logger.info("num_sb_ops: %d", num_sb_ops)

    # Close the profile file once the header and nodes have been read.
    with open(sys.argv[1], "rb") as f:
        operation_nodes = build_operation_nodes(f, num_sb_ops, ios_major_version)

        global num_regex
        f.seek(4)
        num_regex = struct.unpack("<H", f.read(2))[0]
        logger.debug("num_regex: %02x" % (num_regex))
        f.seek(6)
        sb_ops_offsets = struct.unpack("<%dH" % (num_sb_ops), f.read(2*num_sb_ops))

    # Extract node for 'default' operation (index 0).
    default_node = find_operation_node_by_offset(operation_nodes, sb_ops_offsets[0])
    print("(%s default)" % (default_node.terminal))

    # For each operation expand operation node.  (Restored the full loop:
    # a leftover debug "range(10, 11)" processed only a single operation.)
    for idx in range(1, len(sb_ops_offsets)):
        offset = sb_ops_offsets[idx]
        operation = sb_ops[idx]
        node = find_operation_node_by_offset(operation_nodes, offset)
        if not node:
            logger.info("operation %s (index %d) has no operation node", operation, idx)
            continue
        logger.debug("expanding operation %s (index %d, offset: %02x)", operation, idx, offset)
        g = build_operation_node_graph(node, default_node)
        logger.debug("reducing operation %s (index %d, offset: %02x)", operation, idx, offset)
        print_operation_node_graph(g)
        if g:
            rg = reduce_operation_node_graph(g)
            rg.print_vertices_with_operation(operation)
        else:
            if node.terminal:
                if node.terminal.type != default_node.terminal.type:
                    print("(%s %s)" % (node.terminal, operation))


# Script entry point: exit status is main()'s return value (None maps to 0).
if __name__ == "__main__":
    sys.exit(main())


================================================
FILE: reverse-sandbox/regex_parser_v1.py
================================================
import logging
import logging.config  # fileConfig lives in the logging.config submodule;
                       # "import logging" alone does not guarantee it loads
import struct

logging.config.fileConfig("logger.config")
logger = logging.getLogger(__name__)

def parse_character(node_type, node_arg, node_transition, node_idx):
    """Decode a literal-character node; '.' is escaped as '[.]'."""
    ch = chr(node_arg & 0xff)
    return {
        "pos": node_idx,
        "nextpos": node_transition,
        "type": "character",
        "value": "[.]" if ch == "." else ch,
    }

def parse_end(node_type, node_arg, node_transition, node_idx):
    """Decode a match-end node (the value field is unused)."""
    return dict(pos=node_idx, nextpos=node_transition, type="end", value=0)

def parse_jump_forward(node_type, node_arg, node_transition, node_idx):
    """Decode a forward jump; the target comes from the node argument."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="jump_forward", value=node_arg)

def parse_jump_backward(node_type, node_arg, node_transition, node_idx):
    """Decode a backward jump; the target is the transition field itself."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="jump_backward", value=node_transition)

def parse_beginning_of_line(node_type, node_arg, node_transition, node_idx):
    """Decode a line-start anchor node ('^')."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="character", value="^")

def parse_end_of_line(node_type, node_arg, node_transition, node_idx):
    """Decode a line-end anchor node ('$')."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="character", value="$")

def parse_dot(node_type, node_arg, node_transition, node_idx):
    """Decode an any-character node ('.')."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="character", value=".")

def parse_character_class(node_type, node_arg, node_transition, node_idx):
    """Decode a character-class node; value is the class index, resolved later."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="class", value=node_arg)

def parse_character_neg_class(node_type, node_arg, node_transition, node_idx):
    """Decode a negated character-class node; value is the class index."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="class_exclude", value=node_arg)

def parse_parantheses_open(node_type, node_arg, node_transition, node_idx):
    """Decode an open-parenthesis node.

    Grouping is not reconstructed: the node is treated as a plain backward
    jump.  (An alternative that emitted a literal "(" was unreachable dead
    code after the return and has been removed.)
    """
    return parse_jump_backward(node_type, node_arg, node_transition,
        node_idx)

def parse_parantheses_close(node_type, node_arg, node_transition, node_idx):
    """Decode a close-parenthesis node.

    Grouping is not reconstructed: the node is treated as a plain backward
    jump.  (An alternative that emitted a literal ")" was unreachable dead
    code after the return and has been removed.)
    """
    return parse_jump_backward(node_type, node_arg, node_transition,
        node_idx)

# Maps a v1 regex node-type value to the function that decodes it.
# node_parse() asserts membership before dispatching.
node_type_dispatch_table = {
  0x10: parse_character,
  0x22: parse_end,
  0x23: parse_parantheses_close,
  0x24: parse_parantheses_open,
  0x25: parse_jump_forward,
  0x30: parse_dot,
  0x31: parse_jump_backward,
  0x32: parse_beginning_of_line,
  0x33: parse_end_of_line,
  0x34: parse_character_class,
  0x35: parse_character_neg_class,
}


def node_parse(re, i, regex_list, node_idx):
    """Parse one 12-byte regex node at offset i and append it to regex_list.

    Layout (big-endian uint32s): type, transition, argument.
    Returns the offset just past the node.
    """
    # bytes() accepts both a bytes slice and a list of ints; the previous
    # ''.join(chr(...)) produced a str, which struct.unpack rejects on
    # Python 3.
    node_type = struct.unpack('>I', bytes(re[i:i+4]))[0]
    node_transition = struct.unpack('>I', bytes(re[i+4:i+8]))[0]
    node_arg = struct.unpack('>I', bytes(re[i+8:i+12]))[0]
    i += 12

    logger.debug('node idx:{:#010x} type: {:#02x} arg: {:#010x}' \
        ' transition: {:#010x}'.format(node_idx, node_type, node_arg,
            node_transition))
    assert node_type in node_type_dispatch_table
    regex_list.append(
        node_type_dispatch_table[node_type](
            node_type, node_arg, node_transition, node_idx))
    return i

def class_parse(re, i, classes, class_idx):
    """Parse one character class at offset i and append its string to classes.

    Layout (big-endian): pair count, then that many uint32s interpreted as
    (start, end) range pairs.  Returns the offset just past the class.
    """
    def transform(x):
        # Escape characters that are special inside a [...] class.
        c = chr(x)
        if c in '[]-':
            return '\\' + c
        else:
            return c

    # bytes() replaces the Python 2 ''.join(chr(...)) idiom, which produced
    # a str that struct.unpack rejects on Python 3.
    class_size = struct.unpack('>I', bytes(re[i:i+4]))[0]
    i += 0x4
    content = struct.unpack('>{}I'.format(class_size),
        bytes(re[i:i+4*class_size]))
    i += 0x4 * class_size
    assert class_size % 2 == 0  # ranges come in (start, end) pairs

    cls = ''
    for idx in range(0, class_size, 2):
        start = content[idx]
        end = content[idx+1]
        if start != end:
            cls += '{}-{}'.format(transform(start), transform(end))
        else:
            cls += transform(start)

    logger.debug('class idx = {:#x} size = {:#x} content=[{}]'.format(
        class_idx, class_size, cls))
    classes.append(cls)
    return i

class RegexParser(object):
    """Parser for the v1 binary regex blob (big-endian node table)."""

    @staticmethod
    def parse(re, i, regex_list):
        """Parse the regex starting at offset i, filling regex_list.

        Header: node count, start node, end node, character-class count,
        submatch count (5 big-endian uint32s), followed by the node table
        and the character classes.  Class indices in the node list are
        resolved into their final "[...]" form at the end.
        """
        # bytes() replaces the Python 2 ''.join(chr(...)) idiom, which
        # produced a str that struct.unpack rejects on Python 3.
        node_count = struct.unpack('>I', bytes(re[i:i+0x4]))[0]
        logger.debug('node count = {:#x}'.format(node_count))

        start_node = struct.unpack('>I', bytes(re[i+0x4:i+0x8]))[0]
        logger.debug('start node = {:#x}'.format(start_node))

        end_node = struct.unpack('>I', bytes(re[i+0x8:i+0xC]))[0]
        logger.debug('end node = {:#x}'.format(end_node))

        cclass_count = struct.unpack('>I', bytes(re[i+0xC:i+0x10]))[0]
        logger.debug('character class count = {:#x}'.format(cclass_count))

        submatch_count = struct.unpack('>I', bytes(re[i+0x10:i+0x14]))[0]
        i += 0x14
        logger.debug('submatch count = {:#x}'.format(submatch_count))

        for node_idx in range(node_count):
            i = node_parse(re, i, regex_list, node_idx)

        classes = []
        for class_idx in range(cclass_count):
            i = class_parse(re, i, classes, class_idx)

        # Resolve class indices into their rendered character sets.
        for node in regex_list:
            if node['type'] == 'class':
                node['value'] = '[{}]'.format(classes[node['value']])
            elif node['type'] == 'class_exclude':
                node['value'] = '[{}]'.format(classes[node['value']])

        regex_list[start_node]['start_node'] = True



================================================
FILE: reverse-sandbox/regex_parser_v2.py
================================================
import logging
import logging.config  # fileConfig lives in the logging.config submodule;
                       # "import logging" alone does not guarantee it loads
import struct

logging.config.fileConfig("logger.config")
logger = logging.getLogger(__name__)

def parse_character(node_type, node_arg, node_transition, node_idx):
    """Decode a literal-character node; '.' is escaped as '[.]'."""
    ch = chr(node_arg & 0xff)
    return {
        "pos": node_idx,
        "nextpos": node_transition,
        "type": "character",
        "value": "[.]" if ch == "." else ch,
    }

def parse_end(node_type, node_arg, node_transition, node_idx):
    """Decode a match-end node (the value field is unused)."""
    return dict(pos=node_idx, nextpos=node_transition, type="end", value=0)

def parse_jump_forward(node_type, node_arg, node_transition, node_idx):
    """Decode a forward jump; the target comes from the node argument."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="jump_forward", value=node_arg)

def parse_jump_backward(node_type, node_arg, node_transition, node_idx):
    """Decode a backward jump; the target is the transition field itself."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="jump_backward", value=node_transition)

def parse_beginning_of_line(node_type, node_arg, node_transition, node_idx):
    """Decode a line-start anchor node ('^')."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="character", value="^")

def parse_end_of_line(node_type, node_arg, node_transition, node_idx):
    """Decode a line-end anchor node ('$')."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="character", value="$")

def parse_dot(node_type, node_arg, node_transition, node_idx):
    """Decode an any-character node ('.')."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="character", value=".")

def parse_character_class(node_type, node_arg, node_transition, node_idx):
    """Decode a character-class node; value is the class index, resolved later."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="class", value=node_arg)

def parse_character_neg_class(node_type, node_arg, node_transition, node_idx):
    """Decode a negated character-class node; value is the class index."""
    return dict(pos=node_idx, nextpos=node_transition,
                type="class_exclude", value=node_arg)

def parse_parantheses_open(node_type, node_arg, node_transition, node_idx):
    """Decode an open-parenthesis node.

    Grouping is not reconstructed: the node is treated as a plain backward
    jump.  (An alternative that emitted a literal "(" was unreachable dead
    code after the return and has been removed.)
    """
    return parse_jump_backward(node_type, node_arg, node_transition,
        node_idx)

def parse_parantheses_close(node_type, node_arg, node_transition, node_idx):
    """Decode a close-parenthesis node.

    Grouping is not reconstructed: the node is treated as a plain backward
    jump.  (An alternative that emitted a literal ")" was unreachable dead
    code after the return and has been removed.)
    """
    return parse_jump_backward(node_type, node_arg, node_transition,
        node_idx)

# Maps a v2 regex node-type value to the function that decodes it.
# Types 0x25-0x28 are all forward-jump variants.  node_parse() asserts
# membership before dispatching.
node_type_dispatch_table = {
  0x10: parse_character,
  0x22: parse_end,
  0x25: parse_jump_forward,
  0x26: parse_jump_forward,
  0x27: parse_jump_forward,
  0x28: parse_jump_forward,
  0x30: parse_dot,
  0x31: parse_jump_backward,
  0x32: parse_beginning_of_line,
  0x33: parse_end_of_line,
  0x34: parse_character_class,
  0x35: parse_character_neg_class,
}


def node_parse(re, i, regex_list, node_idx):
    """Parse one 8-byte regex node at offset i and append it to regex_list.

    Layout (little-endian): type (1 byte), transition (2), pad (1),
    argument (4).  Returns the offset just past the node.
    """
    # bytes() replaces the Python 2 ''.join(chr(...)) idiom, which produced
    # a str that struct.unpack rejects on Python 3.
    node_type = struct.unpack('<B', bytes(re[i:i+1]))[0]
    node_transition = struct.unpack('<H', bytes(re[i+1:i+3]))[0]
    pad = struct.unpack('<B', bytes(re[i+3:i+4]))[0]
    node_arg = struct.unpack('<I', bytes(re[i+4:i+8]))[0]
    i += 8

    logger.debug('node idx:{:#06x} type: {:#02x} arg: {:#010x}' \
        ' transition: {:#06x}'.format(node_idx, node_type, node_arg,
            node_transition))

    # Only end nodes (0x22) are allowed a non-zero pad byte.
    assert pad == 0 or node_type == 0x22
    assert node_type in node_type_dispatch_table
    regex_list.append(
        node_type_dispatch_table[node_type](
            node_type, node_arg, node_transition, node_idx))
    return i

def classes_parse(re, i, cclass_count):
    """Parse the character-class table and return the classes as strings.

    Layout (little-endian): magic, table size, per-class start offsets
    (uint32 each), per-class lengths (1 byte each), then the class
    contents as (start, end) byte pairs.  Returns None when the regex has
    no character classes.
    """
    def transform(x):
        # Escape characters that are special inside a [...] class.
        c = chr(x)
        if c in '[]-':
            return '\\' + c
        else:
            return c
    def transform_range(start, end):
        if start != end:
            return '{}-{}'.format(transform(start), transform(end))
        return transform(start)
    def transform_content(content):
        cls = ''
        assert len(content) % 2 == 0  # ranges come in (start, end) pairs
        for idx in range(0, len(content), 2):
            start = content[idx]
            end = content[idx+1]
            cls += transform_range(start, end)
        return cls

    if cclass_count == 0:
        return

    # bytes() replaces the Python 2 ''.join(chr(...)) idiom, which produced
    # a str that struct.unpack rejects on Python 3.
    classes_magic, classes_size = struct.unpack('<II', bytes(re[i:i+8]))
    i += 0x8
    logger.debug('classes magic = {:#x} size = {:#x}'.format(
        classes_magic, classes_size))
    assert len(re) - i == classes_size
    starts = struct.unpack('<{}I'.format(cclass_count),
        bytes(re[i:i+4*cclass_count]))
    i += 0x4 * cclass_count

    lens = struct.unpack('<{}B'.format(cclass_count),
        bytes(re[i:i+cclass_count]))
    i += cclass_count

    contents = [re[i+start:i+start+clen] for start, clen in zip(starts, lens)]
    return [transform_content(content) for content in contents]

class RegexParser(object):
    """Parser for the v2 binary regex blob (little-endian node table)."""

    @staticmethod
    def parse(re, i, regex_list):
        """Parse the regex starting at offset i, filling regex_list.

        Header: magic, node count, start node, end node, character-class
        count (5 little-endian uint32s), followed by the node table and
        the class table.  Class indices in the node list are resolved into
        their final "[...]" form at the end.
        """
        # bytes() replaces the Python 2 ''.join(chr(...)) idiom, which
        # produced a str that struct.unpack rejects on Python 3.
        magic = struct.unpack('<I', bytes(re[i:i+0x4]))[0]
        logger.debug('magic = {:#x}'.format(magic))

        node_count = struct.unpack('<I', bytes(re[i+0x4:i+0x8]))[0]
        logger.debug('node count = {:#x}'.format(node_count))

        start_node = struct.unpack('<I', bytes(re[i+0x8:i+0xC]))[0]
        logger.debug('start node = {:#x}'.format(start_node))

        end_node = struct.unpack('<I', bytes(re[i+0xC:i+0x10]))[0]
        logger.debug('end node = {:#x}'.format(end_node))

        cclass_count = struct.unpack('<I', bytes(re[i+0x10:i+0x14]))[0]
        logger.debug('character class count = {:#x}'.format(cclass_count))
        i += 0x14

        for node_idx in range(node_count):
            i = node_parse(re, i, regex_list, node_idx)

        classes = classes_parse(re, i, cclass_count)

        # Resolve class indices into their rendered character sets.
        for node in regex_list:
            if node['type'] == 'class':
                node['value'] = '[{}]'.format(classes[node['value']])
            elif node['type'] == 'class_exclude':
                node['value'] = '[{}]'.format(classes[node['value']])

        regex_list[start_node]['start_node'] = True



================================================
FILE: reverse-sandbox/regex_parser_v3.py
================================================
import logging
import logging.config  # fileConfig lives in the logging.config submodule;
                       # "import logging" alone does not guarantee it loads
import struct

logging.config.fileConfig("logger.config")
logger = logging.getLogger(__name__)

def parse_character(re, i, regex_list):
    """Append a literal-character node read from re[i+1]; '.' is escaped."""
    ch = chr(re[i+1])
    regex_list.append({
        "pos": i - 6,
        "nextpos": i + 2 - 6,
        "type": "character",
        "value": "[.]" if ch == "." else ch,
    })
    return i + 1

def parse_beginning_of_line(i, regex_list):
    """Append a '^' (line-start) anchor node."""
    regex_list.append(
        {"pos": i - 6, "nextpos": i + 1 - 6, "type": "character", "value": "^"})

def parse_end_of_line(i, regex_list):
    """Append a '$' (line-end) anchor node."""
    regex_list.append(
        {"pos": i - 6, "nextpos": i + 1 - 6, "type": "character", "value": "$"})

def parse_any_character(i, regex_list):
    """Append a '.' (any character) node."""
    regex_list.append(
        {"pos": i - 6, "nextpos": i + 1 - 6, "type": "character", "value": "."})

def parse_jump_forward(re, i, regex_list):
    """Append a forward-jump node; the target is 16-bit little-endian."""
    target = re[i+1] | (re[i+2] << 8)
    regex_list.append(
        {"pos": i - 6, "nextpos": i + 3 - 6, "type": "jump_forward", "value": target})
    return i + 2

def parse_jump_backward(re, i, regex_list):
    """Append a backward-jump node; the target is 16-bit little-endian."""
    target = re[i+1] | (re[i+2] << 8)
    regex_list.append(
        {"pos": i - 6, "nextpos": i + 3 - 6, "type": "jump_backward", "value": target})
    logger.debug("(0xa) i: %d (0x%x), re[i, i+1, i+2]: 0x%x, 0x%x, 0x%x", i, i, re[i], re[i+1], re[i+2])
    logger.debug("value: 0x%x", target)
    return i + 2

def parse_character_class(re, i, regex_list):
    """Append a character-class node ("[...]" or "[^...]") parsed at re[i].

    The high nibble of re[i] holds the number of (start, end) byte pairs
    that follow.  Returns the index of the last byte consumed (the caller
    advances past it).
    """
    num = (re[i] >> 4)
    i = i+1
    logger.debug("i: %d, num: %d", i, num)
    values = []
    value = "["
    for j in range(0, num):
        values.append(re[i+2*j])
        values.append(re[i+2*j+1])
    first = values[0]
    last = values[2*num-1]
    # In case of excludes: a first bound greater than the last signals a
    # negated class.  Rotate the bounds right by one so the wrapped range
    # is re-centred, then shrink every pair by one on each side to invert
    # the excluded ranges into included ones.
    if (first > last):
        node_type = "class_exclude"
        value += "^"
        for j in range(len(values)-1, 0, -1):
            values[j] = values[j-1]
        values[0] = last
        for j in range(0, len(values)):
            if j % 2 == 0:
                values[j] = values[j]+1
            else:
                values[j] = values[j]-1
    else:
        node_type = "class"
    # Render each (start, end) pair as "a-z", or a single char when equal.
    for j in range(0, len(values), 2):
        if values[j] < values[j+1]:
            value += "%s-%s" % (chr(values[j]), chr(values[j+1]))
        else:
            value += "%s" % (chr(values[j]))
    value += "]"
    regex_list.append({
        "pos": i-6-1,
        "nextpos": i + 2 * num - 6,
        "type": node_type,
        "value": value
        })
    # NOTE(review): this builds a tuple, not a string, so logger.debug logs
    # the tuple verbatim — string concatenation was probably intended.
    message = "values: [", ", ".join([hex(j) for j in values]), "]"
    logger.debug(message)

    return i + 2 * num - 1

def parse_end(re, i, regex_list):
    """Append a match-end node."""
    regex_list.append(
        {"pos": i - 6, "nextpos": i + 2 - 6, "type": "end", "value": 0})
    return i + 1

def parse(re, i, regex_list):
    """Dispatch on the opcode at re[i], append the decoded node.

    Returns the index of the next opcode to parse.
    """
    # Actual character.
    if re[i] == 0x02:
        i = parse_character(re, i, regex_list)
    # Beginning of line.
    elif re[i] == 0x19:
        parse_beginning_of_line(i, regex_list)
    # End of line.
    elif re[i] == 0x29:
        parse_end_of_line(i, regex_list)
    # Any character.
    elif re[i] == 0x09:
        parse_any_character(i, regex_list)
    # Jump forward.
    elif re[i] == 0x2f:
        i = parse_jump_forward(re, i, regex_list)
    # Jump backward.
    elif re[i] & 0xf == 0xa:
        i = parse_jump_backward(re, i, regex_list)
    # Character class.
    elif re[i] & 0xf == 0xb:
        i = parse_character_class(re, i, regex_list)
    # End of match.
    elif re[i] & 0xf == 0x5:
        i = parse_end(re, i, regex_list)
    else:
        # Fixed: the previous call passed an argument with no %-placeholder
        # in the message, which made the logging module raise a formatting
        # error instead of logging the unknown opcode.
        logger.warning("unknown node type: %s", hex(re[i]))

    return i + 1

class RegexParser(object):
    """Parser for the v3 binary regex blob (length-prefixed opcode stream)."""

    @staticmethod
    def parse(re, i, regex_list):
        """Parse the regex starting at offset i, filling regex_list."""
        # bytes() replaces the Python 2 ''.join(chr(...)) idiom, which
        # produced a str that struct.unpack rejects on Python 3.
        length = struct.unpack('<H', bytes(re[i:i+2]))[0]
        logger.debug("re.length: 0x%x", length)
        i += 2
        assert length == len(re) - i  # length prefix covers the whole stream
        while i < len(re):
            i = parse(re, i, regex_list)

        regex_list[0]["start_node"] = True



================================================
FILE: reverse-sandbox/reverse_sandbox.py
================================================
#!/usr/bin/env python3

"""
iOS/OS X sandbox decompiler

Heavily inspired from Dion Blazakis' previous work
    https://github.com/dionthegod/XNUSandbox/tree/master/sbdis
Excellent information from Stefan Essers' slides and work
    http://www.slideshare.net/i0n1c/ruxcon-2014-stefan-esser-ios8-containers-sandboxes-and-entitlements
    https://github.com/sektioneins/sandbox_toolkit
"""

import sys
import struct
import logging
import logging.config
import argparse
import os
import re
import operation_node
import sandbox_filter
import sandbox_regex


logging.config.fileConfig("logger.config")
logger = logging.getLogger(__name__)


def extract_string_from_offset(f, offset, ios_version):
    """Extract string (literal) from given offset.

    The stored length counts the trailing NUL, hence the -1.  The profile
    file is opened in binary mode, so the raw bytes are decoded before
    returning; the previous '%s' % f.read(...) produced "b'...'" strings
    on Python 3.  Also avoids shadowing the builtin len().
    """
    if ios_version >= 13:
        f.seek(get_base_addr(f, ios_version) + offset * 8)
        length = struct.unpack("<H", f.read(2))[0] - 1
    else:
        f.seek(offset * 8)
        length = struct.unpack("<I", f.read(4))[0] - 1
    return f.read(length).decode("utf-8", errors="replace")


def create_operation_nodes(infile, regex_list, num_operation_nodes,
        ios_major_version, keep_builtin_filters, global_vars):
    """Build the operation-node list from infile and convert each node's filter.

    Every node is logged (via str_debug()) both before and after the
    filter conversion pass.  Returns the list of operation nodes.
    """
    nodes = operation_node.build_operation_nodes(infile,
        num_operation_nodes, ios_major_version)

    logger.info("operation nodes")
    for index, current in enumerate(nodes):
        logger.info("%d: %s", index, current.str_debug())

    for current in nodes:
        current.convert_filter(sandbox_filter.convert_filter_callback, infile,
                    regex_list, ios_major_version, keep_builtin_filters,
                    global_vars, get_base_addr(infile, ios_major_version))

    logger.info("operation nodes after filter conversion")
    for index, current in enumerate(nodes):
        logger.info("%d: %s", index, current.str_debug())

    return nodes


def process_profile(infile, outfname, sb_ops, ops_to_reverse, op_table, operation_nodes):
    """Reverse one profile to SBPL text (outfname) and XML (outfname + ".xml").

    sb_ops is the full list of operation names; ops_to_reverse, when
    non-empty, restricts the output to those operations.  op_table[idx]
    holds the operation-node offset for sb_ops[idx]; index 0 is the
    'default' operation.
    """
    outfile = open(outfname, "wt")
    outfile_xml = open(outfname + ".xml", "wt")

    # Print version.
    outfile.write("(version 1)\n")

    # Emit the XML prologue plus an inline DTD describing the output schema.
    outfile_xml.write('<?xml version="1.0" encoding="us-ascii" standalone="yes"?>\n')
    outfile_xml.write('<!DOCTYPE operations [\n')
    outfile_xml.write('<!ELEMENT operations (operation*)>\n')
    outfile_xml.write('<!ELEMENT operation (filters?)>\n')
    outfile_xml.write('<!ELEMENT filters (filter | require)*>\n')
    outfile_xml.write('<!ELEMENT require (filter | require)*>\n')
    outfile_xml.write('<!ELEMENT filter (#PCDATA)>\n')
    outfile_xml.write('<!ATTLIST operation\n')
    outfile_xml.write('\tname CDATA #REQUIRED\n')
    outfile_xml.write('\taction (deny|allow) #REQUIRED>\n')
    outfile_xml.write('<!ATTLIST require\n')
    outfile_xml.write('\ttype (require-all|require-any|require-not|require-entitlement) #REQUIRED\n')
    outfile_xml.write('\tvalue CDATA #IMPLIED>\n')
    outfile_xml.write('<!ATTLIST filter\n')
    outfile_xml.write('\tname CDATA #REQUIRED\n')
    outfile_xml.write('\targument CDATA #IMPLIED>\n')
    outfile_xml.write(']>\n')
    outfile_xml.write("<operations>\n")

    # Extract node for 'default' operation (index 0).
    default_node = operation_node.find_operation_node_by_offset(operation_nodes, op_table[0])
    outfile.write("(%s default)\n" % (default_node.terminal))
    outfile_xml.write("\t<operation name=\"default\" action=\"%s\" />\n" % (default_node.terminal))

    # For each operation expand operation node.
    for idx in range(1, len(op_table)):
        offset = op_table[idx]
        operation = sb_ops[idx]
        # Go past operations not in list, in case list is not empty.
        if ops_to_reverse:
            if operation not in ops_to_reverse:
                continue
        logger.info("parsing operation %s (index %d)", operation, idx)
        node = operation_node.find_operation_node_by_offset(operation_nodes, offset)
        if not node:
            logger.info("operation %s (index %d) has no operation node", operation, idx)
            continue
        g = operation_node.build_operation_node_graph(node, default_node)
        if g:
            # A non-trivial rule graph: reduce it and emit SBPL.
            rg = operation_node.reduce_operation_node_graph(g)
            rg.str_simple_with_metanodes()
            rg.print_vertices_with_operation_metanodes(operation, default_node.terminal.is_allow(), outfile)
            #rg.dump_xml(operation, outfile_xml)
        else:
            # No graph: the operation is a bare terminal; only emit it when
            # it differs from the default decision.
            logger.info("no graph for operation %s (index %d)", operation, idx)
            if node.terminal and default_node.terminal:
                if node.terminal.type != default_node.terminal.type:
                    outfile.write("(%s %s)\n" % (node.terminal, operation))
                    outfile_xml.write("\t<operation name=\"%s\" action=\"%s\" />\n" % (operation, node.terminal))

    outfile.close()
    outfile_xml.write("</operations>\n")
    outfile_xml.close()

def get_ios_major_version(release):
    """Return the major version number of a release string like '9.0.2'."""
    major, _, _ = release.partition('.')
    return int(major)

def is_ios_more_than_10_release(release):
    """
    Returns True if release is using newer (iOS >= 10) binary sandbox profile format.
    """
    return get_ios_major_version(release) >= 10


def display_sandbox_profiles(f, re_table_offset, num_sb_ops, ios_version):
    """Print the names of all sandbox profiles contained in a bundle.

    Header offsets are version-dependent; for pre-13 bundles the number
    of operation nodes is inferred from the gap between the first node
    and the regex table.
    """
    logger.info("Printing sandbox profiles from bundle")
    # The profile count lives at a version-dependent header offset.
    if ios_version >= 13:
        f.seek(6)
    elif ios_version >= 12:
        f.seek(12)
    elif ios_version >= 10:
        f.seek(10)
    else:
        f.seek(6)
    num_profiles = struct.unpack("<H", f.read(2))[0]

    if ios_version >= 13:
        f.seek(2)
        num_operation_nodes = struct.unpack("<H", f.read(2))[0]
        print(hex(num_operation_nodes))
    else:
        # Place file pointer to start of operation nodes area.
        if ios_version >= 12:
            f.seek(14 + (num_sb_ops + 2) * 2 * num_profiles)
        elif ios_version >= 10:
            f.seek(12 + (num_sb_ops + 2) * 2 * num_profiles)
        else:
            f.seek(8 + (num_sb_ops + 2) * 2 * num_profiles)
        # Skip the zero padding before the first operation node.
        while True:
            word = struct.unpack("<H", f.read(2))[0]
            if word != 0:
                f.seek(-2, 1)
                break
        start = f.tell()
        end = re_table_offset * 8
        # Fixed: "/" is float division on Python 3; node counts are ints.
        num_operation_nodes = (end - start) // 8

    logger.info("number of operation nodes: %u" % num_operation_nodes)

    for i in range(0, num_profiles):
        # Seek to this profile's (name offset, boundary) entry.
        if ios_version >= 13:
            f.seek(8)
            regex_table_count = struct.unpack('<H', f.read(2))[0]
            f.seek(10)
            global_table_count = struct.unpack('<B', f.read(1))[0]
            f.seek(11)
            debug_table_count = struct.unpack('<B', f.read(1))[0]
            f.seek(12 + (regex_table_count + global_table_count + \
                    debug_table_count) * 2 + (num_sb_ops + 2) * 2 * i)
        elif ios_version >= 12:
            f.seek(14 + (num_sb_ops + 2) * 2 * i)
        elif ios_version >= 10:
            f.seek(12 + (num_sb_ops + 2) * 2 * i)
        else:
            f.seek(8 + (num_sb_ops + 2) * 2 * i)

        name_offset = struct.unpack("<H", f.read(2))[0]
        boundary = struct.unpack("<H", f.read(2))[0]  # read but unused
        name = extract_string_from_offset(f, name_offset, ios_version)

        print(name)

    logger.info("Found %d sandbox profiles." % num_profiles)


def get_global_vars(f, vars_offset, num_vars, base_offset):
    """Read the global variable name table and return it as a list of str.

    base_offset > 0 selects the iOS 13+ layout (16-bit length prefixes and
    an absolute variable table offset); otherwise the legacy layout with
    32-bit length prefixes is used.
    """
    global_vars = []
    for i in range(0, num_vars):
        if base_offset > 0:
            f.seek(vars_offset + i*2)
        else:
            f.seek(vars_offset*8 + i*2)
        current_offset = struct.unpack("<H", f.read(2))[0]
        f.seek(base_offset + current_offset * 8)
        if base_offset > 0:
            str_len = struct.unpack("<H", f.read(2))[0]
        else:
            str_len = struct.unpack("<I", f.read(4))[0]
        # Length includes the trailing NUL.  Decode because the file is
        # opened in binary mode: the previous code appended bytes, which
        # made the str.join below raise TypeError on Python 3.  (Also no
        # longer shadows the builtin len().)
        s = f.read(str_len - 1).decode("utf-8", errors="replace")
        global_vars.append(s)
    logger.info("global variables are {:s}".format(", ".join(s for s in global_vars)))
    return global_vars

def get_base_addr(f, ios_version):
    """Return the base offset of the profile's string data area.

    Only iOS >= 13 headers carry the table counts used here; older
    formats have no extra base offset and yield 0.
    """
    if ios_version < 13:
        return 0

    def read_u16(offset):
        f.seek(offset)
        return struct.unpack('<H', f.read(2))[0]

    def read_u8(offset):
        f.seek(offset)
        return struct.unpack('<B', f.read(1))[0]

    op_nodes_count = read_u16(2)      # operation node table count
    sb_ops_count = read_u16(4)        # sandbox operations count
    sb_profiles_count = read_u16(6)   # sandbox profile count
    regex_table_count = read_u16(8)   # regular expressions count
    global_table_count = read_u8(10)  # global variable table count
    debug_table_count = read_u8(11)   # debug table count

    return 12 + (regex_table_count + global_table_count + debug_table_count)*2 \
            + (2 + sb_ops_count) * 2 * sb_profiles_count + op_nodes_count * 8 + 4


def main():
    """Reverse Apple binary sandbox file to SBPL (Sandbox Profile Language) format.

    Sample run:
        python reverse_sandbox.py -r 7.1.1 container.sb.bin
        python reverse_sandbox.py -r 7.1.1 -d out container.sb.bin
        python reverse_sandbox.py -r 7.1.1 -d out container.sb.bin -n network-inbound network-outbound
        python reverse_sandbox.py -r 9.0.2 -d out sandbox_bundle_iOS_9.0 -n network-inbound network-outbound -p container
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("filename", help="path to the binary sandbox profile")
    parser.add_argument("-r", "--release", help="iOS release version for sandbox profile", required=True)
    parser.add_argument("-o", "--operations_file", help="file with list of operations", required=True)
    parser.add_argument("-p", "--profile", nargs='+', help="profile to reverse (for bundles) (default is to reverse all operations)")
    parser.add_argument("-n", "--operation", nargs='+', help="particular operation(s) to reverse (default is to reverse all operations)")
    parser.add_argument("-d", "--directory", help="directory where to write reversed profiles (default is current directory)")
    parser.add_argument("-psb", "--print_sandbox_profiles", action="store_true", help="print sandbox profiles of a given bundle (only for iOS versions 9+)")
    parser.add_argument("-kbf", "--keep_builtin_filters", help="keep builtin filters in output", action="store_true")

    args = parser.parse_args()

    if args.filename is None:
        parser.print_usage()
        print("no sandbox profile/bundle file to reverse")
        sys.exit(1)

    # Read sandbox operations.
    sb_ops = [l.strip() for l in open(args.operations_file)]
    num_sb_ops = len(sb_ops)
    logger.info("num_sb_ops: %d", num_sb_ops)

    ops_to_reverse = []
    if args.operation:
        for op in args.operation:
            if op not in sb_ops:
                parser.print_usage()
                print("unavailable operation: {}".format(op))
                sys.exit(1)
            ops_to_reverse.append(op)

    if args.directory:
        out_dir = args.directory
    else:
        out_dir = os.getcwd()

    f = open(args.filename, "rb")

    if get_ios_major_version(args.release) >= 6:
        header = struct.unpack("<H", f.read(2))[0]
        logger.debug("header: 0x%x", header)
    else:
        logger.debug("header: none for iOS <6; using 0")
        header = 0

    if get_ios_major_version(args.release) >= 13:
        re_table_offset = 12
    else:
        re_table_offset = struct.unpack("<H", f.read(2))[0]
    
    if get_ios_major_version(args.release) >= 12:
        f.seek(8)
    re_table_count = struct.unpack("<H", f.read(2))[0]

    logger.debug("re_table_offset: 0x%x", re_table_offset)
    logger.debug("re_table_count: 0x%x", re_table_count)

    logger.debug("\n\nregular expressions:\n")
    regex_list = []
    if re_table_count > 0:
        if get_ios_major_version(args.release) >= 13:
            f.seek(re_table_offset)
        else:
            f.seek(re_table_offset * 8)
        
        re_offsets_table = struct.unpack("<%dH" % re_table_count, f.read(2 * re_table_count))
        for offset in re_offsets_table:
            if get_ios_major_version(args.release) >= 13:
                f.seek(get_base_addr(f, get_ios_major_version(args.release)) + offset * 8)
                re_length = struct.unpack("<H", f.read(2))[0]
            else:
                f.seek(offset * 8)
                re_length = struct.unpack("<I", f.read(4))[0]
            
            re = struct.unpack("<%dB" % re_length, f.read(re_length))
            logger.debug("total_re_length: 0x%x", re_length)
            re_debug_str = "re: [", ", ".join([hex(i) for i in re]), "]"
            logger.debug(re_debug_str)
            regex_list.append(sandbox_regex.parse_regex(re))
    logger.debug(regex_list)

    if args.print_sandbox_profiles:
        if header == 0x8000:
            display_sandbox_profiles(f, re_table_offset, num_sb_ops, get_ios_major_version(args.release))
        else:
            print("cannot print sandbox profiles list; filename {} is not a sandbox bundle".format(args.filename))
        sys.exit(0)

    global_vars = None

    # In case of sandbox profile bundle, go through each profile.
    if header == 0x8000:
        logger.info("using profile bundle")
        if get_ios_major_version(args.release) >= 13:
            # get the regex table entries
            f.seek(8)
            regex_table_count = struct.unpack('<H', f.read(2))[0]
            vars_offset = 12 + regex_table_count * 2
            f.seek(10)
            num_vars = struct.unpack("<B", f.read(1))[0]
            logger.info("{:d} global vars at offset 0x{:0x}".format(num_vars, vars_offset))
            global_vars = get_global_vars(f, vars_offset, num_vars, get_base_addr(f, get_ios_major_version(args.release)))
            f.seek(6)
        elif get_ios_major_version(args.release) >= 12:
            f.seek(4)
            vars_offset = struct.unpack("<H", f.read(2))[0]
            f.seek(10)
            num_vars = struct.unpack("<B", f.read(1))[0]
            logger.info("{:d} global vars at offset 0x{:0x}".format(num_vars, vars_offset))
            global_vars = get_global_vars(f, vars_offset, num_vars, 0)
            f.seek(12)
        elif get_ios_major_version(args.release) >= 10:
            f.seek(6)
            vars_offset = struct.unpack("<H", f.read(2))[0]
            num_vars = struct.unpack("<H", f.read(2))[0]
            logger.info("{:d} global vars at offset 0x{:0x}".format(num_vars, vars_offset))
            global_vars = get_global_vars(f, vars_offset, num_vars, 0)
            f.seek(10)
        else:
            f.seek(6)
        num_profiles = struct.unpack("<H", f.read(2))[0]
        logger.info("number of profiles in bundle: %d", num_profiles)

        if get_ios_major_version(args.release) >= 13:
            f.seek(2)
            num_operation_nodes = struct.unpack("<H", f.read(2))[0]
            f.seek(get_base_addr(f, get_ios_major_version(args.release)) - num_operation_nodes*8)
        else:
            # Place file pointer to start of operation nodes area.
            if get_ios_major_version(args.release) >= 12:
                f.seek(14 + (num_sb_ops + 2) * 2 * num_profiles)
            elif get_ios_major_version(args.release) >= 10:
                f.seek(12 + (num_sb_ops + 2) * 2 * num_profiles)
            else:
     
Download .txt
gitextract_1fgfkynp/

├── .github/
│   └── workflows/
│       ├── config/
│       │   └── config.json
│       ├── linter.yml
│       └── rules/
│           ├── common/
│           │   ├── inlineTokenChildren.js
│           │   └── wordPattern.js
│           ├── md101.js
│           ├── md102.js
│           ├── md103.js
│           ├── md104.js
│           └── rules.js
├── .gitignore
├── .gitmodules
├── LICENSE
├── README.md
├── helpers/
│   └── extract_sandbox_data.py
└── reverse-sandbox/
    ├── filters/
    │   ├── filters_ios11.json
    │   ├── filters_ios12.json
    │   ├── filters_ios13.json
    │   ├── filters_ios14.json
    │   ├── filters_ios4.json
    │   ├── filters_ios5.json
    │   └── filters_ios6.json
    ├── filters.py
    ├── logger.config
    ├── operation_node.py
    ├── regex_parser_v1.py
    ├── regex_parser_v2.py
    ├── regex_parser_v3.py
    ├── reverse_sandbox.py
    ├── reverse_string.py
    ├── sandbox_filter.py
    └── sandbox_regex.py
Download .txt
SYMBOL INDEX (313 symbols across 12 files)

FILE: .github/workflows/rules/common/inlineTokenChildren.js
  class InlineTokenChildren (line 1) | class InlineTokenChildren {
    method constructor (line 2) | constructor(token) {
  method [Symbol.iterator] (line 12) | *[Symbol.iterator]() {

FILE: .github/workflows/rules/common/wordPattern.js
  class WordPattern (line 1) | class WordPattern {
    method constructor (line 2) | constructor(pattern, parameters) {
    method test (line 12) | test(line) {
  class Match (line 17) | class Match {
    method constructor (line 18) | constructor(match) {
    method range (line 22) | range() {
    method toString (line 35) | toString() {

FILE: helpers/extract_sandbox_data.py
  function binary_get_word_size (line 13) | def binary_get_word_size(binary: lief.MachO.Binary):
  function unpack (line 35) | def unpack(bytes_list):
  function binary_get_string_from_address (line 54) | def binary_get_string_from_address(binary: lief.MachO.Binary, vaddr: int):
  function untag_pointer (line 95) | def untag_pointer(tagged_pointer):
  function get_section_from_segment (line 117) | def get_section_from_segment(binary: lief.MachO.FatBinary,
  function get_xref (line 151) | def get_xref(binary: lief.MachO.Binary, vaddr: int):
  function get_tables_section (line 182) | def get_tables_section(binary: lief.MachO.Binary):
  function is_vaddr_in_section (line 217) | def is_vaddr_in_section(vaddr, section):
  function unpack_pointer (line 233) | def unpack_pointer(addr_size, binary, vaddr):
  function extract_data_tables_from_section (line 252) | def extract_data_tables_from_section(binary: lief.MachO.Binary, to_data,...
  function extract_string_tables (line 301) | def extract_string_tables(binary: lief.MachO.Binary):
  function extract_separated_profiles (line 316) | def extract_separated_profiles(binary, string_tables):
  function extract_sbops (line 406) | def extract_sbops(string_tables):
  function get_ios_major_version (line 457) | def get_ios_major_version(version: str):
  function findall (line 470) | def findall(searching, pattern):
  function check_regex (line 487) | def check_regex(data: bytes, base_index: int, ios_version: int):
  function unpack_for_newer_ios (line 525) | def unpack_for_newer_ios(base_index, count, data):
  function check_bundle (line 550) | def check_bundle(data: bytes, base_index: int, ios_version: int):
  function extract_bundle_profiles (line 607) | def extract_bundle_profiles(binary: lief.MachO.Binary, ios_version: int):
  function main (line 633) | def main(args):

FILE: reverse-sandbox/filters.py
  function read_filters (line 3) | def read_filters(file_path):
  class Filters (line 15) | class Filters(object):
    method get_filters (line 26) | def get_filters(ios_major_version):
    method exists (line 42) | def exists(ios_major_version, id):
    method get (line 46) | def get(ios_major_version, id):

FILE: reverse-sandbox/operation_node.py
  class TerminalNode (line 12) | class TerminalNode():
    method __eq__ (line 25) | def __eq__(self, other):
    method __str__ (line 28) | def __str__(self):
    method is_allow (line 36) | def is_allow(self):
    method is_deny (line 39) | def is_deny(self):
  class NonTerminalNode (line 43) | class NonTerminalNode():
    method __eq__ (line 62) | def __eq__(self, other):
    method simplify_list (line 65) | def simplify_list(self, arg_list):
    method str_debug (line 87) | def str_debug(self):
    method __str__ (line 155) | def __str__(self):
    method str_not (line 223) | def str_not(self):
    method values (line 291) | def values(self):
    method is_entitlement_start (line 296) | def is_entitlement_start(self):
    method is_entitlement (line 299) | def is_entitlement(self):
    method is_last_regular_expression (line 302) | def is_last_regular_expression(self):
    method convert_filter (line 305) | def convert_filter(self, convert_fn, f, regex_list, ios_major_version,
    method is_non_terminal_deny (line 311) | def is_non_terminal_deny(self):
    method is_non_terminal_allow (line 315) | def is_non_terminal_allow(self):
    method is_non_terminal_non_terminal (line 319) | def is_non_terminal_non_terminal(self):
    method is_allow_non_terminal (line 322) | def is_allow_non_terminal(self):
    method is_deny_non_terminal (line 326) | def is_deny_non_terminal(self):
    method is_deny_allow (line 330) | def is_deny_allow(self):
    method is_allow_deny (line 334) | def is_allow_deny(self):
  class OperationNode (line 339) | class OperationNode():
    method __init__ (line 355) | def __init__(self, offset):
    method is_terminal (line 358) | def is_terminal(self):
    method is_non_terminal (line 361) | def is_non_terminal(self):
    method parse_terminal (line 364) | def parse_terminal(self, ios_major_version):
    method parse_non_terminal (line 372) | def parse_non_terminal(self):
    method parse_raw (line 380) | def parse_raw(self, ios_major_version):
    method convert_filter (line 387) | def convert_filter(self, convert_fn, f, regex_list, ios_major_version,
    method str_debug (line 393) | def str_debug(self):
    method __str__ (line 403) | def __str__(self):
    method str_not (line 411) | def str_not(self):
    method values (line 419) | def values(self):
    method __eq__ (line 425) | def __eq__(self, other):
    method __hash__ (line 428) | def __hash__(self):
  function has_been_processed (line 442) | def has_been_processed(node):
  function build_operation_node (line 447) | def build_operation_node(raw, offset, ios_major_version):
  function build_operation_nodes (line 455) | def build_operation_nodes(f, num_operation_nodes, ios_major_version):
  function find_operation_node_by_offset (line 481) | def find_operation_node_by_offset(operation_nodes, offset):
  function ong_mark_not (line 488) | def ong_mark_not(g, node, parent_node, nodes_to_process):
  function ong_end_path (line 498) | def ong_end_path(g, node, parent_node, nodes_to_process):
  function ong_add_to_path (line 503) | def ong_add_to_path(g, node, parent_node, nodes_to_process):
  function ong_add_to_parent_path (line 509) | def ong_add_to_parent_path(g, node, parent_node, nodes_to_process):
  function build_operation_node_graph (line 516) | def build_operation_node_graph(node, default_node):
  function print_operation_node_graph (line 609) | def print_operation_node_graph(g):
  function remove_edge_in_operation_node_graph (line 621) | def remove_edge_in_operation_node_graph(g, node_start, node_end):
  function remove_node_in_operation_node_graph (line 627) | def remove_node_in_operation_node_graph(g, node_to_remove):
  function _get_operation_node_graph_paths (line 642) | def _get_operation_node_graph_paths(g, node):
  function get_operation_node_graph_paths (line 660) | def get_operation_node_graph_paths(g, start_node):
  function _remove_duplicate_node_edges (line 669) | def _remove_duplicate_node_edges(g, node, start_list):
  function remove_duplicate_node_edges (line 682) | def remove_duplicate_node_edges(g, start_list):
  function clean_edges_in_operation_node_graph (line 688) | def clean_edges_in_operation_node_graph(g):
  function clean_nodes_in_operation_node_graph (line 762) | def clean_nodes_in_operation_node_graph(g):
  class ReducedVertice (line 778) | class ReducedVertice():
    method __init__ (line 789) | def __init__(self, type=TYPE_SINGLE, value=None, decision=None, is_not...
    method set_value (line 795) | def set_value(self, value):
    method set_type (line 798) | def set_type(self, type):
    method _replace_in_list (line 801) | def _replace_in_list(self, lst, old, new):
    method replace_in_list (line 813) | def replace_in_list(self, old, new):
    method _replace_sublist_in_list (line 817) | def _replace_sublist_in_list(self, lst, old, new):
    method replace_sublist_in_list (line 837) | def replace_sublist_in_list(self, old, new):
    method set_decision (line 841) | def set_decision(self, decision):
    method set_type_single (line 844) | def set_type_single(self):
    method set_type_start (line 847) | def set_type_start(self):
    method set_type_require_entitlement (line 850) | def set_type_require_entitlement(self):
    method set_type_require_any (line 853) | def set_type_require_any(self):
    method set_type_require_all (line 856) | def set_type_require_all(self):
    method set_integrated_vertice (line 859) | def set_integrated_vertice(self, integrated_vertice):
    method is_type_single (line 863) | def is_type_single(self):
    method is_type_start (line 866) | def is_type_start(self):
    method is_type_require_entitlement (line 869) | def is_type_require_entitlement(self):
    method is_type_require_all (line 872) | def is_type_require_all(self):
    method is_type_require_any (line 875) | def is_type_require_any(self):
    method recursive_str (line 878) | def recursive_str(self, level, recursive_is_not):
    method recursive_str_debug (line 915) | def recursive_str_debug(self, level, recursive_is_not):
    method recursive_xml_str (line 948) | def recursive_xml_str(self, level, recursive_is_not):
    method __str__ (line 991) | def __str__(self):
    method str_debug (line 994) | def str_debug(self):
    method str_simple (line 997) | def str_simple(self):
    method str_print_debug (line 1011) | def str_print_debug(self):
    method str_print (line 1025) | def str_print(self):
    method str_print_not (line 1039) | def str_print_not(self):
    method xml_str (line 1050) | def xml_str(self):
  class ReducedEdge (line 1054) | class ReducedEdge():
    method __init__ (line 1058) | def __init__(self, start=None, end=None):
    method str_debug (line 1062) | def str_debug(self):
    method str_simple (line 1065) | def str_simple(self):
    method __str__ (line 1070) | def __str__(self):
  class ReducedGraph (line 1074) | class ReducedGraph():
    method __init__ (line 1080) | def __init__(self):
    method add_vertice (line 1086) | def add_vertice(self, v):
    method add_edge (line 1089) | def add_edge(self, e):
    method add_edge_by_vertices (line 1092) | def add_edge_by_vertices(self, v_start, v_end):
    method set_final_vertices (line 1096) | def set_final_vertices(self):
    method contains_vertice (line 1107) | def contains_vertice(self, v):
    method contains_edge (line 1110) | def contains_edge(self, e):
    method contains_edge_by_vertices (line 1113) | def contains_edge_by_vertices(self, v_start, v_end):
    method get_vertice_by_value (line 1119) | def get_vertice_by_value(self, value):
    method get_edge_by_vertices (line 1125) | def get_edge_by_vertices(self, v_start, v_end):
    method remove_vertice (line 1131) | def remove_vertice(self, v):
    method remove_vertice_update_decision (line 1139) | def remove_vertice_update_decision(self, v):
    method remove_edge (line 1150) | def remove_edge(self, e):
    method remove_edge_by_vertices (line 1154) | def remove_edge_by_vertices(self, v_start, v_end):
    method replace_vertice_in_edge_start (line 1159) | def replace_vertice_in_edge_start(self, old, new):
    method replace_vertice_in_edge_end (line 1171) | def replace_vertice_in_edge_end(self, old, new):
    method replace_vertice_in_single_vertices (line 1183) | def replace_vertice_in_single_vertices(self, old, new):
    method replace_vertice_list (line 1189) | def replace_vertice_list(self, old, new):
    method get_next_vertices (line 1202) | def get_next_vertices(self, v):
    method get_prev_vertices (line 1209) | def get_prev_vertices(self, v):
    method get_start_vertices (line 1216) | def get_start_vertices(self):
    method get_end_vertices (line 1223) | def get_end_vertices(self):
    method reduce_next_vertices (line 1230) | def reduce_next_vertices(self, v):
    method reduce_prev_vertices (line 1253) | def reduce_prev_vertices(self, v):
    method reduce_vertice_single_prev (line 1270) | def reduce_vertice_single_prev(self, v):
    method reduce_vertice_single_next (line 1305) | def reduce_vertice_single_next(self, v):
    method reduce_graph (line 1338) | def reduce_graph(self):
    method reduce_graph_with_metanodes (line 1378) | def reduce_graph_with_metanodes(self):
    method str_simple_with_metanodes (line 1425) | def str_simple_with_metanodes(self):
    method str_simple (line 1433) | def str_simple(self):
    method __str__ (line 1445) | def __str__(self):
    method remove_builtin_filters (line 1458) | def remove_builtin_filters(self):
    method reduce_integrated_vertices (line 1464) | def reduce_integrated_vertices(self, integrated_vertices):
    method aggregate_require_entitlement (line 1491) | def aggregate_require_entitlement(self, v):
    method aggregate_require_entitlement_nodes (line 1531) | def aggregate_require_entitlement_nodes(self):
    method cleanup_filters (line 1540) | def cleanup_filters(self):
    method remove_builtin_filters_with_metanodes (line 1544) | def remove_builtin_filters_with_metanodes(self):
    method replace_require_entitlement_with_metanodes (line 1558) | def replace_require_entitlement_with_metanodes(self, v):
    method aggregate_require_entitlement_with_metanodes (line 1569) | def aggregate_require_entitlement_with_metanodes(self):
    method cleanup_filters_with_metanodes (line 1575) | def cleanup_filters_with_metanodes(self):
    method print_vertices_with_operation (line 1579) | def print_vertices_with_operation(self, operation, out_f):
    method print_vertices_with_operation_metanodes (line 1599) | def print_vertices_with_operation_metanodes(self, operation, default_i...
    method dump_xml (line 1655) | def dump_xml(self, operation, out_f):
  function reduce_operation_node_graph (line 1674) | def reduce_operation_node_graph(g):
  function main (line 1718) | def main():

FILE: reverse-sandbox/regex_parser_v1.py
  function parse_character (line 7) | def parse_character(node_type, node_arg, node_transition, node_idx):
  function parse_end (line 17) | def parse_end(node_type, node_arg, node_transition, node_idx):
  function parse_jump_forward (line 24) | def parse_jump_forward(node_type, node_arg, node_transition, node_idx):
  function parse_jump_backward (line 32) | def parse_jump_backward(node_type, node_arg, node_transition, node_idx):
  function parse_beginning_of_line (line 40) | def parse_beginning_of_line(node_type, node_arg, node_transition, node_i...
  function parse_end_of_line (line 47) | def parse_end_of_line(node_type, node_arg, node_transition, node_idx):
  function parse_dot (line 54) | def parse_dot(node_type, node_arg, node_transition, node_idx):
  function parse_character_class (line 61) | def parse_character_class(node_type, node_arg, node_transition, node_idx):
  function parse_character_neg_class (line 68) | def parse_character_neg_class(node_type, node_arg, node_transition, node...
  function parse_parantheses_open (line 75) | def parse_parantheses_open(node_type, node_arg, node_transition, node_idx):
  function parse_parantheses_close (line 86) | def parse_parantheses_close(node_type, node_arg, node_transition, node_i...
  function node_parse (line 112) | def node_parse(re, i, regex_list, node_idx):
  function class_parse (line 130) | def class_parse(re, i, classes, class_idx):
  class RegexParser (line 160) | class RegexParser(object):
    method parse (line 163) | def parse(re, i, regex_list):

FILE: reverse-sandbox/regex_parser_v2.py
  function parse_character (line 7) | def parse_character(node_type, node_arg, node_transition, node_idx):
  function parse_end (line 17) | def parse_end(node_type, node_arg, node_transition, node_idx):
  function parse_jump_forward (line 24) | def parse_jump_forward(node_type, node_arg, node_transition, node_idx):
  function parse_jump_backward (line 32) | def parse_jump_backward(node_type, node_arg, node_transition, node_idx):
  function parse_beginning_of_line (line 40) | def parse_beginning_of_line(node_type, node_arg, node_transition, node_i...
  function parse_end_of_line (line 47) | def parse_end_of_line(node_type, node_arg, node_transition, node_idx):
  function parse_dot (line 54) | def parse_dot(node_type, node_arg, node_transition, node_idx):
  function parse_character_class (line 61) | def parse_character_class(node_type, node_arg, node_transition, node_idx):
  function parse_character_neg_class (line 68) | def parse_character_neg_class(node_type, node_arg, node_transition, node...
  function parse_parantheses_open (line 75) | def parse_parantheses_open(node_type, node_arg, node_transition, node_idx):
  function parse_parantheses_close (line 86) | def parse_parantheses_close(node_type, node_arg, node_transition, node_i...
  function node_parse (line 113) | def node_parse(re, i, regex_list, node_idx):
  function classes_parse (line 135) | def classes_parse(re, i, cclass_count):
  class RegexParser (line 175) | class RegexParser(object):
    method parse (line 178) | def parse(re, i, regex_list):

FILE: reverse-sandbox/regex_parser_v3.py
  function parse_character (line 7) | def parse_character(re, i, regex_list):
  function parse_beginning_of_line (line 19) | def parse_beginning_of_line(i, regex_list):
  function parse_end_of_line (line 27) | def parse_end_of_line(i, regex_list):
  function parse_any_character (line 35) | def parse_any_character(i, regex_list):
  function parse_jump_forward (line 43) | def parse_jump_forward(re, i, regex_list):
  function parse_jump_backward (line 53) | def parse_jump_backward(re, i, regex_list):
  function parse_character_class (line 65) | def parse_character_class(re, i, regex_list):
  function parse_end (line 107) | def parse_end(re, i, regex_list):
  function parse (line 116) | def parse(re, i, regex_list):
  class RegexParser (line 145) | class RegexParser(object):
    method parse (line 148) | def parse(re, i, regex_list):

FILE: reverse-sandbox/reverse_sandbox.py
  function extract_string_from_offset (line 29) | def extract_string_from_offset(f, offset, ios_version):
  function create_operation_nodes (line 40) | def create_operation_nodes(infile, regex_list, num_operation_nodes,
  function process_profile (line 61) | def process_profile(infile, outfname, sb_ops, ops_to_reverse, op_table, ...
  function get_ios_major_version (line 122) | def get_ios_major_version(release):
  function is_ios_more_than_10_release (line 128) | def is_ios_more_than_10_release(release):
  function display_sandbox_profiles (line 138) | def display_sandbox_profiles(f, re_table_offset, num_sb_ops, ios_version):
  function get_global_vars (line 199) | def get_global_vars(f, vars_offset, num_vars, base_offset):
  function get_base_addr (line 217) | def get_base_addr(f, ios_version):
  function main (line 248) | def main():

FILE: reverse-sandbox/reverse_string.py
  class ReverseStringState (line 11) | class ReverseStringState:
    method __init__ (line 36) | def __init__(self, binary_string):
    method update_state_unknown (line 50) | def update_state_unknown(self):
    method update_state_token_byte_read (line 54) | def update_state_token_byte_read(self):
    method update_state_concat_byte_read (line 58) | def update_state_concat_byte_read(self):
    method update_state_concat_save_byte_read (line 62) | def update_state_concat_save_byte_read(self):
    method update_state_end_byte_read (line 66) | def update_state_end_byte_read(self):
    method update_state_split_byte_read (line 70) | def update_state_split_byte_read(self):
    method update_state_range_byte_read (line 74) | def update_state_range_byte_read(self):
    method update_state_token_read (line 78) | def update_state_token_read(self):
    method update_state_reset_string (line 82) | def update_state_reset_string(self):
    method update_state_constant_read (line 86) | def update_state_constant_read(self):
    method update_state_single_byte_read (line 90) | def update_state_single_byte_read(self):
    method update_state_plus_read (line 94) | def update_state_plus_read(self):
    method update_state (line 98) | def update_state(self, b):
    method get_next_byte (line 126) | def get_next_byte(self):
    method get_length_minus_1 (line 134) | def get_length_minus_1(self):
    method read_token (line 146) | def read_token(self, substr_len):
    method update_base (line 152) | def update_base(self):
    method update_base_stack (line 157) | def update_base_stack(self):
    method end_current_token (line 161) | def end_current_token(self):
    method get_last_byte (line 166) | def get_last_byte(self):
    method get_substring (line 169) | def get_substring(self, substr_len):
    method end_with_subtokens (line 175) | def end_with_subtokens(self, subtokens):
    method is_end (line 181) | def is_end(self):
    method reset_base (line 186) | def reset_base(self):
    method reset_base_full (line 190) | def reset_base_full(self):
  class SandboxString (line 195) | class SandboxString:
    method parse_byte_string (line 199) | def parse_byte_string(self, s, global_vars):
    method __init__ (line 343) | def __init__(self):
  function main (line 347) | def main():

FILE: reverse-sandbox/sandbox_filter.py
  function get_filter_arg_string_by_offset (line 21) | def get_filter_arg_string_by_offset(f, offset):
  function get_filter_arg_string_by_offset_with_type (line 53) | def get_filter_arg_string_by_offset_with_type(f, offset):
  function get_filter_arg_string_by_offset_no_skip (line 105) | def get_filter_arg_string_by_offset_no_skip(f, offset):
  function get_filter_arg_network_address (line 115) | def get_filter_arg_network_address(f, offset):
  function get_filter_arg_integer (line 186) | def get_filter_arg_integer(f, arg):
  function get_filter_arg_octal_integer (line 191) | def get_filter_arg_octal_integer(f, arg):
  function get_filter_arg_boolean (line 196) | def get_filter_arg_boolean(f, arg):
  function get_filter_arg_regex_by_id (line 205) | def get_filter_arg_regex_by_id(f, regex_id):
  function get_filter_arg_ctl (line 217) | def get_filter_arg_ctl(f, arg):
  function get_filter_arg_vnode_type (line 224) | def get_filter_arg_vnode_type(f, arg):
  function get_filter_arg_owner (line 242) | def get_filter_arg_owner(f, arg):
  function get_filter_arg_socket_domain (line 257) | def get_filter_arg_socket_domain(f, arg):
  function get_filter_arg_socket_type (line 306) | def get_filter_arg_socket_type(f, arg):
  function get_none (line 321) | def get_none(f, arg):
  function get_filter_arg_privilege_id (line 326) | def get_filter_arg_privilege_id(f, arg):
  function get_filter_arg_process_attribute (line 356) | def get_filter_arg_process_attribute(f, arg):
  function get_filter_arg_csr (line 370) | def get_filter_arg_csr(f, arg):
  function get_filter_arg_host_port (line 388) | def get_filter_arg_host_port(f, arg):
  function convert_filter_callback (line 434) | def convert_filter_callback(f, ios_major_version_arg, keep_builtin_filte...

FILE: reverse-sandbox/sandbox_regex.py
  class Node (line 14) | class Node():
    method __init__ (line 33) | def __init__(self, name=None, type=None, value=''):
    method set_name (line 39) | def set_name(self, name):
    method set_type_jump_forward (line 42) | def set_type_jump_forward(self):
    method set_type_jump_backward (line 45) | def set_type_jump_backward(self):
    method set_type_character (line 48) | def set_type_character(self):
    method set_type_end (line 51) | def set_type_end(self):
    method is_type_end (line 54) | def is_type_end(self):
    method is_type_jump (line 57) | def is_type_jump(self):
    method is_type_jump_backward (line 60) | def is_type_jump_backward(self):
    method is_type_jump_forward (line 63) | def is_type_jump_forward(self):
    method is_type_character (line 66) | def is_type_character(self):
    method set_value (line 69) | def set_value(self, value):
    method set_flag_white (line 72) | def set_flag_white(self):
    method set_flag_grey (line 75) | def set_flag_grey(self):
    method set_flag_black (line 78) | def set_flag_black(self):
    method __str__ (line 81) | def __str__(self):
  class Graph (line 92) | class Graph():
    method __init__ (line 108) | def __init__(self):
    method add_node (line 111) | def add_node(self, node, next_list=None):
    method has_node (line 114) | def has_node(self, node):
    method update_node (line 117) | def update_node(self, node, next_list):
    method add_new_next_to_node (line 120) | def add_new_next_to_node(self, node, next):
    method __str__ (line 123) | def __str__(self):
    method get_node_for_idx (line 157) | def get_node_for_idx(self, idx):
    method get_re_index_for_pos (line 162) | def get_re_index_for_pos(self, regex_list, pos):
    method get_next_idx_for_regex_item (line 171) | def get_next_idx_for_regex_item(self, regex_list, regex_item):
    method fill_from_regex_list (line 176) | def fill_from_regex_list(self, regex_list):
    method get_character_nodes (line 229) | def get_character_nodes(self, node):
    method find_node_type_jump (line 238) | def find_node_type_jump(self, current_node, node, backup_dict):
    method reduce (line 250) | def reduce(self):
    method get_edges (line 263) | def get_edges(self, node):
    method convert_to_canonical (line 273) | def convert_to_canonical(self):
    method need_use_plus (line 292) | def need_use_plus(self, initial_string, string_to_add):
    method unify_two_strings (line 307) | def unify_two_strings(self, s1, s2):
    method unify_strings (line 347) | def unify_strings(self, string_list):
    method remove_state (line 359) | def remove_state(self, state_to_remove):
    method simplify (line 413) | def simplify(self):
    method combine_start_end_nodes (line 419) | def combine_start_end_nodes(self):
  function create_regex_list (line 449) | def create_regex_list(re):
  function parse_regex (line 477) | def parse_regex(re):
  function main (line 500) | def main():
Condensed preview — 31 files, each showing path, character count, and a content snippet. Download the .json file or copy it to the clipboard to get the full structured content (266K chars).
[
  {
    "path": ".github/workflows/config/config.json",
    "chars": 166,
    "preview": "{\n\t\"default\": true,\n\t\"MD048\": { \"style\": \"backtick\" },\n\t\"MD046\": { \"style\": \"fenced\" },\n\t\"MD029\": { \"style\": \"one\" },\n\t\""
  },
  {
    "path": ".github/workflows/linter.yml",
    "chars": 766,
    "preview": "name: Linter\n\non: [push, pull_request]\n\njobs:\n  superlinter:\n    name: Super Linter\n    runs-on: ubuntu-latest\n\n    step"
  },
  {
    "path": ".github/workflows/rules/common/inlineTokenChildren.js",
    "chars": 758,
    "preview": " class InlineTokenChildren {\n    constructor(token) {\n        if (token.type === \"inline\") {\n            this.root = tok"
  },
  {
    "path": ".github/workflows/rules/common/wordPattern.js",
    "chars": 1415,
    "preview": "class WordPattern {\n    constructor(pattern, parameters) {\n        const escapedDots = pattern.replace(/\\\\?\\./g, \"\\\\.\");"
  },
  {
    "path": ".github/workflows/rules/md101.js",
    "chars": 3865,
    "preview": "const { InlineTokenChildren } = require(\"./common/inlineTokenChildren\");\nconst { WordPattern } = require(\"./common/wordP"
  },
  {
    "path": ".github/workflows/rules/md102.js",
    "chars": 1955,
    "preview": "const http_keywords = [\n    \"GET\",\n    \"POST\",\n    \"PUT\",\n    \"PATCH\",\n    \"DELETE\",\n    \"Content-Type\",\n    \"Content-En"
  },
  {
    "path": ".github/workflows/rules/md103.js",
    "chars": 633,
    "preview": "\"use strict\";\n\nmodule.exports = {\n  \"names\": [ \"MD103\", \"inline triple backticks\" ],\n  \"description\": \"inline triple bac"
  },
  {
    "path": ".github/workflows/rules/md104.js",
    "chars": 1093,
    "preview": "\"use strict\";\n\nmodule.exports = {\n  names: [\"MD104\", \"one line per sentence\"],\n  description: \"one line (and only one li"
  },
  {
    "path": ".github/workflows/rules/rules.js",
    "chars": 154,
    "preview": "\"use strict\";\n\nconst rules = [\n\trequire(\"./md101.js\"),\n\trequire(\"./md102.js\"),\n\trequire(\"./md103.js\"),\n\trequire(\"./md104"
  },
  {
    "path": ".gitignore",
    "chars": 136,
    "preview": "*~\n*.o\n*.zip\n*.rar\n*.tar\n*gz\n*bz2\n*.obj\n*.a\n*.so\n*.lib\n*.dll\n*.swp\n*.swo\ntags\nTAGS\n*.exe\n*.class\n*.jar\n*.pyc\n*.log\n*.bin"
  },
  {
    "path": ".gitmodules",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "LICENSE",
    "chars": 1580,
    "preview": "BSD 3-Clause License\n\nCopyright (c) 2016, North Carolina State University and University POLITEHNICA\nof Bucharest.\nAll r"
  },
  {
    "path": "README.md",
    "chars": 7600,
    "preview": "# SandBlaster: Reversing the Apple Sandbox\n\nSandBlaster is a tool for reversing (decompiling) binary Apple sandbox profi"
  },
  {
    "path": "helpers/extract_sandbox_data.py",
    "chars": 21400,
    "preview": "#!/usr/bin/env python3\n\nimport sys\nimport argparse\nimport struct\nimport lief\n\nCSTRING_SECTION = '__cstring'\nCONST_SECTIO"
  },
  {
    "path": "reverse-sandbox/filters/filters_ios11.json",
    "chars": 7261,
    "preview": "{\n    \"0x01\":{\n        \"name\":\"\",\n        \"arg_process_fn\":\"get_filter_arg_string_by_offset_with_type\"\n    },\n    \"0x02\""
  },
  {
    "path": "reverse-sandbox/filters/filters_ios12.json",
    "chars": 9959,
    "preview": "{\n    \"0x01\":{\n        \"name\":\"\",\n        \"arg_process_fn\":\"get_filter_arg_string_by_offset_with_type\"\n    },\n    \"0x02\""
  },
  {
    "path": "reverse-sandbox/filters/filters_ios13.json",
    "chars": 11243,
    "preview": "{\r\n    \"0x01\":{\r\n        \"name\":\"\",\r\n        \"arg_process_fn\":\"get_filter_arg_string_by_offset_with_type\"\r\n    },\r\n    \""
  },
  {
    "path": "reverse-sandbox/filters/filters_ios14.json",
    "chars": 12463,
    "preview": "{\r\n    \"0x01\":{\r\n        \"name\":\"\",\r\n        \"arg_process_fn\":\"get_filter_arg_string_by_offset_with_type\"\r\n    },\r\n    \""
  },
  {
    "path": "reverse-sandbox/filters/filters_ios4.json",
    "chars": 1332,
    "preview": "{\n    \"0x01\":{\n        \"name\":\"regex\",\n        \"arg_process_fn\":\"get_filter_arg_regex_by_id\"\n    },\n    \"0x02\":{\n       "
  },
  {
    "path": "reverse-sandbox/filters/filters_ios5.json",
    "chars": 3669,
    "preview": "{\n    \"0x01\":{\n        \"name\":\"\",\n        \"arg_process_fn\":\"get_filter_arg_string_by_offset_with_type\"\n    },\n    \"0x02\""
  },
  {
    "path": "reverse-sandbox/filters/filters_ios6.json",
    "chars": 4714,
    "preview": "{\n    \"0x01\":{\n        \"name\":\"\",\n        \"arg_process_fn\":\"get_filter_arg_string_by_offset_with_type\"\n    },\n    \"0x02\""
  },
  {
    "path": "reverse-sandbox/filters.py",
    "chars": 1500,
    "preview": "import json\n\ndef read_filters(file_path):\n    temp = {}\n    filters = {}\n    with open(file_path) as data:\n        temp "
  },
  {
    "path": "reverse-sandbox/logger.config",
    "chars": 498,
    "preview": "[loggers]\nkeys=root\n\n[logger_root]\nlevel=NOTSET\nhandlers=file,screen\n\n[formatters]\nkeys=simple,complex\n\n[formatter_simpl"
  },
  {
    "path": "reverse-sandbox/operation_node.py",
    "chars": 69838,
    "preview": "#!/usr/bin/python3\n\nimport sys\nimport struct\nimport re\nimport logging\nimport logging.config\n\nlogging.config.fileConfig(\""
  },
  {
    "path": "reverse-sandbox/regex_parser_v1.py",
    "chars": 5988,
    "preview": "import logging\nimport struct\n\nlogging.config.fileConfig(\"logger.config\")\nlogger = logging.getLogger(__name__)\n\ndef parse"
  },
  {
    "path": "reverse-sandbox/regex_parser_v2.py",
    "chars": 6504,
    "preview": "import logging\nimport struct\n\nlogging.config.fileConfig(\"logger.config\")\nlogger = logging.getLogger(__name__)\n\ndef parse"
  },
  {
    "path": "reverse-sandbox/regex_parser_v3.py",
    "chars": 4100,
    "preview": "import logging\nimport struct\n\nlogging.config.fileConfig(\"logger.config\")\nlogger = logging.getLogger(__name__)\n\ndef parse"
  },
  {
    "path": "reverse-sandbox/reverse_sandbox.py",
    "chars": 20825,
    "preview": "#!/usr/bin/env python3\n\n\"\"\"\niOS/OS X sandbox decompiler\n\nHeavily inspired from Dion Blazakis' previous work\n    https://"
  },
  {
    "path": "reverse-sandbox/reverse_string.py",
    "chars": 13360,
    "preview": "import sys\nimport struct\nimport logging\nimport time\n\n\nlogging.config.fileConfig(\"logger.config\")\nlogger = logging.getLog"
  },
  {
    "path": "reverse-sandbox/sandbox_filter.py",
    "chars": 15927,
    "preview": "#!/usr/bin/env python\n\nimport struct\nimport re\nimport logging\nimport logging.config\nimport reverse_sandbox\nimport revers"
  },
  {
    "path": "reverse-sandbox/sandbox_regex.py",
    "chars": 17969,
    "preview": "#!/usr/bin/env python3\n\nimport struct\nimport logging\nimport logging.config\n\nlogging.config.fileConfig(\"logger.config\")\nl"
  }
]

About this extraction

This page contains the full source code of the malus-security/sandblaster GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 31 files (242.8 KB), approximately 60.3k tokens, and a symbol index with 313 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!