[
  {
    "path": ".travis.yml",
    "content": "os: windows\nlanguage: sh\npython: \"3.8\"\nbefore_install:\n  - choco install python3\n  - export PATH=\"/c/Python38:/c/Python38/Scripts:$PATH\"\n  - python -m pip install https://github.com/pyinstaller/pyinstaller/archive/develop.zip\nscript:\n  - /c/Python38/Scripts/pyinstaller.exe -F --clean -c \"src/cdqr.py\" -n cdqr.exe -i \"Icons/Martin-Berube-Character-Knight.ico\"\ndeploy:\n  provider: releases\n  api_key:\n    secure: BcuY8Or8PCre26nKczH9426UPoRkUddK4eZelit309w8SXfrHE3aRZILZ9cbU+8bmi5noGoeLWLiscl70COX018cnf8yzkPnAj9tigVALKojlF2Cv8Vt0p9b2l5gHZb73HMWbARkkaGlycrqofSv9LFqUyf6BSQu0UtWUaV1Y7ofZAhU3whVw+TB/0y6RrnIIq2C1qLrxMqPIrzcUcR6Bf+2RtVwqqEmjxFLawZA/IuY7HzTJd/YcWv1G6VjgELXAab0YI2BV0ZxpAmuofnV7IxBst355djlSxFy+/cHBn1YbWiIA9STWBgNUvDtp9wX12CcAUQiqeNsljb/nAUMx9gp9jkBKRtbXSW0i8wZSAH7xBVeTLYfE7oDRMAHa+jU8OTtmpgXX1zHHIcTj5stAZi+wk3GRvwNKkC+els/R+Xl8R3GLHkRS28qhYNHPIm7bEZfawM5pSOhNyxRc62mIs0zmLhjQu7eLDheJKBioM35xgCl00aV/531xjvzazRMJ/75E49LtODeE61H/I2jE2dZ8wchHHxTTvV77/kwWilYdXYfoUAFhXc1WTPyvmkMoYL/Dq0hGk05ID2/A9YYafyZRhyzfInV1NLTSezkBnjFnXIaK3REzKRVJK+WkaHxLLFJPSFVVzgZ7UlPp+CBVKbgfksp0syjyOgUpQfpJlU=\n  file: dist/cdqr.exe\n  name: Draft Release\n  draft: true\n  skip_cleanup: true\n  on:\n    tags: true\n"
  },
  {
    "path": "Docker/Dockerfile",
    "content": "# Use the official Docker Hub Ubuntu 18.04 base image\nFROM ubuntu:18.04\nMAINTAINER @aorlikoski\n\nENV DEBIAN_FRONTEND noninteractive\n\n# Setup install environment, Plaso, and Timesketch dependencies\nRUN apt-get -qq -y update && \\\n    apt-get -qq -y --no-install-recommends install \\\n      software-properties-common \\\n      apt-transport-https && \\\n    add-apt-repository -u -y ppa:gift/stable && \\\n    apt-get -qq -y update && \\\n    apt-get -qq -y --assume-yes --no-install-recommends install \\\n      python-setuptools \\\n      build-essential \\\n      curl \\\n      git \\\n      gpg-agent \\\n      libffi-dev \\\n      lsb-release \\\n      locales \\\n      python3-dev \\\n      python3-setuptools \\\n      python3 \\\n      python3-pip \\\n      python3-psycopg2 \\\n      python3-wheel && \\\n    curl -sS https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - && \\\n    VERSION=node_8.x && \\\n    DISTRO=\"$(lsb_release -s -c)\" && \\\n    echo \"deb https://deb.nodesource.com/$VERSION $DISTRO main\" > /etc/apt/sources.list.d/nodesource.list && \\\n    curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \\\n    echo \"deb https://dl.yarnpkg.com/debian/ stable main\" > /etc/apt/sources.list.d/yarn.list && \\\n    apt-get -qq -y update && \\\n    apt-get -qq -y --no-install-recommends install \\\n      nodejs \\\n      yarn && \\\n    apt-get -y dist-upgrade && \\\n    apt-get -qq -y clean && \\\n    apt-get -qq -y autoclean && \\\n    apt-get -qq -y autoremove && \\\n    rm -rf /var/cache/apt/ /var/lib/apt/lists/\n\n# Download and install Plaso from GitHub Release\nRUN curl -sL -o /tmp/plaso-20190916.tar.gz https://github.com/log2timeline/plaso/archive/20190916.tar.gz && \\\n    cd /tmp/ && \\\n    tar zxf plaso-20190916.tar.gz && \\\n    cd plaso-20190916 && \\\n    pip3 install -r requirements.txt && \\\n    pip3 install mock && \\\n    python3 setup.py build && \\\n    python3 setup.py install && 
\\\n    rm -rf /tmp/*\n\n# Build and Install Timesketch from GitHub Master with Pip\nRUN git clone https://github.com/google/timesketch.git /tmp/timesketch && \\\n    cd /tmp/timesketch && \\\n    git checkout aded1b19acca44b99854083088ef920390f75457 && \\\n    cd /tmp/timesketch && ls && yarn install && \\\n    yarn run build  && \\\n    sed -i -e '/pyyaml/d' /tmp/timesketch/requirements.txt && \\\n    pip3 install /tmp/timesketch/ && \\\n    rm -rf /tmp/*\n\n# Set terminal to UTF-8 by default\nRUN locale-gen en_US.UTF-8 && \\\n    update-locale LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8\n\nENV LANG en_US.UTF-8\nENV LC_ALL en_US.UTF-8\n\n# Download and install CDQR\nRUN curl -s -o /usr/local/bin/cdqr.py \\\n    https://raw.githubusercontent.com/orlikoski/CDQR/master/src/cdqr.py && \\\n    chmod 755 /usr/local/bin/cdqr.py\n\n# Load the entrypoint script to be run later\nENTRYPOINT [\"/usr/local/bin/cdqr.py\"]\n"
  },
  {
    "path": "Docker/README.md",
    "content": "# CDQR Docker\r\nThe CDQR docker is a docker image with CDQR and all of the dependencies installed.\r\n\r\nThe docker itself is stored on DockerHub at https://hub.docker.com/r/aorlikoski/cdqr. The docker can be used by `docker run aorlikoski/cdqr`.\r\n\r\n# Skadi Compatibility\r\nDue to the complexity of using docker a helper bash script `cdqr` was created. It works with the Docker, OVA, Vagrant, Signed Installer versions of Skadi. It can be easily modified to work in any environment.\r\n\r\n# Command Line Changes\r\nIt is not required to use the `cdqr` bash script to make `aorlikoski/cdqr` work but it makes the transition much easier. That said, there is one critical difference in the commands used with the bash script `cdqr` vs the original python `cdqr.py`. The path to the data being processed (input) and the path to the output folder (output) are parsed differently in the bash script.\r\n\r\n_TL;DR_ use `in:` and `out:` to specify the input and output paths. The `-y` flag to accept default answers to all CDQR questions is added automatically by the script at run time. _This is important since the process will fail if any user input is required._  \r\n\r\n\r\n# Windows, MacOS and Linux Support\r\n## Bash\r\n`cdqr` is a translation script that does the heavy lifting of volume mapping and networking for docker.  \r\n`cdqr.d` is a daemon version that doesn't output to the screen, thereby enabling processing in the background  \r\nExample: `bash cdqr in:artifacts.zip`\r\n\r\n## PowerShell\r\n`cdqr.ps1` is a translation script that does the heavy lifting of volume mapping and networking for docker.  
\r\n`cdqr.d.ps1` is a daemon version that doesn't output to the screen, thereby enabling processing in the background  \r\nExample: `powershell -ExecutionPolicy Bypass cdqr.ps1 in:artifacts.zip`\r\n\r\n### How it Works\r\nHelper Script Command  \r\n`cdqr in:winevt.zip out:Results -z --max_cpu`  \r\n\r\nSame Command Manually  \r\n```docker run   -v /etc/hosts:/etc/hosts:ro   --network host -v /home/skadi/winevt.zip:/home/skadi/winevt.zip -v /home/skadi/Results:/home/skadi/Results aorlikoski/cdqr:4.4.0 -y /home/skadi/winevt.zip /home/skadi/Results -z --max_cpu```  \r\n\r\n## Process ZIP file (default windows parser list)\r\nThis uses the default win parser list and saves output to Results folder on host  \r\n*cdqr in:winevt.zip out:Results -z --max_cpu*  \r\n![](/objects/images/zip_demo.gif?)\r\n\r\n## Use the same .plaso file but output into Kibana\r\nThis uses existing .plaso file and doesn't save the output on the host (it is ephemeral and deleted when the CDQR docker run completes)  \r\n*cdqr in:Results/winevt.plaso --plaso_db --es_kb winevt*  \r\n![](/objects/images/plaso_kibana.gif?)\r\n\r\n## Use the same .plaso file but output into TimeSketch\r\nThis uses existing .plaso file and doesn't save the output on the host. This uses `/etc/timesketch.conf` on the host to pass the values it needs to insert into TimeSketch.  \r\n*cdqr in:Results/winevt.plaso --plaso_db --es_ts winevt*  \r\n![](/objects/images/plaso_ts.gif?)\r\n"
  },
  {
    "path": "Docker/cdqr",
    "content": "#!/bin/bash\ncdqr_version=\"5.1.0.1\"\ncur_dir=\"$(pwd)\"\ndocker_network=${DOCKER_NETWORK}\ntimesketch_conf=${TIMESKETCH_CONF:-\"/opt/Skadi/Docker/timesketch/timesketch_default.conf\"}\ntimesketch_conf_legacy=\"/etc/timesketch.conf\"\ntimesketch_server_ipaddress=${TIMESKETCH_SERVER_IPADDRESS:-\"\"}\ndocker_args=\"docker run \"\nargs=()\n\nfix_path () {\n  file_path=$1\n  file_path=\"$(echo $file_path | sed 's/ /\\\\ /g')\"\n  eval file_path=$file_path\n  if [ \"${file_path:0:1}\" == \"/\" ]; then\n    #this is a root level path, do not modify\n    final_path=$file_path\n  elif [ \"${file_path:0:2}\" == \"./\" ]; then\n    #this is a current dir path, modify to add absolute path\n    final_path=(\"$cur_dir/${file_path:2:${#file_path}}\")\n  elif [ \"${file_path:0:1}\" == \"~\" ]; then\n    #this is a home dir path, modify to add absolute path\n    final_path=(\"$(echo $HOME)/${file_path:2:${#file_path}}\")\n  else\n    final_path=(\"$cur_dir/$file_path\")\n  fi\n  echo \"$final_path\"\n}\n\n# Set the docker network (if any) to use\nif [ $docker_network ]; then\n  echo \"Validating the Docker network exists: $docker_network\"\n  if [ $(docker network ls |grep $docker_network |awk '{print $2}' ) ]; then\n    echo \"Connecting CDQR to the Docker network: $docker_network\"\n    docker_args=\"$docker_args --network $docker_network \"\n  else\n    echo \"Docker network $docker_network does not exist, quitting\"\n    exit\n  fi\nelse\n  echo \"Assigning CDQR to the host network\"\n  echo \"The Docker network can be changed by modifying the \\\"DOCKER_NETWORK\\\" environment variable\"\n  echo \"Example (default Skadi mode): export DOCKER_NETWORK=host\"\n  echo \"Example (use other Docker network): export DOCKER_NETWORK=skadi-backend\"\n  docker_args=\"$docker_args --network host \"\nfi\n\nfor i in \"$@\"; do\n  # If it's timesketch add the timesketch mapping\n  if [ \"$i\" == \"--es_ts\" ]; then\n    if [ ! 
-f \"$timesketch_conf\" ]; then\n      timesketch_conf=\"\"\n      if [ ! -f \"$timesketch_conf_legacy\" ]; then\n        while [ \"$timesketch_conf\" == \"\" ]; do\n          echo \"TimeSketch default configuration file must be set. This can be done with an Environment variable.\"\n          echo \"The default configuration is the absolute path to Skadi/Docker/timesketch/timesketch_default.conf.\"\n          echo \"Example with Skadi git repo in \\\"/opt/Skadi\\\"): export TIMESKETCH_CONF=\\\"/opt/Skadi/Docker/timesketch/timesketch_default.conf\\\"\"\n          echo \"\"\n          read -e -p \"Enter the location of the timesketch.conf file to use in this operation: \" timesketch_conf\n          timesketch_conf=\"$(fix_path $timesketch_conf)\"\n          if [ ! -f \"$timesketch_conf\" ]; then\n            echo \"Invalid file path, re-enter.\"\n            timesketch_conf=\"\"\n          fi\n        done\n      else\n        timesketch_conf=$timesketch_conf_legacy\n      fi\n    fi\n    if [ \"$timesketch_server_ipaddress\" == \"\" ]; then\n        timesketch_server_ipaddress='127.0.0.1'\n    fi\n    docker_args=\"$docker_args --add-host=elasticsearch:$timesketch_server_ipaddress --add-host=postgres:$timesketch_server_ipaddress -v ${timesketch_conf}:/etc/timesketch.conf\"\n  fi\n\n  # If it's an input file/dir (denoted by \"in:\") then resolve absolute path\n  if [ \"${i:0:3}\" == \"in:\" ]; then\n    input_map=\"${i:3:${#i}}\"\n    final_input_path=\"$(fix_path '$input_map')\"\n    args+=($final_input_path)\n    docker_args=\"$docker_args -v $final_input_path:$final_input_path\"\n  # If it's an output file/dir (denoted by \"out:\") then resolve absolute path\n  elif [ \"${i:0:4}\" == \"out:\" ]; then\n    output_map=\"${i:4:${#i}}\"\n    final_output_path=\"$(fix_path '$output_map')\"\n    args+=($final_output_path)\n    docker_args=\"$docker_args -v $final_output_path:$final_output_path\"\n  # Everything else is copied over as is\n  else\n    args+=(\"$i\")\n  
fi\ndone\n\nfinal_command=\"$docker_args aorlikoski/cdqr:$cdqr_version -y ${args[@]}\"\necho \"$final_command\"\n$final_command\n"
  },
  {
    "path": "Docker/cdqr.d",
    "content": "#!/bin/bash\ncdqr_version=\"5.1.0.1\"\ncur_dir=\"$(pwd)\"\ndocker_network=${DOCKER_NETWORK}\ntimesketch_conf=${TIMESKETCH_CONF:-\"/opt/Skadi/Docker/timesketch/timesketch_default.conf\"}\ntimesketch_conf_legacy=\"/etc/timesketch.conf\"\ntimesketch_server_ipaddress=${TIMESKETCH_SERVER_IPADDRESS:-\"\"}\ndocker_args=\"docker run -d \"\nargs=()\n\nfix_path () {\n  file_path=$1\n  file_path=\"$(echo $file_path | sed 's/ /\\\\ /g')\"\n  eval file_path=$file_path\n  if [ \"${file_path:0:1}\" == \"/\" ]; then\n    #this is a root level path, do not modify\n    final_path=$file_path\n  elif [ \"${file_path:0:2}\" == \"./\" ]; then\n    #this is a current dir path, modify to add absolute path\n    final_path=(\"$cur_dir/${file_path:2:${#file_path}}\")\n  elif [ \"${file_path:0:1}\" == \"~\" ]; then\n    #this is a home dir path, modify to add absolute path\n    final_path=(\"$(echo $HOME)/${file_path:2:${#file_path}}\")\n  else\n    final_path=(\"$cur_dir/$file_path\")\n  fi\n  echo \"$final_path\"\n}\n\n# Set the docker network (if any) to use\nif [ $docker_network ]; then\n  echo \"Validating the Docker network exists: $docker_network\"\n  if [ $(docker network ls |grep $docker_network |awk '{print $2}' ) ]; then\n    echo \"Connecting CDQR to the Docker network: $docker_network\"\n    docker_args=\"$docker_args --network $docker_network \"\n  else\n    echo \"Docker network $docker_network does not exist, quitting\"\n    exit\n  fi\nelse\n  echo \"Assigning CDQR to the host network\"\n  echo \"The Docker network can be changed by modifying the \\\"DOCKER_NETWORK\\\" environment variable\"\n  echo \"Example (default Skadi mode): export DOCKER_NETWORK=host\"\n  echo \"Example (use other Docker network): export DOCKER_NETWORK=skadi-backend\"\n  docker_args=\"$docker_args --network host \"\nfi\n\nfor i in \"$@\"; do\n  # If it's timesketch add the timesketch mapping\n  if [ \"$i\" == \"--es_ts\" ]; then\n    if [ ! 
-f \"$timesketch_conf\" ]; then\n      if [ -f \"$timesketch_conf_legacy\" ]; then\n        timesketch_conf=$timesketch_conf_legacy\n      else\n        echo \"TimeSketch default configuration file must be set with Environment variable in daemon mode.\"\n        echo \"The default configuration is the absolute path to Skadi/Docker/timesketch/timesketch_default.conf.\"\n        echo \"Example with Skadi git repo in \\\"/opt/Skadi\\\"): export TIMESKETCH_CONF=\\\"/opt/Skadi/Docker/timesketch/timesketch_default.conf\\\"\"\n        echo \"Exiting\"\n        exit\n      fi\n    fi\n    if [ \"$timesketch_server_ipaddress\" == \"\" ]; then\n        timesketch_server_ipaddress='127.0.0.1'\n    fi\n    docker_args=\"$docker_args --add-host=elasticsearch:$timesketch_server_ipaddress --add-host=postgres:$timesketch_server_ipaddress -v ${timesketch_conf}:/etc/timesketch.conf\"\n  fi\n\n  # If it's an input file/dir (denoted by \"in:\") then resolve absolute path\n  if [ \"${i:0:3}\" == \"in:\" ]; then\n    input_map=\"${i:3:${#i}}\"\n    final_input_path=\"$(fix_path '$input_map')\"\n    args+=($final_input_path)\n    docker_args=\"$docker_args -v $final_input_path:$final_input_path\"\n  # If it's an output file/dir (denoted by \"out:\") then resolve absolute path\n  elif [ \"${i:0:4}\" == \"out:\" ]; then\n    output_map=\"${i:4:${#i}}\"\n    final_output_path=\"$(fix_path '$output_map')\"\n    args+=($final_output_path)\n    docker_args=\"$docker_args -v $final_output_path:$final_output_path\"\n  # Everything else is copied over as is\n  else\n    args+=(\"$i\")\n  fi\ndone\n\nfinal_command=\"$docker_args aorlikoski/cdqr:$cdqr_version -y ${args[@]}\"\necho \"$final_command\"\n$final_command\n"
  },
  {
    "path": "Docker/cdqr.d.ps1",
    "content": "#! /usr/bin/pwsh\n$ErrorActionPreference = \"Stop\"\n\n$cdqr_version=\"5.1.0.1\"\n$cur_dir=Get-Location\n$docker_network=$env:DOCKER_NETWORK\n$timesketch_conf=$env:TIMESKETCH_CONF\n$timesketch_server_ipaddress=$env:TIMESKETCH_SERVER_IPADDRESS\n$docker_args=\"docker run -d\"\n$custom_args=@()\n\n# Set the docker network (if any) to use\nif ( $docker_network ) {\n  echo \"Validating the Docker network exists: $docker_network\"\n  $test = docker network ls | findstr $docker_network | %{ $_.Split(\" \")[8]; }\n  if ( $test ) {\n    echo \"Connecting CDQR to the Docker network: $docker_network\"\n    $docker_args=\"$docker_args --network $docker_network \"\n  }\n  else {\n    echo \"Docker network $docker_network does not exist, quitting\"\n    echo \"Exiting\"\n    exit\n  }\n}\nelse {\n  echo \"Assigning CDQR to the host network\"\n  echo \"The Docker network can be changed by modifying the `\"DOCKER_NETWORK`\" environment variable\"\n  echo \"Example (default Skadi mode): `$env:DOCKER_NETWORK = `\"host`\"\"\n  echo \"Example (use other Docker network): `$env:DOCKER_NETWORK = `\"skadi-backend`\"\"\n  $docker_args=\"$docker_args --network host \"\n}\n\n# Parse the arguments\nforeach ($i in $args) {\n    # If it's timesketch add the timesketch config file mapping\n    if ( $i -eq \"--es_ts\" ) {\n      if ($timesketch_conf -ne $null){\n        if (-not(test-path $timesketch_conf)){\n          Write-host \"Invalid file path, exiting.\"\n          exit\n        }\n        elseif ((get-item $timesketch_conf).psiscontainer){\n          Write-host \"Source must be a file, exiting.\"\n          exit\n        }\n      }\n      else {\n        echo \"TimeSketch default configuration file must be set with Environment variable in daemon mode.\"\n        echo \"The default configuration is the absolute path to Skadi\\Docker\\timesketch\\timesketch_default.conf.\"\n        echo \"Example with Skadi git repo in `\"C:\\GitHub\\Skadi`\"): `$env:TIMESKETCH_CONF = 
`\"C:\\GitHub\\Skadi\\Docker\\timesketch\\timesketch_default.conf`\"\"\n        echo \"Exiting\"\n        exit\n      }\n      if ( $timesketch_server_ipaddress -eq $null) {\n          $timesketch_server_ipaddress = '127.0.0.1'\n      }\n      $docker_args=\"$docker_args --add-host=elasticsearch:$timesketch_server_ipaddress --add-host=postgres:$timesketch_server_ipaddress -v '${timesketch_conf}:/etc/timesketch.conf'\"\n    }\n    # If it's an input file/dir (denoted by \"in:\" then resolve absolute path)\n    if ( $i.SubString(0,3) -eq \"in:\" ) {\n      $input_path=$i.SubString(3,$i.length - 3)\n      $input_path_full=Resolve-Path -Path $input_path\n      $docker_input_path=\"$input_path_full\".SubString(2,\"$input_path_full\".length - 2).Replace(\"\\\",\"/\")\n      $docker_args+=\" -v '${input_path_full}:/data$docker_input_path'\"\n      $custom_args+=\"'/data$docker_input_path'\"\n    }\n    # If it's an output file/dir (denoted by \"out:\" then resolve absolute path)\n    elseif ( $i.SubString(0,4) -eq \"out:\" ) {\n      $output_path=$i.SubString(4,$i.length - 4)\n      If(!(test-path $output_path))\n      {\n            New-Item -ItemType Directory -Force -Path $output_path | Out-Null\n      }\n      $output_path_full=Resolve-Path -Path $output_path\n      $docker_output_path=\"$output_path_full\".SubString(2,\"$output_path_full\".length - 2).Replace(\"\\\",\"/\")\n      $docker_args+=\" -v '${output_path_full}:/output$docker_output_path'\"\n      $custom_args+=\"'/output$docker_output_path'\"\n    }\n    else {\n      $custom_args+=$i\n    }\n}\n$final_command=\"$docker_args aorlikoski/cdqr:$cdqr_version -y $custom_args\"\n$final_command\niex $final_command\n"
  },
  {
    "path": "Docker/cdqr.ps1",
    "content": "#! /usr/bin/pwsh\n$ErrorActionPreference = \"Stop\"\n\n$cdqr_version=\"5.1.0.1\"\n$cur_dir=Get-Location\n$docker_network=$env:DOCKER_NETWORK\n$timesketch_conf=$env:TIMESKETCH_CONF\n$timesketch_server_ipaddress=$env:TIMESKETCH_SERVER_IPADDRESS\n$docker_args=\"docker run\"\n$custom_args=@()\n\n# Set the docker network (if any) to use\nif ( $docker_network ) {\n  echo \"Validating the Docker network exists: $docker_network\"\n  $test = docker network ls | findstr $docker_network | %{ $_.Split(\" \")[8]; }\n  if ( $test ) {\n    echo \"Connecting CDQR to the Docker network: $docker_network\"\n    $docker_args=\"$docker_args --network $docker_network \"\n  }\n  else {\n    echo \"Docker network $docker_network does not exist, quitting\"\n    echo \"Exiting\"\n    exit\n  }\n}\nelse {\n  echo \"Assigning CDQR to the host network\"\n  echo \"The Docker network can be changed by modifying the `\"DOCKER_NETWORK`\" environment variable\"\n  echo \"Example (default Skadi mode): `$env:DOCKER_NETWORK = `\"host`\"\"\n  echo \"Example (use other Docker network): `$env:DOCKER_NETWORK = `\"skadi-backend`\"\"\n  $docker_args=\"$docker_args --network host \"\n}\n\n# Parse the arguments\nforeach ($i in $args) {\n    # If it's timesketch add the timesketch config file mapping\n    if ( $i -eq \"--es_ts\" ) {\n      while ($timesketch_conf -eq $null){\n        echo \"TimeSketch default configuration file must be set. 
This can be done with an Environment variable.\"\n        echo \"The default configuration is the absolute path to Skadi\\Docker\\timesketch\\timesketch_default.conf.\"\n        echo \"Example with Skadi git repo in `\"C:\\GitHub\\Skadi`\"): `$env:TIMESKETCH_CONF = `\"C:\\GitHub\\Skadi\\Docker\\timesketch\\timesketch_default.conf`\"\"\n        echo \"\"\n        $timesketch_conf = read-host \"Enter the location of the TimeSketch configuration file to use in this operation \"\n        if (-not(test-path $timesketch_conf)){\n          Write-host \"Invalid file path, re-enter.\"\n          $timesketch_conf = $null\n        }\n        elseif ((get-item $timesketch_conf).psiscontainer){\n          Write-host \"Source must be a file, re-enter.\"\n          $timesketch_conf = $null\n        }\n      }\n      if ( $timesketch_server_ipaddress -eq $null) {\n          $timesketch_server_ipaddress = '127.0.0.1'\n      }\n      $docker_args=\"$docker_args --add-host=elasticsearch:$timesketch_server_ipaddress --add-host=postgres:$timesketch_server_ipaddress -v '${timesketch_conf}:/etc/timesketch.conf'\"\n    }\n    # If it's an input file/dir (denoted by \"in:\" then resolve absolute path)\n    if ( $i.SubString(0,3) -eq \"in:\" ) {\n      $input_path=$i.SubString(3,$i.length - 3)\n      $input_path_full=Resolve-Path -Path $input_path\n      $docker_input_path=\"$input_path_full\".SubString(2,\"$input_path_full\".length - 2).Replace(\"\\\",\"/\")\n      $docker_args+=\" -v '${input_path_full}:/data$docker_input_path'\"\n      $custom_args+=\"'/data$docker_input_path'\"\n    }\n    # If it's an output file/dir (denoted by \"out:\" then resolve absolute path)\n    elseif ( $i.SubString(0,4) -eq \"out:\" ) {\n      $output_path=$i.SubString(4,$i.length - 4)\n      If(!(test-path $output_path))\n      {\n            New-Item -ItemType Directory -Force -Path $output_path | Out-Null\n      }\n      $output_path_full=Resolve-Path -Path $output_path\n      
$docker_output_path=\"$output_path_full\".SubString(2,\"$output_path_full\".length - 2).Replace(\"\\\",\"/\")\n      $docker_args+=\" -v '${output_path_full}:/output$docker_output_path'\"\n      $custom_args+=\"'/output$docker_output_path'\"\n    }\n    else {\n      $custom_args+=$i\n    }\n}\n$final_command=\"$docker_args aorlikoski/cdqr:$cdqr_version -y $custom_args\"\n$final_command\niex $final_command\n"
  },
  {
    "path": "LICENSE",
    "content": "           GNU GENERAL PUBLIC LICENSE\n                       Version 3, 29 June 2007\n\n Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n                            Preamble\n\n  The GNU General Public License is a free, copyleft license for\nsoftware and other kinds of works.\n\n  The licenses for most software and other practical works are designed\nto take away your freedom to share and change the works.  By contrast,\nthe GNU General Public License is intended to guarantee your freedom to\nshare and change all versions of a program--to make sure it remains free\nsoftware for all its users.  We, the Free Software Foundation, use the\nGNU General Public License for most of our software; it applies also to\nany other work released this way by its authors.  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthem if you wish), that you receive source code or can get it if you\nwant it, that you can change the software or use pieces of it in new\nfree programs, and that you know you can do these things.\n\n  To protect your rights, we need to prevent others from denying you\nthese rights or asking you to surrender the rights.  Therefore, you have\ncertain responsibilities if you distribute copies of the software, or if\nyou modify it: responsibilities to respect the freedom of others.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must pass on to the recipients the same\nfreedoms that you received.  You must make sure that they, too, receive\nor can get the source code.  
And you must show them these terms so they\nknow their rights.\n\n  Developers that use the GNU GPL protect your rights with two steps:\n(1) assert copyright on the software, and (2) offer you this License\ngiving you legal permission to copy, distribute and/or modify it.\n\n  For the developers' and authors' protection, the GPL clearly explains\nthat there is no warranty for this free software.  For both users' and\nauthors' sake, the GPL requires that modified versions be marked as\nchanged, so that their problems will not be attributed erroneously to\nauthors of previous versions.\n\n  Some devices are designed to deny users access to install or run\nmodified versions of the software inside them, although the manufacturer\ncan do so.  This is fundamentally incompatible with the aim of\nprotecting users' freedom to change the software.  The systematic\npattern of such abuse occurs in the area of products for individuals to\nuse, which is precisely where it is most unacceptable.  Therefore, we\nhave designed this version of the GPL to prohibit the practice for those\nproducts.  If such problems arise substantially in other domains, we\nstand ready to extend this provision to those domains in future versions\nof the GPL, as needed to protect the freedom of users.\n\n  Finally, every program is threatened constantly by software patents.\nStates should not allow patents to restrict development and use of\nsoftware on general-purpose computers, but in those that do, we wish to\navoid the special danger that patents applied to a free program could\nmake it effectively proprietary.  To prevent this, the GPL assures that\npatents cannot be used to render the program non-free.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\n                       TERMS AND CONDITIONS\n\n  0. 
Definitions.\n\n  \"This License\" refers to version 3 of the GNU General Public License.\n\n  \"Copyright\" also means copyright-like laws that apply to other kinds of\nworks, such as semiconductor masks.\n\n  \"The Program\" refers to any copyrightable work licensed under this\nLicense.  Each licensee is addressed as \"you\".  \"Licensees\" and\n\"recipients\" may be individuals or organizations.\n\n  To \"modify\" a work means to copy from or adapt all or part of the work\nin a fashion requiring copyright permission, other than the making of an\nexact copy.  The resulting work is called a \"modified version\" of the\nearlier work or a work \"based on\" the earlier work.\n\n  A \"covered work\" means either the unmodified Program or a work based\non the Program.\n\n  To \"propagate\" a work means to do anything with it that, without\npermission, would make you directly or secondarily liable for\ninfringement under applicable copyright law, except executing it on a\ncomputer or modifying a private copy.  Propagation includes copying,\ndistribution (with or without modification), making available to the\npublic, and in some countries other activities as well.\n\n  To \"convey\" a work means any kind of propagation that enables other\nparties to make or receive copies.  Mere interaction with a user through\na computer network, with no transfer of a copy, is not conveying.\n\n  An interactive user interface displays \"Appropriate Legal Notices\"\nto the extent that it includes a convenient and prominently visible\nfeature that (1) displays an appropriate copyright notice, and (2)\ntells the user that there is no warranty for the work (except to the\nextent that warranties are provided), that licensees may convey the\nwork under this License, and how to view a copy of this License.  If\nthe interface presents a list of user commands or options, such as a\nmenu, a prominent item in the list meets this criterion.\n\n  1. 
Source Code.\n\n  The \"source code\" for a work means the preferred form of the work\nfor making modifications to it.  \"Object code\" means any non-source\nform of a work.\n\n  A \"Standard Interface\" means an interface that either is an official\nstandard defined by a recognized standards body, or, in the case of\ninterfaces specified for a particular programming language, one that\nis widely used among developers working in that language.\n\n  The \"System Libraries\" of an executable work include anything, other\nthan the work as a whole, that (a) is included in the normal form of\npackaging a Major Component, but which is not part of that Major\nComponent, and (b) serves only to enable use of the work with that\nMajor Component, or to implement a Standard Interface for which an\nimplementation is available to the public in source code form.  A\n\"Major Component\", in this context, means a major essential component\n(kernel, window system, and so on) of the specific operating system\n(if any) on which the executable work runs, or a compiler used to\nproduce the work, or an object code interpreter used to run it.\n\n  The \"Corresponding Source\" for a work in object code form means all\nthe source code needed to generate, install, and (for an executable\nwork) run the object code and to modify the work, including scripts to\ncontrol those activities.  However, it does not include the work's\nSystem Libraries, or general-purpose tools or generally available free\nprograms which are used unmodified in performing those activities but\nwhich are not part of the work.  
For example, Corresponding Source\nincludes interface definition files associated with source files for\nthe work, and the source code for shared libraries and dynamically\nlinked subprograms that the work is specifically designed to require,\nsuch as by intimate data communication or control flow between those\nsubprograms and other parts of the work.\n\n  The Corresponding Source need not include anything that users\ncan regenerate automatically from other parts of the Corresponding\nSource.\n\n  The Corresponding Source for a work in source code form is that\nsame work.\n\n  2. Basic Permissions.\n\n  All rights granted under this License are granted for the term of\ncopyright on the Program, and are irrevocable provided the stated\nconditions are met.  This License explicitly affirms your unlimited\npermission to run the unmodified Program.  The output from running a\ncovered work is covered by this License only if the output, given its\ncontent, constitutes a covered work.  This License acknowledges your\nrights of fair use or other equivalent, as provided by copyright law.\n\n  You may make, run and propagate covered works that you do not\nconvey, without conditions so long as your license otherwise remains\nin force.  You may convey covered works to others for the sole purpose\nof having them make modifications exclusively for you, or provide you\nwith facilities for running those works, provided that you comply with\nthe terms of this License in conveying all material for which you do\nnot control copyright.  Those thus making or running the covered works\nfor you must do so exclusively on your behalf, under your direction\nand control, on terms that prohibit them from making any copies of\nyour copyrighted material outside their relationship with you.\n\n  Conveying under any other circumstances is permitted solely under\nthe conditions stated below.  Sublicensing is not allowed; section 10\nmakes it unnecessary.\n\n  3. 
Protecting Users' Legal Rights From Anti-Circumvention Law.\n\n  No covered work shall be deemed part of an effective technological\nmeasure under any applicable law fulfilling obligations under article\n11 of the WIPO copyright treaty adopted on 20 December 1996, or\nsimilar laws prohibiting or restricting circumvention of such\nmeasures.\n\n  When you convey a covered work, you waive any legal power to forbid\ncircumvention of technological measures to the extent such circumvention\nis effected by exercising rights under this License with respect to\nthe covered work, and you disclaim any intention to limit operation or\nmodification of the work as a means of enforcing, against the work's\nusers, your or third parties' legal rights to forbid circumvention of\ntechnological measures.\n\n  4. Conveying Verbatim Copies.\n\n  You may convey verbatim copies of the Program's source code as you\nreceive it, in any medium, provided that you conspicuously and\nappropriately publish on each copy an appropriate copyright notice;\nkeep intact all notices stating that this License and any\nnon-permissive terms added in accord with section 7 apply to the code;\nkeep intact all notices of the absence of any warranty; and give all\nrecipients a copy of this License along with the Program.\n\n  You may charge any price or no price for each copy that you convey,\nand you may offer support or warranty protection for a fee.\n\n  5. Conveying Modified Source Versions.\n\n  You may convey a work based on the Program, or the modifications to\nproduce it from the Program, in the form of source code under the\nterms of section 4, provided that you also meet all of these conditions:\n\n    a) The work must carry prominent notices stating that you modified\n    it, and giving a relevant date.\n\n    b) The work must carry prominent notices stating that it is\n    released under this License and any conditions added under section\n    7.  
This requirement modifies the requirement in section 4 to\n    \"keep intact all notices\".\n\n    c) You must license the entire work, as a whole, under this\n    License to anyone who comes into possession of a copy.  This\n    License will therefore apply, along with any applicable section 7\n    additional terms, to the whole of the work, and all its parts,\n    regardless of how they are packaged.  This License gives no\n    permission to license the work in any other way, but it does not\n    invalidate such permission if you have separately received it.\n\n    d) If the work has interactive user interfaces, each must display\n    Appropriate Legal Notices; however, if the Program has interactive\n    interfaces that do not display Appropriate Legal Notices, your\n    work need not make them do so.\n\n  A compilation of a covered work with other separate and independent\nworks, which are not by their nature extensions of the covered work,\nand which are not combined with it such as to form a larger program,\nin or on a volume of a storage or distribution medium, is called an\n\"aggregate\" if the compilation and its resulting copyright are not\nused to limit the access or legal rights of the compilation's users\nbeyond what the individual works permit.  Inclusion of a covered work\nin an aggregate does not cause this License to apply to the other\nparts of the aggregate.\n\n  6. 
Conveying Non-Source Forms.\n\n  You may convey a covered work in object code form under the terms\nof sections 4 and 5, provided that you also convey the\nmachine-readable Corresponding Source under the terms of this License,\nin one of these ways:\n\n    a) Convey the object code in, or embodied in, a physical product\n    (including a physical distribution medium), accompanied by the\n    Corresponding Source fixed on a durable physical medium\n    customarily used for software interchange.\n\n    b) Convey the object code in, or embodied in, a physical product\n    (including a physical distribution medium), accompanied by a\n    written offer, valid for at least three years and valid for as\n    long as you offer spare parts or customer support for that product\n    model, to give anyone who possesses the object code either (1) a\n    copy of the Corresponding Source for all the software in the\n    product that is covered by this License, on a durable physical\n    medium customarily used for software interchange, for a price no\n    more than your reasonable cost of physically performing this\n    conveying of source, or (2) access to copy the\n    Corresponding Source from a network server at no charge.\n\n    c) Convey individual copies of the object code with a copy of the\n    written offer to provide the Corresponding Source.  This\n    alternative is allowed only occasionally and noncommercially, and\n    only if you received the object code with such an offer, in accord\n    with subsection 6b.\n\n    d) Convey the object code by offering access from a designated\n    place (gratis or for a charge), and offer equivalent access to the\n    Corresponding Source in the same way through the same place at no\n    further charge.  You need not require recipients to copy the\n    Corresponding Source along with the object code.  
If the place to\n    copy the object code is a network server, the Corresponding Source\n    may be on a different server (operated by you or a third party)\n    that supports equivalent copying facilities, provided you maintain\n    clear directions next to the object code saying where to find the\n    Corresponding Source.  Regardless of what server hosts the\n    Corresponding Source, you remain obligated to ensure that it is\n    available for as long as needed to satisfy these requirements.\n\n    e) Convey the object code using peer-to-peer transmission, provided\n    you inform other peers where the object code and Corresponding\n    Source of the work are being offered to the general public at no\n    charge under subsection 6d.\n\n  A separable portion of the object code, whose source code is excluded\nfrom the Corresponding Source as a System Library, need not be\nincluded in conveying the object code work.\n\n  A \"User Product\" is either (1) a \"consumer product\", which means any\ntangible personal property which is normally used for personal, family,\nor household purposes, or (2) anything designed or sold for incorporation\ninto a dwelling.  In determining whether a product is a consumer product,\ndoubtful cases shall be resolved in favor of coverage.  For a particular\nproduct received by a particular user, \"normally used\" refers to a\ntypical or common use of that class of product, regardless of the status\nof the particular user or of the way in which the particular user\nactually uses, or expects or is expected to use, the product.  
A product\nis a consumer product regardless of whether the product has substantial\ncommercial, industrial or non-consumer uses, unless such uses represent\nthe only significant mode of use of the product.\n\n  \"Installation Information\" for a User Product means any methods,\nprocedures, authorization keys, or other information required to install\nand execute modified versions of a covered work in that User Product from\na modified version of its Corresponding Source.  The information must\nsuffice to ensure that the continued functioning of the modified object\ncode is in no case prevented or interfered with solely because\nmodification has been made.\n\n  If you convey an object code work under this section in, or with, or\nspecifically for use in, a User Product, and the conveying occurs as\npart of a transaction in which the right of possession and use of the\nUser Product is transferred to the recipient in perpetuity or for a\nfixed term (regardless of how the transaction is characterized), the\nCorresponding Source conveyed under this section must be accompanied\nby the Installation Information.  But this requirement does not apply\nif neither you nor any third party retains the ability to install\nmodified object code on the User Product (for example, the work has\nbeen installed in ROM).\n\n  The requirement to provide Installation Information does not include a\nrequirement to continue to provide support service, warranty, or updates\nfor a work that has been modified or installed by the recipient, or for\nthe User Product in which it has been modified or installed.  
Access to a\nnetwork may be denied when the modification itself materially and\nadversely affects the operation of the network or violates the rules and\nprotocols for communication across the network.\n\n  Corresponding Source conveyed, and Installation Information provided,\nin accord with this section must be in a format that is publicly\ndocumented (and with an implementation available to the public in\nsource code form), and must require no special password or key for\nunpacking, reading or copying.\n\n  7. Additional Terms.\n\n  \"Additional permissions\" are terms that supplement the terms of this\nLicense by making exceptions from one or more of its conditions.\nAdditional permissions that are applicable to the entire Program shall\nbe treated as though they were included in this License, to the extent\nthat they are valid under applicable law.  If additional permissions\napply only to part of the Program, that part may be used separately\nunder those permissions, but the entire Program remains governed by\nthis License without regard to the additional permissions.\n\n  When you convey a copy of a covered work, you may at your option\nremove any additional permissions from that copy, or from any part of\nit.  (Additional permissions may be written to require their own\nremoval in certain cases when you modify the work.)  
You may place\nadditional permissions on material, added by you to a covered work,\nfor which you have or can give appropriate copyright permission.\n\n  Notwithstanding any other provision of this License, for material you\nadd to a covered work, you may (if authorized by the copyright holders of\nthat material) supplement the terms of this License with terms:\n\n    a) Disclaiming warranty or limiting liability differently from the\n    terms of sections 15 and 16 of this License; or\n\n    b) Requiring preservation of specified reasonable legal notices or\n    author attributions in that material or in the Appropriate Legal\n    Notices displayed by works containing it; or\n\n    c) Prohibiting misrepresentation of the origin of that material, or\n    requiring that modified versions of such material be marked in\n    reasonable ways as different from the original version; or\n\n    d) Limiting the use for publicity purposes of names of licensors or\n    authors of the material; or\n\n    e) Declining to grant rights under trademark law for use of some\n    trade names, trademarks, or service marks; or\n\n    f) Requiring indemnification of licensors and authors of that\n    material by anyone who conveys the material (or modified versions of\n    it) with contractual assumptions of liability to the recipient, for\n    any liability that these contractual assumptions directly impose on\n    those licensors and authors.\n\n  All other non-permissive additional terms are considered \"further\nrestrictions\" within the meaning of section 10.  If the Program as you\nreceived it, or any part of it, contains a notice stating that it is\ngoverned by this License along with a term that is a further\nrestriction, you may remove that term.  
If a license document contains\na further restriction but permits relicensing or conveying under this\nLicense, you may add to a covered work material governed by the terms\nof that license document, provided that the further restriction does\nnot survive such relicensing or conveying.\n\n  If you add terms to a covered work in accord with this section, you\nmust place, in the relevant source files, a statement of the\nadditional terms that apply to those files, or a notice indicating\nwhere to find the applicable terms.\n\n  Additional terms, permissive or non-permissive, may be stated in the\nform of a separately written license, or stated as exceptions;\nthe above requirements apply either way.\n\n  8. Termination.\n\n  You may not propagate or modify a covered work except as expressly\nprovided under this License.  Any attempt otherwise to propagate or\nmodify it is void, and will automatically terminate your rights under\nthis License (including any patent licenses granted under the third\nparagraph of section 11).\n\n  However, if you cease all violation of this License, then your\nlicense from a particular copyright holder is reinstated (a)\nprovisionally, unless and until the copyright holder explicitly and\nfinally terminates your license, and (b) permanently, if the copyright\nholder fails to notify you of the violation by some reasonable means\nprior to 60 days after the cessation.\n\n  Moreover, your license from a particular copyright holder is\nreinstated permanently if the copyright holder notifies you of the\nviolation by some reasonable means, this is the first time you have\nreceived notice of violation of this License (for any work) from that\ncopyright holder, and you cure the violation prior to 30 days after\nyour receipt of the notice.\n\n  Termination of your rights under this section does not terminate the\nlicenses of parties who have received copies or rights from you under\nthis License.  
If your rights have been terminated and not permanently\nreinstated, you do not qualify to receive new licenses for the same\nmaterial under section 10.\n\n  9. Acceptance Not Required for Having Copies.\n\n  You are not required to accept this License in order to receive or\nrun a copy of the Program.  Ancillary propagation of a covered work\noccurring solely as a consequence of using peer-to-peer transmission\nto receive a copy likewise does not require acceptance.  However,\nnothing other than this License grants you permission to propagate or\nmodify any covered work.  These actions infringe copyright if you do\nnot accept this License.  Therefore, by modifying or propagating a\ncovered work, you indicate your acceptance of this License to do so.\n\n  10. Automatic Licensing of Downstream Recipients.\n\n  Each time you convey a covered work, the recipient automatically\nreceives a license from the original licensors, to run, modify and\npropagate that work, subject to this License.  You are not responsible\nfor enforcing compliance by third parties with this License.\n\n  An \"entity transaction\" is a transaction transferring control of an\norganization, or substantially all assets of one, or subdividing an\norganization, or merging organizations.  If propagation of a covered\nwork results from an entity transaction, each party to that\ntransaction who receives a copy of the work also receives whatever\nlicenses to the work the party's predecessor in interest had or could\ngive under the previous paragraph, plus a right to possession of the\nCorresponding Source of the work from the predecessor in interest, if\nthe predecessor has it or can get it with reasonable efforts.\n\n  You may not impose any further restrictions on the exercise of the\nrights granted or affirmed under this License.  
For example, you may\nnot impose a license fee, royalty, or other charge for exercise of\nrights granted under this License, and you may not initiate litigation\n(including a cross-claim or counterclaim in a lawsuit) alleging that\nany patent claim is infringed by making, using, selling, offering for\nsale, or importing the Program or any portion of it.\n\n  11. Patents.\n\n  A \"contributor\" is a copyright holder who authorizes use under this\nLicense of the Program or a work on which the Program is based.  The\nwork thus licensed is called the contributor's \"contributor version\".\n\n  A contributor's \"essential patent claims\" are all patent claims\nowned or controlled by the contributor, whether already acquired or\nhereafter acquired, that would be infringed by some manner, permitted\nby this License, of making, using, or selling its contributor version,\nbut do not include claims that would be infringed only as a\nconsequence of further modification of the contributor version.  For\npurposes of this definition, \"control\" includes the right to grant\npatent sublicenses in a manner consistent with the requirements of\nthis License.\n\n  Each contributor grants you a non-exclusive, worldwide, royalty-free\npatent license under the contributor's essential patent claims, to\nmake, use, sell, offer for sale, import and otherwise run, modify and\npropagate the contents of its contributor version.\n\n  In the following three paragraphs, a \"patent license\" is any express\nagreement or commitment, however denominated, not to enforce a patent\n(such as an express permission to practice a patent or covenant not to\nsue for patent infringement).  
To \"grant\" such a patent license to a\nparty means to make such an agreement or commitment not to enforce a\npatent against the party.\n\n  If you convey a covered work, knowingly relying on a patent license,\nand the Corresponding Source of the work is not available for anyone\nto copy, free of charge and under the terms of this License, through a\npublicly available network server or other readily accessible means,\nthen you must either (1) cause the Corresponding Source to be so\navailable, or (2) arrange to deprive yourself of the benefit of the\npatent license for this particular work, or (3) arrange, in a manner\nconsistent with the requirements of this License, to extend the patent\nlicense to downstream recipients.  \"Knowingly relying\" means you have\nactual knowledge that, but for the patent license, your conveying the\ncovered work in a country, or your recipient's use of the covered work\nin a country, would infringe one or more identifiable patents in that\ncountry that you have reason to believe are valid.\n\n  If, pursuant to or in connection with a single transaction or\narrangement, you convey, or propagate by procuring conveyance of, a\ncovered work, and grant a patent license to some of the parties\nreceiving the covered work authorizing them to use, propagate, modify\nor convey a specific copy of the covered work, then the patent license\nyou grant is automatically extended to all recipients of the covered\nwork and works based on it.\n\n  A patent license is \"discriminatory\" if it does not include within\nthe scope of its coverage, prohibits the exercise of, or is\nconditioned on the non-exercise of one or more of the rights that are\nspecifically granted under this License.  
You may not convey a covered\nwork if you are a party to an arrangement with a third party that is\nin the business of distributing software, under which you make payment\nto the third party based on the extent of your activity of conveying\nthe work, and under which the third party grants, to any of the\nparties who would receive the covered work from you, a discriminatory\npatent license (a) in connection with copies of the covered work\nconveyed by you (or copies made from those copies), or (b) primarily\nfor and in connection with specific products or compilations that\ncontain the covered work, unless you entered into that arrangement,\nor that patent license was granted, prior to 28 March 2007.\n\n  Nothing in this License shall be construed as excluding or limiting\nany implied license or other defenses to infringement that may\notherwise be available to you under applicable patent law.\n\n  12. No Surrender of Others' Freedom.\n\n  If conditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot convey a\ncovered work so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you may\nnot convey it at all.  For example, if you agree to terms that obligate you\nto collect a royalty for further conveying from those to whom you convey\nthe Program, the only way you could satisfy both those terms and this\nLicense would be to refrain entirely from conveying the Program.\n\n  13. Use with the GNU Affero General Public License.\n\n  Notwithstanding any other provision of this License, you have\npermission to link or combine any covered work with a work licensed\nunder version 3 of the GNU Affero General Public License into a single\ncombined work, and to convey the resulting work.  
The terms of this\nLicense will continue to apply to the part which is the covered work,\nbut the special requirements of the GNU Affero General Public License,\nsection 13, concerning interaction through a network will apply to the\ncombination as such.\n\n  14. Revised Versions of this License.\n\n  The Free Software Foundation may publish revised and/or new versions of\nthe GNU General Public License from time to time.  Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\n  Each version is given a distinguishing version number.  If the\nProgram specifies that a certain numbered version of the GNU General\nPublic License \"or any later version\" applies to it, you have the\noption of following the terms and conditions either of that numbered\nversion or of any later version published by the Free Software\nFoundation.  If the Program does not specify a version number of the\nGNU General Public License, you may choose any version ever published\nby the Free Software Foundation.\n\n  If the Program specifies that a proxy can decide which future\nversions of the GNU General Public License can be used, that proxy's\npublic statement of acceptance of a version permanently authorizes you\nto choose that version for the Program.\n\n  Later license versions may give you additional or different\npermissions.  However, no additional obligations are imposed on any\nauthor or copyright holder as a result of your choosing to follow a\nlater version.\n\n  15. Disclaimer of Warranty.\n\n  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY\nAPPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT\nHOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY\nOF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,\nTHE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\nPURPOSE.  
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM\nIS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF\nALL NECESSARY SERVICING, REPAIR OR CORRECTION.\n\n  16. Limitation of Liability.\n\n  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS\nTHE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY\nGENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE\nUSE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF\nDATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD\nPARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),\nEVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF\nSUCH DAMAGES.\n\n  17. Interpretation of Sections 15 and 16.\n\n  If the disclaimer of warranty and limitation of liability provided\nabove cannot be given local legal effect according to their terms,\nreviewing courts shall apply local law that most closely approximates\nan absolute waiver of all civil liability in connection with the\nProgram, unless a warranty or assumption of liability accompanies a\ncopy of the Program in return for a fee.\n\n                     END OF TERMS AND CONDITIONS\n\n            How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  
It is safest\nto attach them to the start of each source file to most effectively\nstate the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    CDQR — Cold Disk Quick Response tool.\n    Copyright (C) 2017  Alan Orlikoski\n\n    This program is free software: you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation, either version 3 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License\n    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\nAlso add information on how to contact you by electronic and paper mail.\n\n  If the program does terminal interaction, make it output a short\nnotice like this when it starts in an interactive mode:\n\n    CDQR  Copyright (C) 2017  Alan Orlikoski\n    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  
Of course, your program's commands\nmight be different; for a GUI interface, you would use an \"about box\".\n\n  You should also get your employer (if you work as a programmer) or school,\nif any, to sign a \"copyright disclaimer\" for the program, if necessary.\nFor more information on this, and how to apply and follow the GNU GPL, see\n<http://www.gnu.org/licenses/>.\n\n  The GNU General Public License does not permit incorporating your program\ninto proprietary programs.  If your program is a subroutine library, you\nmay consider it more useful to permit linking proprietary applications with\nthe library.  If this is what you want to do, use the GNU Lesser General\nPublic License instead of this License.  But first, please read\n<http://www.gnu.org/philosophy/why-not-lgpl.html>."
  },
  {
    "path": "README.md",
    "content": "## NAME\n\nCDQR — Cold Disk Quick Response tool by Alan Orlikoski\n\nFor latest release click [here](https://github.com/orlikoski/CDQR/releases/latest)\n\n## Please Read\n[Open Letter to the users of Skadi, CyLR, and CDQR](https://docs.google.com/document/d/1L6CBvFd7d1Qf4IxSJSdkKMTdbBuWzSzUM3u_h5ZCegY/edit?usp=sharing)\n\n## Videos and Media\n*  [OSDFCON 2017](http://www.osdfcon.org/presentations/2017/Asif-Matadar_Rapid-Incident-Response.pdf) Slides: Walk-through different techniques that are required to provide forensics results for Windows and *nix environments (Including CyLR and CDQR)\n\n## What is CDQR?\nThe CDQR tool uses Plaso to parse forensic artifacts and/or disk images with specific parsers and create easy to analyze custom reports. The parsers were chosen based on triaging best practices and the custom reports group like items together to make analysis easier. The design came from the Live Response Model of investigating the important artifacts first. This is meant to be a starting point for investigations, not the complete investigation.\n\nIn addition to processing entire forensic images it also parses extracted forensic artifact(s) as an individual file or collection of files inside of a folder structure (or inside a .zip file).\n\nIt creates up to 18 Reports (.csv files) based on triaging best practices and the parsing option selected\n*  18 Reports for DATT:  \n      ```\n      Appcompat, Amcache, Bash, Event Logs, File System, MFT, UsnJrnl, Internet History, Prefetch, Registry, Scheduled Tasks, Persistence, System Information, AntiVirus, Firewall, Mac, Linux, and Android\n      ```\n*  14 Reports for Win:  \n      ```\n      Appcompat, Amcache, Bash, Event Logs, File System, MFT, UsnJrnl, Internet History, Prefetch, Registry, Scheduled Tasks, Persistence, System Information, AntiVirus, Firewall\n      ```\n*   8 Reports for Mac and Lin:  \n      ```\n      File System, Internet History, System Information, AntiVirus, Firewall, Mac, 
and Linux\n      ```\n*   7 Reports for Android:  \n      ```\n      File System, Internet History, Persistence, System Information, AntiVirus, Firewall, and Android\n      ```\n\n\n## Important Notes\n* Make sure account has permissions to create files and directories when running (when in doubt, run as administrator)\n*  Ensure line endings are correct for the OS it is running on\n\n## DESCRIPTION\n\nThis program uses [Plaso](https://github.com/log2timeline/plaso/wiki) and a streamlined list of its parsers to quickly analyze a forensic image file (dd, E01, .vmdk, etc) or group of forensic artifacts.  The results are output in either ElasticSearch, JSON (line delimited), or the following report files in CSV format:\n*  18 Reports for DATT:  \n      ```\n      Appcompat, Amcache, Bash, Event Logs, File System, MFT, UsnJrnl, Internet History, Prefetch, Registry, Scheduled Tasks, Persistence, System Information, AntiVirus, Firewall, Mac, Linux, and Android\n      ```\n*  14 Reports for Win:  \n      ```\n      Appcompat, Amcache, Bash, Event Logs, File System, MFT, UsnJrnl, Internet History, Prefetch, Registry, Scheduled Tasks, Persistence, System Information, AntiVirus, Firewall\n      ```\n*   8 Reports for Mac and Lin:  \n      ```\n      File System, Internet History, System Information, AntiVirus, Firewall, Mac, and Linux\n      ```\n*   7 Reports for Android:  \n      ```\n      File System, Internet History, Persistence, System Information, AntiVirus, Firewall, and Android\n      ```\n\n## ARGUMENTS & OPTIONS\n```\npositional arguments:\n  src_location          Source File location: Y:/Case/Tag009/sample.E01\n  dst_location          Destination Folder location. If nothing is supplied\n                        then the default is 'Results'\n\noptional arguments:\n  -h, --help            show this help message and exit\n  -p PARSER, --parser PARSER\n                        Choose parser to use. If nothing chosen then 'win' is\n                        used. 
The parsing options are: win, mft_usnjrnl, lin,\n                        mac, datt\n  --nohash              Do not hash all the files as part of the processing of\n                        the image\n  --mft                 Process the MFT file (disabled by default except for\n                        DATT)\n  --usnjrnl             Process the USNJRNL file (disabled by default except\n                        for DATT)\n  --max_cpu             Use the maximum number of cpu cores to process the\n                        image\n  --export              Creates zipped, line delimited json export file\n  --artifact_filters ARTIFACT_FILTERS\n                        Plaso passthrough: Names of forensic artifact\n                        definitions, provided on the command command line\n                        (comma separated). Forensic artifacts are stored in\n                        .yaml files that are directly pulled from the artifact\n                        definitions project. You can also specify a custom\n                        artifacts yaml file (see\n                        --custom_artifact_definitions). Artifact definitions\n                        can be used to describe and quickly collect data of\n                        interest, such as specific files or Windows Registry\n                        keys.\n  --artifact_filters_file ARTIFACT_FILTERS_FILE\n                        Plaso passthrough: Names of forensic artifact\n                        definitions, provided in a file with one artifact name\n                        per line. Forensic artifacts are stored in .yaml files\n                        that are directly pulled from the artifact definitions\n                        project. You can also specify a custom artifacts yaml\n                        file (see --custom_artifact_definitions). 
Artifact\n                        definitions can be used to describe and quickly\n                        collect data of interest, such as specific files or\n                        Windows Registry keys.\n  --artifact_definitions ARTIFACT_DEFINITIONS\n                        Plaso passthrough: Path to a directory containing\n                        artifact definitions, which are .yaml files. Artifact\n                        definitions can be used to describe and quickly\n                        collect data of interest, such as specific files or\n                        Windows Registry keys.\n  --custom_artifact_definitions CUSTOM_ARTIFACT_DEFINITIONS\n                        Plaso passthrough: Path to a file containing custom\n                        artifact definitions, which are .yaml files. Artifact\n                        definitions can be used to describe and quickly\n                        collect data of interest, such as specific files or\n                        Windows Registry keys.\n  --file_filter FILE_FILTER, -f FILE_FILTER\n                        Plaso passthrough: List of files to include for\n                        targeted collection of files to parse, one line per\n                        file path, setup is /path|file - where each element\n                        can contain either a variable set in the preprocessing\n                        stage or a regular expression.\n  --es_kb ES_KB         Outputs Kibana format to elasticsearch database.\n                        Requires index name. Example: '--es_kb my_index'\n  --es_kb_server ES_KB_SERVER\n                        Kibana Format Only: Exports to remote (default is\n                        127.0.0.1) elasticsearch database. 
Requires Server\n                        name or IP address Example: '--es_kb_server\n                        myserver.elk.go' or '--es_kb_server 192.168.1.10'\n  --es_kb_port ES_KB_PORT\n                        Kibana Format Only: Port (default is 9200) for remote\n                        elasticsearch database. Requires port number Example:\n                        '--es_kb_port 9200 '\n  --es_kb_user ES_KB_USER\n                        Kibana Format Only: Username (default is none) for\n                        remote elasticsearch database. Requires port number\n                        Example: '--es_kb_user skadi '\n  --es_ts ES_TS         Outputs TimeSketch format to elasticsearch database.\n                        Requires index/timesketch name. Example: '--es_ts\n                        my_name'\n  --plaso_db            Process an existing Plaso DB file. Example:\n                        artifacts.plaso\n  -z                    Indicates the input file is a zip file and needs to be\n                        decompressed\n  --no_dependencies_check\n                        Re-enables the log2timeline the dependencies check. It\n                        is skipped by default\n  --process_archives    Extract and inspect contents of archives found inside\n                        of artifacts or disk images\n  -v, --version         show program's version number and exit\n  -y                    Accepts all defaults on prompted questions in the\n                        program.\n```\n\n## DEPENDENCIES\n\n1. 64-bit Windows, Linux, or Mac Operating System (OS)\n2. The appropriate version of Plaso for the OS https://github.com/log2timeline/plaso/releases\n3. 
[Python v3.x](https://www.python.org/downloads/) (if using cdqr.py source code)\n\n## EXAMPLES\n\n```\ncdqr.py c:\\mydiskimage.vmdk myresults\n```\n```\ncdqr.exe -p win c:\\images\\badlaptop.e01\n```\n```\ncdqr.exe -p datt --max_cpu C:\\artifacts\\tag009\n```\n```\ncdqr.exe -p datt --max_cpu C:\\artifacts\\tag009\\$MFT --export\n```\n```\ncdqr.exe -z --max_cpu C:\\artifacts\\tag009\\artifacts.zip\n```\n```\ncdqr.exe -z --max_cpu C:\\artifacts\\tag009\\artifacts.zip --es myindexname\n```\n\n\n## AUTHOR\n\nAlan Orlikoski\n* [GitHub](https://github.com/orlikoski)\n* [Twitter](https://twitter.com/AlanOrlikoski)\n"
  },
  {
    "path": "ThankYou",
    "content": "Thanks to the Plaso team whose product is great (https://github.com/log2timeline/plaso/wiki)\nThanks to Andrew Moore for teaching me the ways of making .md files\nThank you to my friends and coworkers at Mandiant\nThank you to everyone who helped me along the way"
  },
  {
    "path": "docs/parser_datt.csv",
    "content": "amcache,\nandroid_app_usage,\napache_access,\nasl_log,\nbash_history,\nbash,\nbencode,\nbinary_cookies,\nbsm_log,\nchrome_cache,\nchrome_preferences,\ncups_ipp,\ncustom_destinations,\nczip,\ndockerjson,\ndpkg,\nesedb,\nfilestat,\nfirefox_cache,\nfirefox_cache2,\nfsevents,\ngdrive_synclog,\nhachoir,\njava_idx,\nlnk,\nmac_appfirewall_log,\nmac_keychain,\nmac_securityd,\nmactime,\nmacwifi,\nmcafee_protection,\nmft,\nmsiecf,\nolecf,\nopera_global,\nopera_typed_history,\npe,\nplist,\npls_recall,\npopularity_contest,\nprefetch,\nrecycle_bin_info2,\nrecycle_bin,\nrplog,\nsanta,\nsccm,\nselinux,\nskydrive_log_old,\nskydrive_log,\nsophos_av,\nsqlite,\nsymantec_scanlog,\nsyslog,\nsystemd_journal,\ntrendmicro_url,\ntrendmicro_vd,\nusnjrnl,\nutmp,\nutmpx,\nwinevt,\nwinevtx,\nwinfirewall,\nwiniis,\nwinjob,\nwinreg,\nxchatlog,\nxchatscrollback,\nzsh_extended_history\n"
  },
  {
    "path": "docs/parser_lin.csv",
    "content": "bash,\nbash_history,\nbencode,\nczip,\ndockerjson,\ndpkg,\nfilestat,\nmcafee_protection,\nolecf,\npls_recall,\npopularity_contest,\nselinux,\nsophos_av,\nsqlite,\nsymantec_scanlog,\nsyslog,\nsystemd_journal,\nutmp,\nwebhist,\nxchatlog,\nxchatscrollback,\nzsh_extended_history\n"
  },
  {
    "path": "docs/parser_mac.csv",
    "content": "asl_log,\nbash_history,\nbash,\nbencode,\nbsm_log,\nccleaner,\ncups_ipp,\nczip,\nplist,\nfilestat,\nfseventsd,\nmac_appfirewall_log,\nmac_keychain,\nmac_securityd,\nmacwifi,\nmcafee_protection,\nolecf,\nsophos_av,\nsqlite,\nsymantec_scanlog,\nsyslog,\nutmpx,\nwebhist,\nzsh_extended_history\n"
  },
  {
    "path": "docs/parser_win.csv",
    "content": "bencode,\nczip,\nccleaner,\nesedb,\nfilestat,\nlnk,\nmft,\nmcafee_protection,\nolecf,\npe,\nprefetch,\nrecycle_bin,\nrecycle_bin_info2,\nsccm,\nsophos_av,\nsqlite,\nsymantec_scanlog,\nusnjrnl,\nwinevt,\nwinevtx,\nwebhist,\nwinfirewall,\nwinjob,\nwindows_typed_urls,\nwinreg\n"
  },
  {
    "path": "src/cdqr.py",
    "content": "#!/usr/bin/python3\n\"\"\"\nThis program is free software: you can redistribute it and/or modify it under\nthe terms of the GNU General Public License as published by the Free Software\nFoundation, either version 3 of the License, or (at your option) any later\nversion.\nThis program is distributed in the hope that it will be useful, but WITHOUT\nANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\nFOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.\nYou should have received a copy of the GNU General Public License along with\nthis program. If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\nimport io, \\\n  os, \\\n  sys, \\\n  argparse, \\\n  subprocess, \\\n  csv, \\\n  time, \\\n  datetime, \\\n  re, \\\n  multiprocessing, \\\n  shutil, \\\n  zipfile, \\\n  queue, \\\n  threading\ntry:\n    import zlib\n    compression = zipfile.ZIP_DEFLATED\nexcept:\n    compression = zipfile.ZIP_STORED\n\nif sys.version_info[0] < 3:\n    print(\n        'CDQR requires python3 and python2 was detected. 
Please run this script with an compatible python interpreter.'\n    )\n    sys.exit(1)\n\nmodes = {\n    zipfile.ZIP_DEFLATED: 'deflated',\n    zipfile.ZIP_STORED: 'stored',\n}\n###############################################################################\n# Created by: Alan Orlikoski\ncdqr_version = \"CDQR Version: 20191226\"\n#\n###############################################################################\n# Global Variables\nparser_opt = \"\"\nsrc_loc = \"\"\ndst_loc = \"\"\nstart_dt = datetime.datetime.now()\nend_dt = start_dt\nduration = end_dt - start_dt\nduration01 = end_dt - start_dt\nduration02 = end_dt - start_dt\nduration03 = end_dt - start_dt\ncreate_db = True\n\n# Dictionary of parsing options from command line to log2timeline\nparse_optionslatest = {\n    'win':\n    \"bash_history,bencode,czip,esedb,filestat,lnk,mcafee_protection,olecf,pe,prefetch,recycle_bin,recycle_bin_info2,sccm,sophos_av,sqlite,symantec_scanlog,winevt,winevtx,webhist,winfirewall,winjob,winreg,zsh_extended_history\",\n    'mft_usnjrnl':\n    \"mft,usnjrnl\",\n    'lin':\n    \"bash_history,bencode,binary_cookies,chrome_cache,chrome_preferences,czip/oxml,dockerjson,dpkg,esedb/msie_webcache,filestat,firefox_cache,gdrive_synclog,java_idx,msiecf,olecf,opera_global,opera_typed_history,plist/safari_history,pls_recall,popularity_contest,selinux,sqlite/chrome_27_history,sqlite/chrome_8_history,sqlite/chrome_autofill,sqlite/chrome_cookies,sqlite/chrome_extension_activity,sqlite/firefox_cookies,sqlite/firefox_downloads,sqlite/firefox_history,sqlite/google_drive,sqlite/skype,sqlite/zeitgeist,syslog,systemd_journal,utmp,xchatlog,xchatscrollback,zsh_extended_history\",\n    'mac':\n    
\"asl_log,bash_history,bencode,binary_cookies,bsm_log,chrome_cache,chrome_preferences,cups_ipp,czip/oxml,esedb/msie_webcache,filestat,firefox_cache,fseventsd,gdrive_synclog,java_idx,mac_appfirewall_log,mac_keychain,mac_securityd,macwifi,msiecf,olecf,opera_global,opera_typed_history,plist,plist/safari_history,sqlite/appusage,sqlite/chrome_27_history,sqlite/chrome_8_history,sqlite/chrome_autofill,sqlite/chrome_cookies,sqlite/chrome_extension_activity,sqlite/firefox_cookies,sqlite/firefox_downloads,sqlite/firefox_history,sqlite/google_drive,sqlite/imessage,sqlite/ls_quarantine,sqlite/mac_document_versions,sqlite/mac_knowledgec,sqlite/mac_notes,sqlite/mackeeper_cache,sqlite/skype,syslog,utmpx,zsh_extended_history\",\n    'android':\n    \"android_app_usage,chrome_cache,filestat,sqlite/android_calls,sqlite/android_sms,sqlite/android_webview,sqlite/android_webviewcache,sqlite/chrome_27_history,sqlite/chrome_8_history,sqlite/chrome_cookies,sqlite/skype\",\n    'datt':\n    \"amcache,android_app_usage,apache_access,asl_log,bash_history,bencode,binary_cookies,bsm_log,chrome_cache,chrome_preferences,cups_ipp,custom_destinations,czip,dockerjson,dpkg,esedb,filestat,firefox_cache,firefox_cache2,fseventsd,gdrive_synclog,java_idx,lnk,mac_appfirewall_log,mac_keychain,mac_securityd,mactime,macwifi,mcafee_protection,mft,msiecf,olecf,opera_global,opera_typed_history,pe,plist,pls_recall,popularity_contest,prefetch,recycle_bin,recycle_bin_info2,rplog,santa,sccm,selinux,skydrive_log,skydrive_log_old,sophos_av,sqlite,symantec_scanlog,syslog,systemd_journal,trendmicro_url,trendmicro_vd,usnjrnl,utmp,utmpx,winevt,winevtx,winfirewall,winiis,winjob,winreg,xchatlog,xchatscrollback,zsh_extended_history\",\n}\n\n# All credit for these definitions below to: https://www.ultimatewindowssecurity.com/securitylog/encyclopedia/default.aspx\neventlog_dict = {\n    '512':\n    'Windows NT is starting up',\n    '513':\n    'Windows is shutting down',\n    '514':\n    'An authentication package has been 
loaded by the Local Security Authority',\n    '515':\n    'A trusted logon process has registered with the Local Security Authority',\n    '516':\n    'Internal resources allocated for the queuing of audit messages have been exhausted, leading to the loss of some audits',\n    '517':\n    'The audit log was cleared',\n    '518':\n    'A notification package has been loaded by the Security Account Manager',\n    '519':\n    'A process is using an invalid local procedure call (LPC) port',\n    '520':\n    'The system time was changed',\n    '521':\n    'Unable to log events to security log',\n    '528':\n    'Successful Logon',\n    '529':\n    'Logon Failure - Unknown user name or bad password',\n    '530':\n    'Logon Failure - Account logon time restriction violation',\n    '531':\n    'Logon Failure - Account currently disabled',\n    '532':\n    'Logon Failure - The specified user account has expired',\n    '533':\n    'Logon Failure - User not allowed to logon at this computer',\n    '534':\n    'Logon Failure - The user has not been granted the requested logon type at this machine',\n    '535':\n    'Logon Failure - The specified accounts password has expired',\n    '536':\n    'Logon Failure - The NetLogon component is not active',\n    '537':\n    'Logon failure - The logon attempt failed for other reasons.',\n    '538':\n    'User Logoff',\n    '539':\n    'Logon Failure - Account locked out',\n    '540':\n    'Successful Network Logon',\n    '551':\n    'User initiated logoff',\n    '552':\n    'Logon attempt using explicit credentials',\n    '560':\n    'Object Open',\n    '561':\n    'Handle Allocated',\n    '562':\n    'Handle Closed',\n    '563':\n    'Object Open for Delete',\n    '564':\n    'Object Deleted',\n    '565':\n    'Object Open (Active Directory)',\n    '566':\n    'Object Operation (W3 Active Directory)',\n    '567':\n    'Object Access Attempt',\n    '576':\n    'Special privileges assigned to new logon',\n    '577':\n    'Privileged 
Service Called',\n    '578':\n    'Privileged object operation',\n    '592':\n    'A new process has been created',\n    '593':\n    'A process has exited',\n    '594':\n    'A handle to an object has been duplicated',\n    '595':\n    'Indirect access to an object has been obtained',\n    '596':\n    'Backup of data protection master key',\n    '600':\n    'A process was assigned a primary token',\n    '601':\n    'Attempt to install service',\n    '602':\n    'Scheduled Task created',\n    '608':\n    'User Right Assigned',\n    '609':\n    'User Right Removed',\n    '610':\n    'New Trusted Domain',\n    '611':\n    'Removing Trusted Domain',\n    '612':\n    'Audit Policy Change',\n    '613':\n    'IPSec policy agent started',\n    '614':\n    'IPSec policy agent disabled',\n    '615':\n    'IPSEC PolicyAgent Service',\n    '616':\n    'IPSec policy agent encountered a potentially serious failure.',\n    '617':\n    'Kerberos Policy Changed',\n    '618':\n    'Encrypted Data Recovery Policy Changed',\n    '619':\n    'Quality of Service Policy Changed',\n    '620':\n    'Trusted Domain Information Modified',\n    '621':\n    'System Security Access Granted',\n    '622':\n    'System Security Access Removed',\n    '623':\n    'Per User Audit Policy was refreshed',\n    '624':\n    'User Account Created',\n    '625':\n    'User Account Type Changed',\n    '626':\n    'User Account Enabled',\n    '627':\n    'Change Password Attempt',\n    '628':\n    'User Account password set',\n    '629':\n    'User Account Disabled',\n    '630':\n    'User Account Deleted',\n    '631':\n    'Security Enabled Global Group Created',\n    '632':\n    'Security Enabled Global Group Member Added',\n    '633':\n    'Security Enabled Global Group Member Removed',\n    '634':\n    'Security Enabled Global Group Deleted',\n    '635':\n    'Security Enabled Local Group Created',\n    '636':\n    'Security Enabled Local Group Member Added',\n    '637':\n    'Security Enabled Local Group 
Member Removed',\n    '638':\n    'Security Enabled Local Group Deleted',\n    '639':\n    'Security Enabled Local Group Changed',\n    '640':\n    'General Account Database Change',\n    '641':\n    'Security Enabled Global Group Changed',\n    '642':\n    'User Account Changed',\n    '643':\n    'Domain Policy Changed',\n    '644':\n    'User Account Locked Out',\n    '645':\n    'Computer Account Created',\n    '646':\n    'Computer Account Changed',\n    '647':\n    'Computer Account Deleted',\n    '648':\n    'Security Disabled Local Group Created',\n    '649':\n    'Security Disabled Local Group Changed',\n    '650':\n    'Security Disabled Local Group Member Added',\n    '651':\n    'Security Disabled Local Group Member Removed',\n    '652':\n    'Security Disabled Local Group Deleted',\n    '653':\n    'Security Disabled Global Group Created',\n    '654':\n    'Security Disabled Global Group Changed',\n    '655':\n    'Security Disabled Global Group Member Added',\n    '656':\n    'Security Disabled Global Group Member Removed',\n    '657':\n    'Security Disabled Global Group Deleted',\n    '658':\n    'Security Enabled Universal Group Created',\n    '659':\n    'Security Enabled Universal Group Changed',\n    '660':\n    'Security Enabled Universal Group Member Added',\n    '661':\n    'Security Enabled Universal Group Member Removed',\n    '662':\n    'Security Enabled Universal Group Deleted',\n    '663':\n    'Security Disabled Universal Group Created',\n    '664':\n    'Security Disabled Universal Group Changed',\n    '665':\n    'Security Disabled Universal Group Member Added',\n    '666':\n    'Security Disabled Universal Group Member Removed',\n    '667':\n    'Security Disabled Universal Group Deleted',\n    '668':\n    'Group Type Changed',\n    '669':\n    'Add SID History',\n    '670':\n    'Add SID History',\n    '671':\n    'User Account Unlocked',\n    '672':\n    'Authentication Ticket Granted',\n    '673':\n    'Service Ticket Granted',\n  
  '674':\n    'Ticket Granted Renewed',\n    '675':\n    'Pre-authentication failed',\n    '676':\n    'Authentication Ticket Request Failed',\n    '677':\n    'Service Ticket Request Failed',\n    '678':\n    'Account Mapped for Logon by',\n    '679':\n    'The name: %2 could not be mapped for logon by: %1',\n    '680':\n    'Account Used for Logon by',\n    '681':\n    'The logon to account: %2 by: %1 from workstation: %3 failed.',\n    '682':\n    'Session reconnected to winstation',\n    '683':\n    'Session disconnected from winstation',\n    '684':\n    'Set ACLs of members in administrators groups',\n    '685':\n    'Account Name Changed',\n    '686':\n    'Password of the following user accessed',\n    '687':\n    'Basic Application Group Created',\n    '688':\n    'Basic Application Group Changed',\n    '689':\n    'Basic Application Group Member Added',\n    '690':\n    'Basic Application Group Member Removed',\n    '691':\n    'Basic Application Group Non-Member Added',\n    '692':\n    'Basic Application Group Non-Member Removed',\n    '693':\n    'Basic Application Group Deleted',\n    '694':\n    'LDAP Query Group Created',\n    '695':\n    'LDAP Query Group Changed',\n    '696':\n    'LDAP Query Group Deleted',\n    '697':\n    'Password Policy Checking API is called',\n    '806':\n    'Per User Audit Policy was refreshed',\n    '807':\n    'Per user auditing policy set for user',\n    '808':\n    'A security event source has attempted to register',\n    '809':\n    'A security event source has attempted to unregister',\n    '848':\n    'The following policy was active when the Windows Firewall started',\n    '849':\n    'An application was listed as an exception when the Windows Firewall started',\n    '850':\n    'A port was listed as an exception when the Windows Firewall started',\n    '851':\n    'A change has been made to the Windows Firewall application exception list',\n    '852':\n    'A change has been made to the Windows Firewall port 
exception list',\n    '853':\n    'The Windows Firewall operational mode has changed',\n    '854':\n    'The Windows Firewall logging settings have changed',\n    '855':\n    'A Windows Firewall ICMP setting has changed',\n    '856':\n    'The Windows Firewall setting to allow unicast responses to multicast/broadcast traffic has changed',\n    '857':\n    'The Windows Firewall setting to allow remote administration, allowing port TCP 135 and DCOM/RPC, has changed',\n    '858':\n    'Windows Firewall group policy settings have been applied',\n    '859':\n    'The Windows Firewall group policy settings have been removed',\n    '860':\n    'The Windows Firewall has switched the active policy profile',\n    '861':\n    'The Windows Firewall has detected an application listening for incoming traffic',\n    '1100':\n    'The event logging service has shut down',\n    '1101':\n    'Audit events have been dropped by the transport.',\n    '1102':\n    'The audit log was cleared',\n    '1104':\n    'The security Log is now full',\n    '1105':\n    'Event log automatic backup',\n    '1108':\n    'The event logging service encountered an error',\n    '4608':\n    'Windows is starting up',\n    '4609':\n    'Windows is shutting down',\n    '4610':\n    'An authentication package has been loaded by the Local Security Authority',\n    '4611':\n    'A trusted logon process has been registered with the Local Security Authority',\n    '4612':\n    'Internal resources allocated for the queuing of audit messages have been exhausted, leading to the loss of some audits.',\n    '4614':\n    'A notification package has been loaded by the Security Account Manager.',\n    '4615':\n    'Invalid use of LPC port',\n    '4616':\n    'The system time was changed.',\n    '4618':\n    'A monitored security event pattern has occurred',\n    '4621':\n    'Administrator recovered system from CrashOnAuditFail',\n    '4622':\n    'A security package has been loaded by the Local Security Authority.',\n  
  '4624':\n    'An account was successfully logged on',\n    '4625':\n    'An account failed to log on',\n    '4626':\n    'User/Device claims information',\n    '4627':\n    'Group membership information.',\n    '4634':\n    'An account was logged off',\n    '4646':\n    'IKE DoS-prevention mode started',\n    '4647':\n    'User initiated logoff',\n    '4648':\n    'A logon was attempted using explicit credentials',\n    '4649':\n    'A replay attack was detected',\n    '4650':\n    'An IPsec Main Mode security association was established',\n    '4651':\n    'An IPsec Main Mode security association was established',\n    '4652':\n    'An IPsec Main Mode negotiation failed',\n    '4653':\n    'An IPsec Main Mode negotiation failed',\n    '4654':\n    'An IPsec Quick Mode negotiation failed',\n    '4655':\n    'An IPsec Main Mode security association ended',\n    '4656':\n    'A handle to an object was requested',\n    '4657':\n    'A registry value was modified',\n    '4658':\n    'The handle to an object was closed',\n    '4659':\n    'A handle to an object was requested with intent to delete',\n    '4660':\n    'An object was deleted',\n    '4661':\n    'A handle to an object was requested',\n    '4662':\n    'An operation was performed on an object',\n    '4663':\n    'An attempt was made to access an object',\n    '4664':\n    'An attempt was made to create a hard link',\n    '4665':\n    'An attempt was made to create an application client context.',\n    '4666':\n    'An application attempted an operation',\n    '4667':\n    'An application client context was deleted',\n    '4668':\n    'An application was initialized',\n    '4670':\n    'Permissions on an object were changed',\n    '4671':\n    'An application attempted to access a blocked ordinal through the TBS',\n    '4672':\n    'Special privileges assigned to new logon',\n    '4673':\n    'A privileged service was called',\n    '4674':\n    'An operation was attempted on a privileged object',\n    
'4675':\n    'SIDs were filtered',\n    '4688':\n    'A new process has been created',\n    '4689':\n    'A process has exited',\n    '4690':\n    'An attempt was made to duplicate a handle to an object',\n    '4691':\n    'Indirect access to an object was requested',\n    '4692':\n    'Backup of data protection master key was attempted',\n    '4693':\n    'Recovery of data protection master key was attempted',\n    '4694':\n    'Protection of auditable protected data was attempted',\n    '4695':\n    'Unprotection of auditable protected data was attempted',\n    '4696':\n    'A primary token was assigned to process',\n    '4697':\n    'A service was installed in the system',\n    '4698':\n    'A scheduled task was created',\n    '4699':\n    'A scheduled task was deleted',\n    '4700':\n    'A scheduled task was enabled',\n    '4701':\n    'A scheduled task was disabled',\n    '4702':\n    'A scheduled task was updated',\n    '4703':\n    'A token right was adjusted',\n    '4704':\n    'A user right was assigned',\n    '4705':\n    'A user right was removed',\n    '4706':\n    'A new trust was created to a domain',\n    '4707':\n    'A trust to a domain was removed',\n    '4709':\n    'IPsec Services was started',\n    '4710':\n    'IPsec Services was disabled',\n    '4711':\n    'PAStore Engine (1%)',\n    '4712':\n    'IPsec Services encountered a potentially serious failure',\n    '4713':\n    'Kerberos policy was changed',\n    '4714':\n    'Encrypted data recovery policy was changed',\n    '4715':\n    'The audit policy (SACL) on an object was changed',\n    '4716':\n    'Trusted domain information was modified',\n    '4717':\n    'System security access was granted to an account',\n    '4718':\n    'System security access was removed from an account',\n    '4719':\n    'System audit policy was changed',\n    '4720':\n    'A user account was created',\n    '4722':\n    'A user account was enabled',\n    '4723':\n    'An attempt was made to change an accounts 
password',\n    '4724':\n    'An attempt was made to reset an accounts password',\n    '4725':\n    'A user account was disabled',\n    '4726':\n    'A user account was deleted',\n    '4727':\n    'A security-enabled global group was created',\n    '4728':\n    'A member was added to a security-enabled global group',\n    '4729':\n    'A member was removed from a security-enabled global group',\n    '4730':\n    'A security-enabled global group was deleted',\n    '4731':\n    'A security-enabled local group was created',\n    '4732':\n    'A member was added to a security-enabled local group',\n    '4733':\n    'A member was removed from a security-enabled local group',\n    '4734':\n    'A security-enabled local group was deleted',\n    '4735':\n    'A security-enabled local group was changed',\n    '4737':\n    'A security-enabled global group was changed',\n    '4738':\n    'A user account was changed',\n    '4739':\n    'Domain Policy was changed',\n    '4740':\n    'A user account was locked out',\n    '4741':\n    'A computer account was created',\n    '4742':\n    'A computer account was changed',\n    '4743':\n    'A computer account was deleted',\n    '4744':\n    'A security-disabled local group was created',\n    '4745':\n    'A security-disabled local group was changed',\n    '4746':\n    'A member was added to a security-disabled local group',\n    '4747':\n    'A member was removed from a security-disabled local group',\n    '4748':\n    'A security-disabled local group was deleted',\n    '4749':\n    'A security-disabled global group was created',\n    '4750':\n    'A security-disabled global group was changed',\n    '4751':\n    'A member was added to a security-disabled global group',\n    '4752':\n    'A member was removed from a security-disabled global group',\n    '4753':\n    'A security-disabled global group was deleted',\n    '4754':\n    'A security-enabled universal group was created',\n    '4755':\n    'A security-enabled universal group 
was changed',\n    '4756':\n    'A member was added to a security-enabled universal group',\n    '4757':\n    'A member was removed from a security-enabled universal group',\n    '4758':\n    'A security-enabled universal group was deleted',\n    '4759':\n    'A security-disabled universal group was created',\n    '4760':\n    'A security-disabled universal group was changed',\n    '4761':\n    'A member was added to a security-disabled universal group',\n    '4762':\n    'A member was removed from a security-disabled universal group',\n    '4763':\n    'A security-disabled universal group was deleted',\n    '4764':\n    'A groups type was changed',\n    '4765':\n    'SID History was added to an account',\n    '4766':\n    'An attempt to add SID History to an account failed',\n    '4767':\n    'A user account was unlocked',\n    '4768':\n    'A Kerberos authentication ticket (TGT) was requested',\n    '4769':\n    'A Kerberos service ticket was requested',\n    '4770':\n    'A Kerberos service ticket was renewed',\n    '4771':\n    'Kerberos pre-authentication failed',\n    '4772':\n    'A Kerberos authentication ticket request failed',\n    '4773':\n    'A Kerberos service ticket request failed',\n    '4774':\n    'An account was mapped for logon',\n    '4775':\n    'An account could not be mapped for logon',\n    '4776':\n    'The domain controller attempted to validate the credentials for an account',\n    '4777':\n    'The domain controller failed to validate the credentials for an account',\n    '4778':\n    'A session was reconnected to a Window Station',\n    '4779':\n    'A session was disconnected from a Window Station',\n    '4780':\n    'The ACL was set on accounts which are members of administrators groups',\n    '4781':\n    'The name of an account was changed',\n    '4782':\n    'The password hash an account was accessed',\n    '4783':\n    'A basic application group was created',\n    '4784':\n    'A basic application group was changed',\n    
'4785':\n    'A member was added to a basic application group',\n    '4786':\n    'A member was removed from a basic application group',\n    '4787':\n    'A non-member was added to a basic application group',\n    '4788':\n    'A non-member was removed from a basic application group..',\n    '4789':\n    'A basic application group was deleted',\n    '4790':\n    'An LDAP query group was created',\n    '4791':\n    'A basic application group was changed',\n    '4792':\n    'An LDAP query group was deleted',\n    '4793':\n    'The Password Policy Checking API was called',\n    '4794':\n    'An attempt was made to set the Directory Services Restore Mode administrator password',\n    '4797':\n    'An attempt was made to query the existence of a blank password for an account',\n    '4798':\n    'A users local group membership was enumerated.',\n    '4799':\n    'A security-enabled local group membership was enumerated',\n    '4800':\n    'The workstation was locked',\n    '4801':\n    'The workstation was unlocked',\n    '4802':\n    'The screen saver was invoked',\n    '4803':\n    'The screen saver was dismissed',\n    '4816':\n    'RPC detected an integrity violation while decrypting an incoming message',\n    '4817':\n    'Auditing settings on object were changed.',\n    '4818':\n    'Proposed Central Access Policy does not grant the same access permissions as the current Central Access Policy',\n    '4819':\n    'Central Access Policies on the machine have been changed',\n    '4820':\n    'A Kerberos Ticket-granting-ticket (TGT) was denied because the device does not meet the access control restrictions',\n    '4821':\n    'A Kerberos service ticket was denied because the user, device, or both does not meet the access control restrictions',\n    '4822':\n    'NTLM authentication failed because the account was a member of the Protected User group',\n    '4823':\n    'NTLM authentication failed because access control restrictions are required',\n    '4824':\n    
'Kerberos preauthentication by using DES or RC4 failed because the account was a member of the Protected User group',\n    '4825':\n    'A user was denied the access to Remote Desktop. By default, users are allowed to connect only if they are members of the Remote Desktop Users group or Administrators group',\n    '4826':\n    'Boot Configuration Data loaded',\n    '4830':\n    'SID History was removed from an account',\n    '4864':\n    'A namespace collision was detected',\n    '4865':\n    'A trusted forest information entry was added',\n    '4866':\n    'A trusted forest information entry was removed',\n    '4867':\n    'A trusted forest information entry was modified',\n    '4868':\n    'The certificate manager denied a pending certificate request',\n    '4869':\n    'Certificate Services received a resubmitted certificate request',\n    '4870':\n    'Certificate Services revoked a certificate',\n    '4871':\n    'Certificate Services received a request to publish the certificate revocation list (CRL)',\n    '4872':\n    'Certificate Services published the certificate revocation list (CRL)',\n    '4873':\n    'A certificate request extension changed',\n    '4874':\n    'One or more certificate request attributes changed.',\n    '4875':\n    'Certificate Services received a request to shut down',\n    '4876':\n    'Certificate Services backup started',\n    '4877':\n    'Certificate Services backup completed',\n    '4878':\n    'Certificate Services restore started',\n    '4879':\n    'Certificate Services restore completed',\n    '4880':\n    'Certificate Services started',\n    '4881':\n    'Certificate Services stopped',\n    '4882':\n    'The security permissions for Certificate Services changed',\n    '4883':\n    'Certificate Services retrieved an archived key',\n    '4884':\n    'Certificate Services imported a certificate into its database',\n    '4885':\n    'The audit filter for Certificate Services changed',\n    '4886':\n    'Certificate Services 
received a certificate request',\n    '4887':\n    'Certificate Services approved a certificate request and issued a certificate',\n    '4888':\n    'Certificate Services denied a certificate request',\n    '4889':\n    'Certificate Services set the status of a certificate request to pending',\n    '4890':\n    'The certificate manager settings for Certificate Services changed.',\n    '4891':\n    'A configuration entry changed in Certificate Services',\n    '4892':\n    'A property of Certificate Services changed',\n    '4893':\n    'Certificate Services archived a key',\n    '4894':\n    'Certificate Services imported and archived a key',\n    '4895':\n    'Certificate Services published the CA certificate to Active Directory Domain Services',\n    '4896':\n    'One or more rows have been deleted from the certificate database',\n    '4897':\n    'Role separation enabled',\n    '4898':\n    'Certificate Services loaded a template',\n    '4899':\n    'A Certificate Services template was updated',\n    '4900':\n    'Certificate Services template security was updated',\n    '4902':\n    'The Per-user audit policy table was created',\n    '4904':\n    'An attempt was made to register a security event source',\n    '4905':\n    'An attempt was made to unregister a security event source',\n    '4906':\n    'The CrashOnAuditFail value has changed',\n    '4907':\n    'Auditing settings on object were changed',\n    '4908':\n    'Special Groups Logon table modified',\n    '4909':\n    'The local policy settings for the TBS were changed',\n    '4910':\n    'The group policy settings for the TBS were changed',\n    '4911':\n    'Resource attributes of the object were changed',\n    '4912':\n    'Per User Audit Policy was changed',\n    '4913':\n    'Central Access Policy on the object was changed',\n    '4928':\n    'An Active Directory replica source naming context was established',\n    '4929':\n    'An Active Directory replica source naming context was removed',\n    
'4930':\n    'An Active Directory replica source naming context was modified',\n    '4931':\n    'An Active Directory replica destination naming context was modified',\n    '4932':\n    'Synchronization of a replica of an Active Directory naming context has begun',\n    '4933':\n    'Synchronization of a replica of an Active Directory naming context has ended',\n    '4934':\n    'Attributes of an Active Directory object were replicated',\n    '4935':\n    'Replication failure begins',\n    '4936':\n    'Replication failure ends',\n    '4937':\n    'A lingering object was removed from a replica',\n    '4944':\n    'The following policy was active when the Windows Firewall started',\n    '4945':\n    'A rule was listed when the Windows Firewall started',\n    '4946':\n    'A change has been made to Windows Firewall exception list. A rule was added',\n    '4947':\n    'A change has been made to Windows Firewall exception list. A rule was modified',\n    '4948':\n    'A change has been made to Windows Firewall exception list. A rule was deleted',\n    '4949':\n    'Windows Firewall settings were restored to the default values',\n    '4950':\n    'A Windows Firewall setting has changed',\n    '4951':\n    'A rule has been ignored because its major version number was not recognized by Windows Firewall',\n    '4952':\n    'Parts of a rule have been ignored because its minor version number was not recognized by Windows Firewall',\n    '4953':\n    'A rule has been ignored by Windows Firewall because it could not parse the rule',\n    '4954':\n    'Windows Firewall Group Policy settings has changed. 
The new settings have been applied',\n    '4956':\n    'Windows Firewall has changed the active profile',\n    '4957':\n    'Windows Firewall did not apply the following rule',\n    '4958':\n    'Windows Firewall did not apply the following rule because the rule referred to items not configured on this computer',\n    '4960':\n    'IPsec dropped an inbound packet that failed an integrity check',\n    '4961':\n    'IPsec dropped an inbound packet that failed a replay check',\n    '4962':\n    'IPsec dropped an inbound packet that failed a replay check',\n    '4963':\n    'IPsec dropped an inbound clear text packet that should have been secured',\n    '4964':\n    'Special groups have been assigned to a new logon',\n    '4965':\n    'IPsec received a packet from a remote computer with an incorrect Security Parameter Index (SPI).',\n    '4976':\n    'During Main Mode negotiation, IPsec received an invalid negotiation packet.',\n    '4977':\n    'During Quick Mode negotiation, IPsec received an invalid negotiation packet.',\n    '4978':\n    'During Extended Mode negotiation, IPsec received an invalid negotiation packet.',\n    '4979':\n    'IPsec Main Mode and Extended Mode security associations were established.',\n    '4980':\n    'IPsec Main Mode and Extended Mode security associations were established',\n    '4981':\n    'IPsec Main Mode and Extended Mode security associations were established',\n    '4982':\n    'IPsec Main Mode and Extended Mode security associations were established',\n    '4983':\n    'An IPsec Extended Mode negotiation failed',\n    '4984':\n    'An IPsec Extended Mode negotiation failed',\n    '4985':\n    'The state of a transaction has changed',\n    '5024':\n    'The Windows Firewall Service has started successfully',\n    '5025':\n    'The Windows Firewall Service has been stopped',\n    '5027':\n    'The Windows Firewall Service was unable to retrieve the security policy from the local storage',\n    '5028':\n    'The Windows Firewall 
Service was unable to parse the new security policy.',\n    '5029':\n    'The Windows Firewall Service failed to initialize the driver',\n    '5030':\n    'The Windows Firewall Service failed to start',\n    '5031':\n    'The Windows Firewall Service blocked an application from accepting incoming connections on the network.',\n    '5032':\n    'Windows Firewall was unable to notify the user that it blocked an application from accepting incoming connections on the network',\n    '5033':\n    'The Windows Firewall Driver has started successfully',\n    '5034':\n    'The Windows Firewall Driver has been stopped',\n    '5035':\n    'The Windows Firewall Driver failed to start',\n    '5037':\n    'The Windows Firewall Driver detected critical runtime error. Terminating',\n    '5038':\n    'Code integrity determined that the image hash of a file is not valid',\n    '5039':\n    'A registry key was virtualized.',\n    '5040':\n    'A change has been made to IPsec settings. An Authentication Set was added.',\n    '5041':\n    'A change has been made to IPsec settings. An Authentication Set was modified',\n    '5042':\n    'A change has been made to IPsec settings. An Authentication Set was deleted',\n    '5043':\n    'A change has been made to IPsec settings. A Connection Security Rule was added',\n    '5044':\n    'A change has been made to IPsec settings. A Connection Security Rule was modified',\n    '5045':\n    'A change has been made to IPsec settings. A Connection Security Rule was deleted',\n    '5046':\n    'A change has been made to IPsec settings. A Crypto Set was added',\n    '5047':\n    'A change has been made to IPsec settings. A Crypto Set was modified',\n    '5048':\n    'A change has been made to IPsec settings. 
A Crypto Set was deleted',\n    '5049':\n    'An IPsec Security Association was deleted',\n    '5050':\n    'An attempt to programmatically disable the Windows Firewall using a call to INetFwProfile.FirewallEnabled(FALSE',\n    '5051':\n    'A file was virtualized',\n    '5056':\n    'A cryptographic self test was performed',\n    '5057':\n    'A cryptographic primitive operation failed',\n    '5058':\n    'Key file operation',\n    '5059':\n    'Key migration operation',\n    '5060':\n    'Verification operation failed',\n    '5061':\n    'Cryptographic operation',\n    '5062':\n    'A kernel-mode cryptographic self test was performed',\n    '5063':\n    'A cryptographic provider operation was attempted',\n    '5064':\n    'A cryptographic context operation was attempted',\n    '5065':\n    'A cryptographic context modification was attempted',\n    '5066':\n    'A cryptographic function operation was attempted',\n    '5067':\n    'A cryptographic function modification was attempted',\n    '5068':\n    'A cryptographic function provider operation was attempted',\n    '5069':\n    'A cryptographic function property operation was attempted',\n    '5070':\n    'A cryptographic function property operation was attempted',\n    '5071':\n    'Key access denied by Microsoft key distribution service',\n    '5120':\n    'OCSP Responder Service Started',\n    '5121':\n    'OCSP Responder Service Stopped',\n    '5122':\n    'A Configuration entry changed in the OCSP Responder Service',\n    '5123':\n    'A configuration entry changed in the OCSP Responder Service',\n    '5124':\n    'A security setting was updated on OCSP Responder Service',\n    '5125':\n    'A request was submitted to OCSP Responder Service',\n    '5126':\n    'Signing Certificate was automatically updated by the OCSP Responder Service',\n    '5127':\n    'The OCSP Revocation Provider successfully updated the revocation information',\n    '5136':\n    'A directory service object was modified',\n    '5137':\n 
   'A directory service object was created',\n    '5138':\n    'A directory service object was undeleted',\n    '5139':\n    'A directory service object was moved',\n    '5140':\n    'A network share object was accessed',\n    '5141':\n    'A directory service object was deleted',\n    '5142':\n    'A network share object was added.',\n    '5143':\n    'A network share object was modified',\n    '5144':\n    'A network share object was deleted.',\n    '5145':\n    'A network share object was checked to see whether client can be granted desired access',\n    '5146':\n    'The Windows Filtering Platform has blocked a packet',\n    '5147':\n    'A more restrictive Windows Filtering Platform filter has blocked a packet',\n    '5148':\n    'The Windows Filtering Platform has detected a DoS attack and entered a defensive mode; packets associated with this attack will be discarded.',\n    '5149':\n    'The DoS attack has subsided and normal processing is being resumed.',\n    '5150':\n    'The Windows Filtering Platform has blocked a packet.',\n    '5151':\n    'A more restrictive Windows Filtering Platform filter has blocked a packet.',\n    '5152':\n    'The Windows Filtering Platform blocked a packet',\n    '5153':\n    'A more restrictive Windows Filtering Platform filter has blocked a packet',\n    '5154':\n    'The Windows Filtering Platform has permitted an application or service to listen on a port for incoming connections',\n    '5155':\n    'The Windows Filtering Platform has blocked an application or service from listening on a port for incoming connections',\n    '5156':\n    'The Windows Filtering Platform has allowed a connection',\n    '5157':\n    'The Windows Filtering Platform has blocked a connection',\n    '5158':\n    'The Windows Filtering Platform has permitted a bind to a local port',\n    '5159':\n    'The Windows Filtering Platform has blocked a bind to a local port',\n    '5168':\n    'Spn check for SMB/SMB2 fails.',\n    '5169':\n    'A 
directory service object was modified',\n    '5170':\n    'A directory service object was modified during a background cleanup task',\n    '5376':\n    'Credential Manager credentials were backed up',\n    '5377':\n    'Credential Manager credentials were restored from a backup',\n    '5378':\n    'The requested credentials delegation was disallowed by policy',\n    '5440':\n    'The following callout was present when the Windows Filtering Platform Base Filtering Engine started',\n    '5441':\n    'The following filter was present when the Windows Filtering Platform Base Filtering Engine started',\n    '5442':\n    'The following provider was present when the Windows Filtering Platform Base Filtering Engine started',\n    '5443':\n    'The following provider context was present when the Windows Filtering Platform Base Filtering Engine started',\n    '5444':\n    'The following sub-layer was present when the Windows Filtering Platform Base Filtering Engine started',\n    '5446':\n    'A Windows Filtering Platform callout has been changed',\n    '5447':\n    'A Windows Filtering Platform filter has been changed',\n    '5448':\n    'A Windows Filtering Platform provider has been changed',\n    '5449':\n    'A Windows Filtering Platform provider context has been changed',\n    '5450':\n    'A Windows Filtering Platform sub-layer has been changed',\n    '5451':\n    'An IPsec Quick Mode security association was established',\n    '5452':\n    'An IPsec Quick Mode security association ended',\n    '5453':\n    'An IPsec negotiation with a remote computer failed because the IKE and AuthIP IPsec Keying Modules (IKEEXT) service is not started',\n    '5456':\n    'PAStore Engine applied Active Directory storage IPsec policy on the computer',\n    '5457':\n    'PAStore Engine failed to apply Active Directory storage IPsec policy on the computer',\n    '5458':\n    'PAStore Engine applied locally cached copy of Active Directory storage IPsec policy on the computer',\n    
'5459':\n    'PAStore Engine failed to apply locally cached copy of Active Directory storage IPsec policy on the computer',\n    '5460':\n    'PAStore Engine applied local registry storage IPsec policy on the computer',\n    '5461':\n    'PAStore Engine failed to apply local registry storage IPsec policy on the computer',\n    '5462':\n    'PAStore Engine failed to apply some rules of the active IPsec policy on the computer',\n    '5463':\n    'PAStore Engine polled for changes to the active IPsec policy and detected no changes',\n    '5464':\n    'PAStore Engine polled for changes to the active IPsec policy, detected changes, and applied them to IPsec Services',\n    '5465':\n    'PAStore Engine received a control for forced reloading of IPsec policy and processed the control successfully',\n    '5466':\n    'PAStore Engine polled for changes to the Active Directory IPsec policy, determined that Active Directory cannot be reached, and will use the cached copy of the Active Directory IPsec policy instead',\n    '5467':\n    'PAStore Engine polled for changes to the Active Directory IPsec policy, determined that Active Directory can be reached, and found no changes to the policy',\n    '5468':\n    'PAStore Engine polled for changes to the Active Directory IPsec policy, determined that Active Directory can be reached, found changes to the policy, and applied those changes',\n    '5471':\n    'PAStore Engine loaded local storage IPsec policy on the computer',\n    '5472':\n    'PAStore Engine failed to load local storage IPsec policy on the computer',\n    '5473':\n    'PAStore Engine loaded directory storage IPsec policy on the computer',\n    '5474':\n    'PAStore Engine failed to load directory storage IPsec policy on the computer',\n    '5477':\n    'PAStore Engine failed to add quick mode filter',\n    '5478':\n    'IPsec Services has started successfully',\n    '5479':\n    'IPsec Services has been shut down successfully',\n    '5480':\n    'IPsec Services 
failed to get the complete list of network interfaces on the computer',\n    '5483':\n    'IPsec Services failed to initialize RPC server. IPsec Services could not be started',\n    '5484':\n    'IPsec Services has experienced a critical failure and has been shut down',\n    '5485':\n    'IPsec Services failed to process some IPsec filters on a plug-and-play event for network interfaces',\n    '5632':\n    'A request was made to authenticate to a wireless network',\n    '5633':\n    'A request was made to authenticate to a wired network',\n    '5712':\n    'A Remote Procedure Call (RPC) was attempted',\n    '5888':\n    'An object in the COM+ Catalog was modified',\n    '5889':\n    'An object was deleted from the COM+ Catalog',\n    '5890':\n    'An object was added to the COM+ Catalog',\n    '6144':\n    'Security policy in the group policy objects has been applied successfully',\n    '6145':\n    'One or more errors occured while processing security policy in the group policy objects',\n    '6272':\n    'Network Policy Server granted access to a user',\n    '6273':\n    'Network Policy Server denied access to a user',\n    '6274':\n    'Network Policy Server discarded the request for a user',\n    '6275':\n    'Network Policy Server discarded the accounting request for a user',\n    '6276':\n    'Network Policy Server quarantined a user',\n    '6277':\n    'Network Policy Server granted access to a user but put it on probation because the host did not meet the defined health policy',\n    '6278':\n    'Network Policy Server granted full access to a user because the host met the defined health policy',\n    '6279':\n    'Network Policy Server locked the user account due to repeated failed authentication attempts',\n    '6280':\n    'Network Policy Server unlocked the user account',\n    '6281':\n    'Code Integrity determined that the page hashes of an image file are not valid...',\n    '6400':\n    'BranchCache: Received an incorrectly formatted response while 
discovering availability of content.',\n    '6401':\n    'BranchCache: Received invalid data from a peer. Data discarded.',\n    '6402':\n    'BranchCache: The message to the hosted cache offering it data is incorrectly formatted.',\n    '6403':\n    'BranchCache: The hosted cache sent an incorrectly formatted response to the clients message to offer it data.',\n    '6404':\n    'BranchCache: Hosted cache could not be authenticated using the provisioned SSL certificate.',\n    '6405':\n    'BranchCache: %2 instance(s) of event id %1 occurred.',\n    '6406':\n    '%1 registered to Windows Firewall to control filtering for the following:',\n    '6407':\n    '%1',\n    '6408':\n    'Registered product %1 failed and Windows Firewall is now controlling the filtering for %2.',\n    '6409':\n    'BranchCache: A service connection point object could not be parsed',\n    '6410':\n    'Code integrity determined that a file does not meet the security requirements to load into a process. This could be due to the use of shared sections or other issues',\n    '6416':\n    'A new external device was recognized by the system.',\n    '6417':\n    'The FIPS mode crypto selftests succeeded',\n    '6418':\n    'The FIPS mode crypto selftests failed',\n    '6419':\n    'A request was made to disable a device',\n    '6420':\n    'A device was disabled',\n    '6421':\n    'A request was made to enable a device',\n    '6422':\n    'A device was enabled',\n    '6423':\n    'The installation of this device is forbidden by system policy',\n    '6424':\n    'The installation of this device was allowed, after having previously been forbidden by policy',\n}\n\n####################### BEGIN FUNCTIONS ############################\n\n\ndef verify_file(file_location_tmp):\n    file_loc = file_location_tmp\n    file_loc = file_loc.replace(\"\\\\\\\\\", \"/\").replace(\n        \"\\\\\", \"/\").rstrip(\"/\")\n    if file_loc.count(\"/\") > 1:\n        filter_file_loc = file_loc.rstrip(\"/\")\n\n    
if not os.path.exists(file_loc):\n        print(\n            \"ERROR: \\\"\" + file_loc +\n            \"\\\" cannot be found by the system.  Please verify filename and path are correct.\")\n        print(\"Exiting...\")\n        sys.exit(1)\n    else:\n        return file_loc\n\ndef query_plaso_location():\n    # This prompts user for a plaso location and confirms it exists before returning\n    # a valided file location\n    while True:\n        sys.stdout.writelines(\n            \"Please enter valid location for Plaso directory: \")\n        p_path = input()\n        # Verify files exist\n        l2t_loc = p_path.rstrip(\"/\").rstrip().strip(\"\\\"\") + \"/log2timeline.exe\"\n        p_loc = p_path.rstrip(\"/\").rstrip().strip(\"\\\"\") + \"/psort.exe\"\n        if not os.path.isfile(l2t_loc):\n            print(\"ERROR: \" + l2t_loc + \" does not exist\")\n        else:\n            if not os.path.isfile(p_loc):\n                print(\"ERROR: \" + p_loc + \" does not exist\")\n            else:\n                return l2t_loc, p_loc\n\n\n# Ask a yes/no question via input() and return their answer.\ndef query_yes_no(args, question, default=\"yes\"):\n    if args.confirmAll:\n        if default == \"yes\":\n            return True\n        else:\n            return False\n    if default == \"yes\":\n        prompt = \" [Y/n]\"\n        yes = set(['yes', 'y', 'ye', ''])\n        no = set(['no', 'n'])\n    else:\n        prompt = \" [y/N]\"\n        yes = set(['yes', 'ye', 'y'])\n        no = set(['no', 'n', ''])\n\n    while True:\n        sys.stdout.writelines(question + prompt + \": \")\n        choice = input().lower()\n        if choice in yes:\n            return True\n        elif choice in no:\n            return False\n        else:\n            sys.stdout.write(\"Please respond with 'yes' or 'no'\")\n\n\ndef status_marker(mylogfile, myproc):\n    counter = 1\n    while myproc.poll() is None:\n        if counter % 2 == 0:\n            
sys.stdout.writelines(\"| Still working...\\r\")\n        else:\n            sys.stdout.writelines(\"- Still working...\\r\")\n        sys.stdout.flush()\n        counter += 1\n        time.sleep(1)\n\n    if myproc.poll() != 0:\n        print(\"ERROR: There was a problem. See log for details in log.\")\n        mylogfile.writelines(\n            \"ERROR: There was a problem. See details in log.\\n\")\n        print(\"\\nExiting.......\")\n        sys.exit(1)\n\n\ndef multi_thread_reports(mqueue, infile, terms):\n    for line in infile:\n        if terms[0].search(line, re.I):\n            mqueue.put(terms[1].writelines(\n                line.replace(\"\\n\", \" \").replace(\"\\r\", \" \") + \"\\n\"))\n    print(\"Report Created:\", terms[2])\n\n\ndef create_reports(args, mylogfile, dst_loc, csv_file, parser_opt):\n    start_dt = datetime.datetime.now()\n    print(\"Reporting started at: \" + str(start_dt))\n    mylogfile.writelines(\"Reporting started at: \" + str(start_dt) + \"\\n\")\n    # Create individual reports\n    print(\n        \"\\nCreating the individual reports (This will take a long time for large files)\"\n    )\n    mylogfile.writelines(\n        \"\\nCreating the individual reports (This will take a long time for large files)\\n\"\n    )\n    # Create report directory and file names\n    rpt_dir_name = dst_loc + \"/Reports\"\n    rpt_evt_name = rpt_dir_name + \"/Event Log Report.csv\"\n    rpt_fsfs_name = rpt_dir_name + \"/File System Report.csv\"\n    rpt_fsmft_name = rpt_dir_name + \"/MFT Report.csv\"\n    rpt_fsusnjrnl_name = rpt_dir_name + \"/UsnJrnl Report.csv\"\n    rpt_ih_name = rpt_dir_name + \"/Internet History Report.csv\"\n    rpt_pf_name = rpt_dir_name + \"/Prefetch Report.csv\"\n    rpt_appc_name = rpt_dir_name + \"/Appcompat Report.csv\"\n    rpt_reg_name = rpt_dir_name + \"/Registry Report.csv\"\n    rpt_st_name = rpt_dir_name + \"/Scheduled Tasks Report.csv\"\n    rpt_per_name = rpt_dir_name + \"/Persistence Report.csv\"\n    
rpt_si_name = rpt_dir_name + \"/System Information Report.csv\"\n    rpt_av_name = rpt_dir_name + \"/AntiVirus Report.csv\"\n    rpt_fw_name = rpt_dir_name + \"/Firewall Report.csv\"\n    rpt_mac_name = rpt_dir_name + \"/Mac Report.csv\"\n    rpt_lin_name = rpt_dir_name + \"/Linux Report.csv\"\n    rpt_android_name = rpt_dir_name + \"/Android Report.csv\"\n    rpt_amcache_name = rpt_dir_name + \"/Amcache Report.csv\"\n    rpt_bash_name = rpt_dir_name + \"/Bash Report.csv\"\n\n    # RC1 search strings for each report\n    rpt_evt_search = re.compile(r'winevt')\n    rpt_fsfs_search = re.compile(r'filestat|recycle_bin|fseventsd')\n    rpt_fsmft_search = re.compile(r',mft,')\n    rpt_fsusnjrnl_search = re.compile(r',usnjrnl,')\n    rpt_ih_search = re.compile(r'chrome_cache|chrome_preferences|firefox_cache|gdrive_synclog|opera_global|opera_typed_history|sqlite/chrome_27_history|sqlite/chrome_8_history|sqlite/chrome_autofill|sqlite/chrome_cookies|sqlite/chrome_extension_activity|sqlite/firefox_cookies|sqlite/firefox_downloads|sqlite/firefox_history|sqlite/google_drive|sqlite/skype|binary_cookies|esedb/msie_webcache|plist/safari_history|xchatlog|xchatscrollback')\n    rpt_pf_search = re.compile(r'prefetch')\n    rpt_appc_search = re.compile(r'appcompatcache')\n    rpt_reg_search = re.compile(r'winreg')\n    rpt_st_search = re.compile(r'winjob|windows_task_cache|cron')\n    rpt_per_search = re.compile(\n        r'bagmru|bencode|mrulist|msie_zone|mstsc_rdp|userassist|windows_bootwindows_run|windows_sam_users|windows_services|winrar_mru'\n    )\n    rpt_si_search = re.compile(\n        r'explorer_|mac_keychain|mac_securityd|mackeeper_cache|macosx_bluetooth|macosx_install_history|mactime|macuser|macwifi|network_drives|rplog|windows_shutdown|windows_timezone|windows_usb_devices|windows_usbstor_devices|windows_version'\n    )\n    rpt_av_search = re.compile(r'mcafee_protection|symantec_scanlog|sophos_av')\n    rpt_fw_search = re.compile(r'winfirewall|mac_appfirewall_log')\n    
rpt_mac_search = re.compile(\n        r'bencode|czip/oxml|dockerjson|java_idx|msiecf|olecf|pls_recall|popularity_contest|selinux|syslog|systemd_journal|utmpx|xchatlog|xchatscrollback|asl_log|bsm_log|cups_ipp|dockerjson|mac_keychain|mac_securityd|macwifi|plist|sqlite/appusage|sqlite/imessage|sqlite/ls_quarantine|sqlite/mac_document_versions|sqlite/mac_knowledgec|sqlite/mac_notes|sqlite/mackeeper_cache|systemd_journal'\n    )\n    rpt_lin_search = re.compile(\n        r'bencode|czip/oxml|dockerjson|dpkg|java_idx|msiecf|olecf|sqlite/zeitgeist|syslog|systemd_journal|utmp|xchatlog|xchatscrollback'\n    )\n    rpt_android_search = re.compile(\n        r'android_app_usage|sqlite/android_calls|sqlite/android_sms|sqlite/android_webview'\n    )\n    rpt_bash_search = re.compile(r'bash|zsh_extended_history')\n    rpt_amcache_search = re.compile(r'amcache')\n\n    # Create a list of the report names\n    if parser_opt == \"datt\":\n        lor = [\n                rpt_appc_name,\n                rpt_evt_name,\n                rpt_fsfs_name,\n                rpt_fsmft_name,\n                rpt_fsusnjrnl_name,\n                rpt_ih_name,\n                rpt_pf_name,\n                rpt_reg_name,\n                rpt_st_name,\n                rpt_per_name,\n                rpt_si_name,\n                rpt_av_name,\n                rpt_fw_name,\n                rpt_mac_name,\n                rpt_lin_name,\n                rpt_android_name,\n                rpt_amcache_name,\n                rpt_bash_name]\n    elif parser_opt == \"win\":\n        lor = [\n                rpt_appc_name,\n                rpt_evt_name,\n                rpt_fsfs_name,\n                rpt_fsmft_name,\n                rpt_fsusnjrnl_name,\n                rpt_ih_name,\n                rpt_pf_name,\n                rpt_reg_name,\n                rpt_st_name,\n                rpt_per_name,\n                rpt_si_name,\n                rpt_av_name,\n                rpt_fw_name,\n                
rpt_amcache_name,\n                rpt_bash_name]\n    elif parser_opt == \"mac\":\n        lor = [\n                rpt_fsfs_name,\n                rpt_ih_name,\n                rpt_per_name,\n                rpt_si_name,\n                rpt_av_name,\n                rpt_fw_name,\n                rpt_mac_name,\n                rpt_bash_name]\n    elif parser_opt == \"android\":\n        lor = [\n                rpt_fsfs_name,\n                rpt_ih_name,\n                rpt_per_name,\n                rpt_si_name,\n                rpt_av_name,\n                rpt_fw_name,\n                rpt_android_name]\n    else:\n        lor = [\n                rpt_fsfs_name,\n                rpt_ih_name,\n                rpt_per_name,\n                rpt_si_name,\n                rpt_av_name,\n                rpt_fw_name,\n                rpt_lin_name,\n                rpt_bash_name]\n\n    # Create Report directory\n    if not os.path.isdir(rpt_dir_name):\n        os.makedirs(rpt_dir_name)\n\n    # Check if files exist\n    create_rep = True\n    all_reports_exit = True\n    existing_report_list = []\n    for rpt_name in lor:\n        if not os.path.isfile(rpt_name):\n            all_reports_exit = False\n        else:\n            existing_report_list.append(rpt_name)\n\n    if all_reports_exit:\n        if query_yes_no(\n                args,\n                \"\\nAll sub-reports already exist.  
Would you like to delete these files?\",\n                \"no\"):\n            for rpt_name in lor:\n                os.remove(rpt_name)\n        else:\n            return\n\n    # Create list of file handles + search terms based on the parser option selected\n    if parser_opt == \"datt\":\n        # Open all report files for writing\n        rpt_appc = open(rpt_appc_name, 'a+', encoding='utf-8')\n        rpt_evt = open(rpt_evt_name, 'a+', encoding='utf-8')\n        rpt_fsfs = open(rpt_fsfs_name, 'a+', encoding='utf-8')\n        rpt_fsmft = open(rpt_fsmft_name, 'a+', encoding='utf-8')\n        rpt_fsusnjrnl = open(rpt_fsusnjrnl_name, 'a+', encoding='utf-8')\n        rpt_ih = open(rpt_ih_name, 'a+', encoding='utf-8')\n        rpt_pf = open(rpt_pf_name, 'a+', encoding='utf-8')\n        rpt_reg = open(rpt_reg_name, 'a+', encoding='utf-8')\n        rpt_st = open(rpt_st_name, 'a+', encoding='utf-8')\n        rpt_per = open(rpt_per_name, 'a+', encoding='utf-8')\n        rpt_si = open(rpt_si_name, 'a+', encoding='utf-8')\n        rpt_av = open(rpt_av_name, 'a+', encoding='utf-8')\n        rpt_fw = open(rpt_fw_name, 'a+', encoding='utf-8')\n        rpt_mac = open(rpt_mac_name, 'a+', encoding='utf-8')\n        rpt_lin = open(rpt_lin_name, 'a+', encoding='utf-8')\n        rpt_android = open(rpt_android_name, 'a+', encoding='utf-8')\n        rpt_amcache = open(rpt_amcache_name, 'a+', encoding='utf-8')\n        rpt_bash = open(rpt_bash_name, 'a+', encoding='utf-8')\n        lofh = [\n                [rpt_appc_search, rpt_appc, rpt_appc_name],\n                [rpt_evt_search, rpt_evt, rpt_evt_name],\n                [rpt_fsfs_search, rpt_fsfs, rpt_fsfs_name],\n                [rpt_fsmft_search, rpt_fsmft, rpt_fsmft_name],\n                [rpt_fsusnjrnl_search, rpt_fsusnjrnl, rpt_fsusnjrnl_name],\n                [rpt_ih_search, rpt_ih, rpt_ih_name],\n                [rpt_pf_search, rpt_pf, rpt_pf_name],\n                [rpt_reg_search, rpt_reg, rpt_reg_name],\n             
   [rpt_st_search, rpt_st, rpt_st_name],\n                [rpt_per_search, rpt_per, rpt_per_name],\n                [rpt_si_search, rpt_si, rpt_si_name],\n                [rpt_av_search, rpt_av, rpt_av_name],\n                [rpt_fw_search, rpt_fw, rpt_fw_name],\n                [rpt_mac_search, rpt_mac, rpt_mac_name],\n                [rpt_lin_search, rpt_lin, rpt_lin_name],\n                [rpt_android_search, rpt_android, rpt_android_name],\n                [rpt_amcache_search, rpt_amcache, rpt_amcache_name],\n                [rpt_bash_search, rpt_bash, rpt_bash_name]]\n    elif parser_opt == \"android\":\n        # Open all report files for writing\n        # Open Linux report files for writing\n        rpt_fsfs = open(rpt_fsfs_name, 'a+', encoding='utf-8')\n        rpt_ih = open(rpt_ih_name, 'a+', encoding='utf-8')\n        rpt_per = open(rpt_per_name, 'a+', encoding='utf-8')\n        rpt_si = open(rpt_si_name, 'a+', encoding='utf-8')\n        rpt_av = open(rpt_av_name, 'a+', encoding='utf-8')\n        rpt_fw = open(rpt_fw_name, 'a+', encoding='utf-8')\n        rpt_android = open(rpt_android_name, 'a+', encoding='utf-8')\n        lofh = [\n                [rpt_fsfs_search, rpt_fsfs, rpt_fsfs_name],\n                [rpt_ih_search, rpt_ih, rpt_ih_name],\n                [rpt_per_search, rpt_per, rpt_per_name],\n                [rpt_si_search, rpt_si, rpt_si_name],\n                [rpt_av_search, rpt_av, rpt_av_name],\n                [rpt_fw_search, rpt_fw, rpt_fw_name],\n                [rpt_android_search, rpt_android, rpt_android_name]]\n    elif parser_opt == \"win\":\n        # Open windows report files for writing\n        rpt_appc = open(rpt_appc_name, 'a+', encoding='utf-8')\n        rpt_evt = open(rpt_evt_name, 'a+', encoding='utf-8')\n        rpt_fsfs = open(rpt_fsfs_name, 'a+', encoding='utf-8')\n        rpt_fsmft = open(rpt_fsmft_name, 'a+', encoding='utf-8')\n        rpt_fsusnjrnl = open(rpt_fsusnjrnl_name, 'a+', encoding='utf-8')\n        
rpt_ih = open(rpt_ih_name, 'a+', encoding='utf-8')\n        rpt_pf = open(rpt_pf_name, 'a+', encoding='utf-8')\n        rpt_reg = open(rpt_reg_name, 'a+', encoding='utf-8')\n        rpt_st = open(rpt_st_name, 'a+', encoding='utf-8')\n        rpt_per = open(rpt_per_name, 'a+', encoding='utf-8')\n        rpt_si = open(rpt_si_name, 'a+', encoding='utf-8')\n        rpt_av = open(rpt_av_name, 'a+', encoding='utf-8')\n        rpt_fw = open(rpt_fw_name, 'a+', encoding='utf-8')\n        rpt_amcache = open(rpt_amcache_name, 'a+', encoding='utf-8')\n        rpt_bash = open(rpt_bash_name, 'a+', encoding='utf-8')\n        lofh = [\n                [rpt_appc_search, rpt_appc, rpt_appc_name],\n                [rpt_evt_search, rpt_evt, rpt_evt_name],\n                [rpt_fsfs_search, rpt_fsfs, rpt_fsfs_name],\n                [rpt_fsmft_search, rpt_fsmft, rpt_fsmft_name],\n                [rpt_fsusnjrnl_search, rpt_fsusnjrnl, rpt_fsusnjrnl_name],\n                [rpt_ih_search, rpt_ih, rpt_ih_name],\n                [rpt_pf_search, rpt_pf, rpt_pf_name],\n                [rpt_reg_search, rpt_reg, rpt_reg_name],\n                [rpt_st_search, rpt_st, rpt_st_name],\n                [rpt_per_search, rpt_per, rpt_per_name],\n                [rpt_si_search, rpt_si, rpt_si_name],\n                [rpt_av_search, rpt_av, rpt_av_name],\n                [rpt_fw_search, rpt_fw, rpt_fw_name],\n                [rpt_amcache_search, rpt_amcache, rpt_amcache_name],\n                [rpt_bash_search, rpt_bash, rpt_bash_name]]\n    elif parser_opt == \"mac\":\n        # Open Mac report files for writing\n        rpt_fsfs = open(rpt_fsfs_name, 'a+', encoding='utf-8')\n        rpt_ih = open(rpt_ih_name, 'a+', encoding='utf-8')\n        rpt_per = open(rpt_per_name, 'a+', encoding='utf-8')\n        rpt_si = open(rpt_si_name, 'a+', encoding='utf-8')\n        rpt_av = open(rpt_av_name, 'a+', encoding='utf-8')\n        rpt_fw = open(rpt_fw_name, 'a+', encoding='utf-8')\n        rpt_mac = 
open(rpt_mac_name, 'a+', encoding='utf-8')\n        rpt_bash = open(rpt_bash_name, 'a+', encoding='utf-8')\n\n        lofh = [\n                [rpt_fsfs_search, rpt_fsfs, rpt_fsfs_name],\n                [rpt_ih_search, rpt_ih, rpt_ih_name],\n                [rpt_per_search, rpt_per, rpt_per_name],\n                [rpt_si_search, rpt_si, rpt_si_name],\n                [rpt_av_search, rpt_av, rpt_av_name],\n                [rpt_fw_search, rpt_fw, rpt_fw_name],\n                [rpt_mac_search, rpt_mac, rpt_mac_name],\n                [rpt_bash_search, rpt_bash, rpt_bash_name]]\n    else:\n        # Open Linux report files for writing\n        rpt_fsfs = open(rpt_fsfs_name, 'a+', encoding='utf-8')\n        rpt_ih = open(rpt_ih_name, 'a+', encoding='utf-8')\n        rpt_per = open(rpt_per_name, 'a+', encoding='utf-8')\n        rpt_si = open(rpt_si_name, 'a+', encoding='utf-8')\n        rpt_av = open(rpt_av_name, 'a+', encoding='utf-8')\n        rpt_fw = open(rpt_fw_name, 'a+', encoding='utf-8')\n        rpt_lin = open(rpt_lin_name, 'a+', encoding='utf-8')\n        rpt_bash = open(rpt_bash_name, 'a+', encoding='utf-8')\n        lofh = [\n                [rpt_fsfs_search, rpt_fsfs, rpt_fsfs_name],\n                [rpt_ih_search, rpt_ih, rpt_ih_name],\n                [rpt_per_search, rpt_per, rpt_per_name],\n                [rpt_si_search, rpt_si, rpt_si_name],\n                [rpt_av_search, rpt_av, rpt_av_name],\n                [rpt_fw_search, rpt_fw, rpt_fw_name],\n                [rpt_lin_search, rpt_lin, rpt_lin_name],\n                [rpt_bash_search, rpt_bash, rpt_bash_name]]\n\n    # Write the header line in each new report file\n    for item in lofh:\n        if os.stat(item[2]).st_size == 0:\n            item[1].writelines(\n                \"date,time,timezone,MACB,source,sourcetype,type,user,host,short,desc,version,filename,inode,notes,format,extra\\n\"\n            )\n\n    if not os.path.isfile(csv_file):\n        print(\"File not found\", 
csv_file)\n        mylogfile.writelines(\"File not found \" + csv_file + \"\\n\")\n        sys.exit(1)\n\n    # Run each search for each report (in parallel) and write the results to the report CSV files\n    counter = 1\n    counter2 = True\n    mqueue = queue.Queue()\n    #Open file and read to memory\n    SuperTimeline_file = io.open(csv_file, 'r', encoding='utf-8').readlines()\n    # Create all threads to start\n    threads = []\n    for terms in lofh:\n        threads.append(\n            threading.Thread(\n                target=multi_thread_reports,\n                args=(mqueue, SuperTimeline_file, terms)))\n\n    [t.start() for t in threads]\n    [t.join() for t in threads]\n\n    # Close all report files\n    for item in lofh:\n        item[1].close()\n\n    # Removing files with no output\n    final_lor = []\n    final_lor_nodata = []\n    for i_filename in lor:\n        if os.stat(i_filename).st_size == 111:\n            os.remove(i_filename)\n            final_lor_nodata.append(i_filename)\n        else:\n            final_lor.append(i_filename)\n\n    # Print report not created messages\n    print(\"\\nDid not keep \" + str(len(final_lor_nodata)) +\n          \" Reports due to no matching data from SuperTimeline\")\n    mylogfile.writelines(\n        \"\\nDid not keep \" + str(len(final_lor_nodata)) +\n        \" Reports due to no matching data from SuperTimeline\\n\")\n    for item in final_lor_nodata:\n        print(\"Report not kept: \" + item)\n        mylogfile.writelines(\"Report not kept:\" + item + \"\\n\")\n\n    # Print report created messages\n    print(\"\\nCreated \" + str(len(final_lor)) + \" Reports.  
Now improving them\")\n    mylogfile.writelines(\"\\nCreated \" + str(len(final_lor)) + \" Reports.\")\n\n    # Function to improve reports (in parallel)\n    print(\n        \"Improving Reports if possible (This will take a long time for large files)\"\n    )\n    mqueue.put(\n        mylogfile.writelines(\n            \"Improving Reports if possible (This will take a long time for large files)\"\n            + \"\\n\"))\n    report_improvements(lor, mylogfile)\n\n    print(\"\\nAll reporting complete\")\n    mylogfile.writelines(\"\\nAll reporting complete\\n\")\n    end_dt = datetime.datetime.now()\n    duration02 = end_dt - start_dt\n    print(\"Reporting ended at: \" + str(end_dt))\n    print(\"Reporting duration was: \" + str(duration02))\n    mylogfile.writelines(\"Reporting ended at: \" + str(end_dt) + \"\\n\")\n    mylogfile.writelines(\"Reporting duration was: \" + str(duration02) + \"\\n\")\n    return\n\n\ndef plaso_version(log2timeline_location):\n    newout = subprocess.check_output(\n        [log2timeline_location, \"--version\"],\n        stderr=subprocess.STDOUT).decode(\"utf-8\")\n    pver_out = \".\".join(str(newout).split(\" \")[-1].split(\".\")[0:2]).rstrip(\n        \"\\\\n\\'\").rstrip(\"\\\\r\").strip()\n    return (pver_out)\n\n\ndef output_elasticsearch(mylogfile, srcfilename, casename, psort_location,\n                         server, port, user, logname):\n    # Run psort against plaso db file to output to an ElasticSearch server running on the localhost\n    print(\"Exporting results in Kibana format to the ElasticSearch server\")\n    mylogfile.writelines(\n        \"Exporting results in Kibana format to the ElasticSearch server\\n\")\n\n    # Create psort command to run\n    command = [\n        psort_location, \"-o\", \"elastic\", \"--status_view\", \"none\",\n        \"--index_name\", \"case_cdqr-\" + casename.lower(), \"--logfile\", logname, \"--server\", server,\n        \"--port\", port, srcfilename\n    ]\n    if user != \"\":\n 
       command.append(\"--elastic_user\")\n        command.append(user)\n\n    print(\"\\\"\" + \"\\\" \\\"\".join(command) + \"\\\"\")\n    mylogfile.writelines(\"\\\"\" + \"\\\" \\\"\".join(command) + \"\\\"\" + \"\\n\")\n\n    # Execute Command\n    status_marker(mylogfile,\n                  subprocess.Popen(\n                      command, stdout=mylogfile, stderr=mylogfile))\n\n    print(\"All entries have been inserted into database with case: \" +\n          \"case_cdqr-\" + casename.lower())\n    mylogfile.writelines(\n        \"All entries have been inserted into database with case: \" +\n        \"case_cdqr-\" + casename.lower() + \"\\n\")\n\n\ndef output_elasticsearch_ts(mylogfile, srcfilename, casename, psort_location, logname):\n    # Run psort against plaso db file to output to an ElasticSearch server running on the localhost\n    print(\"Exporting results in TimeSketch format to the ElasticSearch server\")\n    mylogfile.writelines(\n        \"Exporting results in TimeSketch format to the ElasticSearch server\\n\")\n\n    # Create command to run\n    command = [\n        psort_location, \"-o\", \"timesketch\", \"--status_view\", \"none\",\n        \"--logfile\", logname, \"--name\", casename.lower(),\n        \"--index\", casename.lower(), srcfilename\n    ]\n\n    print(\"\\\"\" + \"\\\" \\\"\".join(command) + \"\\\"\")\n    mylogfile.writelines(\"\\\"\" + \"\\\" \\\"\".join(command) + \"\\\"\" + \"\\n\")\n\n    # Execute Command\n    status_marker(mylogfile,\n                  subprocess.Popen(\n                      command, stdout=mylogfile, stderr=mylogfile))\n\n    print(\"All entries have been inserted into TimeSketch database with case: \"\n          + casename.lower())\n    mylogfile.writelines(\n        \"All entries have been inserted into TimeSketch database with case: \" +\n        casename.lower() + \"\\n\")\n\n\ndef zip_source(inputfile, outputzip):\n    try:\n        with zipfile.ZipFile(outputzip, \"w\") as zip_ref:\n            
def zip_source(inputfile, outputzip):
    """Compress a single file into a new zip archive at `outputzip`.

    Exits the program (status 1) if compression fails.  `compression` is
    a module-level setting defined elsewhere in this file.
    """
    try:
        archive = zipfile.ZipFile(outputzip, "w")
        try:
            archive.write(inputfile, compress_type=compression)
        finally:
            archive.close()
        return
    except Exception as err:
        print("Unable to compress file: " + inputfile)
        print(err)
        sys.exit(1)


def unzip_source(src_loc_tmp, outputzipfolder):
    """Extract the archive `src_loc_tmp` into `outputzipfolder`.

    Returns `outputzipfolder` on success; exits the program (status 1)
    on failure.
    """
    try:
        target = os.path.abspath(outputzipfolder)
        if sys.platform[0:3] == "win":
            # Extended-length path prefix so Windows tolerates long paths.
            target = u'\\\\?\\' + target
        with zipfile.ZipFile(src_loc_tmp, "r") as zip_ref:
            zip_ref.extractall(target)
        return outputzipfolder
    except Exception as err:
        print("Unable to extract file: " + src_loc_tmp)
        print(err)
        sys.exit(1)


def create_export(dst_loc, srcfilename, mylogfile, db_file, psort_location, logname):
    """Export the plaso store to a line-delimited JSON file via psort.

    Returns the name of the ".json.zip" file the caller is expected to
    build from the raw ".json" psort output.  Prompts (via the global
    `args` and query_yes_no) before clobbering an existing export.
    """
    # Build output names from the source file's basename.
    basename = srcfilename.split("/")[-1]
    dstrawfilename = dst_loc + "/" + basename + ".json"
    dstfilename = dst_loc + "/" + basename + ".json.zip"
    if os.path.exists(dstfilename):
        prompt = "\n" + dstfilename + " already exists.  Would you like to delete that file?"
        if query_yes_no(args, prompt, "yes"):
            os.remove(dstfilename)

    # Run psort against plaso db file to output a file in line delimited json format
    print("Creating json line delimited file")
    mylogfile.writelines("Creating json line delimited file\n")

    command = [
        psort_location, "-o", "json_line", "--status_view", "none", db_file,
        "--logfile", logname, "-w", dstrawfilename
    ]

    # Echo the exact quoted command for the audit log.
    quoted = "\"" + "\" \"".join(command) + "\""
    print(quoted)
    mylogfile.writelines(quoted + "\n")

    status_marker(mylogfile,
                  subprocess.Popen(
                      command, stdout=mylogfile, stderr=mylogfile))

    print("Json line delimited file created")
    mylogfile.writelines("Json line delimited file created" + "\n")

    return dstfilename


def get_parser_list(parser_opt, plaso_ver, args):
    """Return the comma-separated parser list for the selected profile.

    NOTE(review): the dictionary lookup uses the incoming parser_opt;
    an args.parser override only influences the Windows-extras check
    below — confirm this ordering is intentional.
    """
    selected = parse_optionslatest[parser_opt]
    if args.parser:
        parser_opt = args.parser[0]
    if parser_opt == "win":
        # Optional Windows-only parsers, appended in a fixed order.
        for enabled, extra_parser in ((args.mft, "mft"), (args.usnjrnl, "usnjrnl")):
            if enabled:
                selected = selected + "," + extra_parser
    return selected
def prefetch_report_fix(row):
    """Rewrite the desc/extra columns of one Prefetch-report row into
    comma-separated sub-fields.

    row: a SuperTimeline CSV line already split on ','.  Rows whose
    sourcetype (row[5]) is "WinPrefetch" get the full prefetch parse;
    any other row falls back to a generic "Serial number / Origin"
    pattern.  Returns the mutated row.
    """
    # Column indexes of the desc and extra fields come from the shared
    # report dictionary so they stay in sync with the header rewrite.
    header_desc_rows = report_header_dict['Prefetch Report.csv'][0][0]
    header_extra_rows = report_header_dict['Prefetch Report.csv'][1][0]

    if row[5] == "WinPrefetch":
        search_desc = re.compile(
            r'Prefetch \[(.{1,200})\](.{1,20}) - run count (\d{1,10})( (path): (.{1,200})|) (hash): (.{1,15}) (volume): (\d{1,10}) \[(serial number): (.{1,20})  (device path): (.+)\]'
        )
        search_extra = re.compile(
            r'(md5_hash): (.{1,100})  (number_of_volumes): (\d{1,10})  (version): (\d{1,10})  (volume_device_paths): \[u.(.{1,100}).\]  (volume_serial_numbers): \[(.+)\]'
        )
    else:
        search_desc = re.compile(
            r'(.{1,200}) (Serial number): (.{1,15}) (Origin): (.+)')
        search_extra = re.compile(r'(md5_hash): (.+) ')

    search_results_desc = re.search(search_desc, row[header_desc_rows])

    if row[5] == "WinPrefetch":
        if search_results_desc:
            # group(4) is the optional " path: ..." clause; when it is the
            # empty alternative the path sub-field is emitted empty (",,").
            if search_results_desc.group(4) == '':
                row[header_desc_rows] = search_results_desc.group(1) + "," + search_results_desc.group(3) + ",," + search_results_desc.group(8) + "," + search_results_desc.group(10) + "," + search_results_desc.group(12) + "," + search_results_desc.group(14) + ","
            else:
                row[header_desc_rows] = search_results_desc.group(1) + "," + search_results_desc.group(3) + "," + search_results_desc.group(6) + "," + search_results_desc.group(8) + "," + search_results_desc.group(10) + "," + search_results_desc.group(12) + "," + search_results_desc.group(14) + ","

        search_results_extra = re.search(
            search_extra, row[header_extra_rows]
        )  # 'md5_hash','number_of_volumes','version','volume_device_paths','volume_serial_numbers'
        if search_results_extra:
            row[header_extra_rows] = search_results_extra.group(2) + "," + search_results_extra.group(4) + "," + search_results_extra.group(6) + "," + search_results_extra.group(8) + "," + search_results_extra.group(10)
    else:
        if search_results_desc:
            # Generic row: pad the first four prefetch sub-fields empty.
            row[header_desc_rows] = ",,,," + search_results_desc.group(1) + "," + search_results_desc.group(3) + ",," + search_results_desc.group(5)

        search_results_extra = re.search(search_extra, row[header_extra_rows])
        if search_results_extra:
            row[header_extra_rows] = search_results_extra.group(2) + ",,,,"

    # Strip the "OS:" prefix psort puts in the filename column.
    row[12] = row[12].replace('OS:', '')
    return row


def appcompat_report_fix(row):
    """Rewrite the desc/extra columns of one Appcompat (shimcache) row.

    Splits the desc field into source, cached-entry order, full path and
    bare filename; reduces extra to the md5 hash.  Returns the mutated
    row.
    """
    header_desc_rows = report_header_dict['Appcompat Report.csv'][0][0]
    search_desc = re.compile(
        r'\[(.{1,100})\] (Cached entry): (\d+) (Path): (.+)')

    header_extra_rows = report_header_dict['Appcompat Report.csv'][1][0]
    search_extra = re.compile(r'(md5_hash): (.{1,50})')
    search_results_desc = re.search(search_desc, row[header_desc_rows])
    if search_results_desc:
        # Last sub-field is the basename of the cached path.
        row[header_desc_rows] = search_results_desc.group(1) + "," + search_results_desc.group(3) + "," + search_results_desc.group(5) + "," + search_results_desc.group(5).split('\\')[-1]

    search_results_extra = re.search(search_extra, row[header_extra_rows])
    if search_results_extra:
        row[header_extra_rows] = search_results_extra.group(2).strip()

    # Strip the "OS:" prefix psort puts in the filename column.
    row[12] = row[12].replace('OS:', '')
    return row
def event_log_report_fix(
        row
):  #'Event Log Report.csv':[[10,['event_id','EID_desc','record_number','event_level','source_name','computer_name','message']]
    """Rewrite the desc/extra columns of one Event Log row.

    Only rows whose source (row[4]) is "EVT" are parsed; other rows get
    empty placeholder sub-fields (unless this is the header row, whose
    desc column is literally "desc").  Returns the mutated row.
    """
    header_desc_rows = report_header_dict['Event Log Report.csv'][0][0]
    header_extra_rows = report_header_dict['Event Log Report.csv'][1][0]
    if row[4] == "EVT":
        search_desc = re.compile(
            r'\[(.{1,8}) /.{1,100} (Record Number): (.{1,10}) (Event Level): (.{1,10}) (Source Name): (.{1,300}) (Computer Name): (.{1,100}) (Strings|Message string): (\[(.+)\]|.+)'
        )
        search_extra = re.compile(
            r'(md5_hash): (.{1,50}) (message_identifier): (.{1,20}) (recovered): (True|False)  (strings_parsed): ({}  (user_sid): (.{1,75}) (xml_string): (.+)|.+)'
        )

        search_results_desc = re.search(search_desc, row[header_desc_rows])
        if search_results_desc:
            # Look up a human-readable description for the event ID;
            # unknown IDs get an empty description.
            try:
                eventlog_string = eventlog_dict[search_results_desc.group(1)]
            except:
                eventlog_string = ""
            # Flatten any embedded CR/LF in the message so the CSV row
            # stays on one line.
            row[header_desc_rows] = search_results_desc.group(
                1) + "," + eventlog_string + "," + search_results_desc.group(
                    3) + "," + search_results_desc.group(
                        5) + "," + search_results_desc.group(
                            7) + "," + search_results_desc.group(9) + "," + (
                                (str(search_results_desc.group(12))).replace(
                                    "\r", " ")).replace("\n", " ")
        search_results_extra = re.search(search_extra, row[header_extra_rows])
        if search_results_extra:
            row[header_extra_rows] = search_results_extra.group(
                2) + "," + search_results_extra.group(
                    4) + "," + search_results_extra.group(
                        6) + "," + search_results_extra.group(8) + "," + str(
                            search_results_extra.group(10)) + "," + (
                                (str(search_results_extra.group(12))).replace(
                                    "\r", " ")).replace("\n", " ")
    else:
        # Non-EVT data rows get empty placeholders so the column count
        # still matches the rewritten header; the header row itself
        # (desc column == "desc") is left for the caller to rewrite.
        if row[header_desc_rows] != "desc":
            row[header_desc_rows] = ",,,,,,"
            row[header_extra_rows] = ",,,,,"
    # Strip the "OS:" prefix psort puts in the filename column.
    row[12] = row[12].replace('OS:', '')
    return row


def scheduled_tasks_report_fix(row):
    """Rewrite the desc/extra columns of one Scheduled Tasks row.

    Handles both registry-style entries ("[key] Task: ... [ID: {...}]")
    and plain "Task: ... [Identifier: {...}]" entries.  Returns the
    mutated row.
    """
    header_desc_rows = report_header_dict['Scheduled Tasks Report.csv'][0][0]
    search_desc = re.compile(
        r'(\[(.{1,200})\] (Task): (.{1,200}): \[(ID): \{(.{1,100})\}\]|(Task): (.{1,200}) \[(Identifier): \{(.{1,100})\}\])'
    )

    header_extra_rows = report_header_dict['Scheduled Tasks Report.csv'][1][0]
    search_extra = re.compile(r'(md5_hash): (.+) ')

    search_results_desc = re.search(search_desc, row[header_desc_rows])
    if search_results_desc:
        # A match beginning with "Task" took the second alternative
        # (no registry key), so the key sub-field stays empty.
        if search_results_desc.group(1)[0:4] == "Task":
            row[header_desc_rows] = "," + search_results_desc.group(
                8) + "," + search_results_desc.group(10)
        else:
            row[header_desc_rows] = search_results_desc.group(
                2) + "," + search_results_desc.group(
                    4) + "," + search_results_desc.group(6)

    search_results_extra = re.search(search_extra, row[header_extra_rows])
    if search_results_extra:
        row[header_extra_rows] = search_results_extra.group(2)

    return row
def file_system_report_fix(row):
    """Rewrite the desc/extra columns of one File System report row.

    Rows whose first column is "" or "--" carry no timestamp data and
    are replaced with a 21-column empty row; everything else gets its
    desc split into filename/type and its extra split into
    file_size/file_system_type/is_allocated/md5_hash.
    """
    # BUGFIX: was `row[0] is not "" and row[0] is not "--"` — identity
    # comparison with string literals depends on CPython interning
    # (and raises SyntaxWarning on Python 3.8+); use equality instead.
    if row[0] != "" and row[0] != "--":
        header_desc_rows = report_header_dict['File System Report.csv'][0][0]
        FS_search_desc = re.compile(r'(..):(.{1,500})(Type):(.{1,100})')

        header_extra_rows = report_header_dict['File System Report.csv'][1][0]
        FS_search_extra = re.compile(
            r'(file_size): \((.{1,50}) \)  (file_system_type): (.{1,20})  (is_allocated): (True|False)(  (md5_hash): (.+) |)'
        )

        search_results_desc = re.search(FS_search_desc, row[header_desc_rows])
        if search_results_desc:
            row[header_desc_rows] = search_results_desc.group(
                2) + "," + search_results_desc.group(4)
        search_results_extra = re.search(FS_search_extra,
                                         row[header_extra_rows])

        if search_results_extra:
            # group(7) is the optional trailing md5_hash clause.
            if search_results_extra.group(7) != '':
                row[header_extra_rows] = search_results_extra.group(
                    2) + "," + search_results_extra.group(
                        4) + "," + search_results_extra.group(
                            6) + "," + search_results_extra.group(9)
            else:
                row[header_extra_rows] = search_results_extra.group(
                    2) + "," + search_results_extra.group(
                        4) + "," + search_results_extra.group(6) + ","
        return row
    else:
        # Blank/"--" rows: return an all-empty 21-column row.
        return [
            "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "",
            "", "", "", ""
        ]


def mft_report_fix(row):
    """Rewrite the desc/extra columns of one MFT report row.

    Rows whose source (row[4]) is "FILE" get the full $MFT attribute
    parse; other rows fall back to a generic "MAC address / Origin"
    pattern.  Returns the mutated row.
    """
    header_desc_rows = report_header_dict['MFT Report.csv'][0][0]
    header_extra_rows = report_header_dict['MFT Report.csv'][1][0]

    if row[4] == "FILE":
        search_desc = re.compile(
            r'(.{1,100}) (File reference): (.{1,100}) (Attribute name): (\$STANDARD_INFORMATION|\$FILE_NAME)( |)((Name): (.{1,200}) (Parent file reference): (.+)|(\((unallocated)|))'
        )
        search_extra = re.compile(
            r'(attribute_type): (.{1,20}) (file_attribute_flags): (.{1,20}) (file_system_type): (.{1,20}) (is_allocated): (True|False)  (md5_hash): (.+) '
        )
    else:
        search_desc = re.compile(
            r'((.{1,100}) (MAC address): (.{1,20}) (Origin): (.+))')
        search_extra = re.compile(r'(md5_hash): (.+) ')

    search_results_desc = re.search(search_desc, row[header_desc_rows])

    if row[4] == "FILE":
        if search_results_desc:
            if search_results_desc.group(5) == "$FILE_NAME":
                # NOTE(review): rstrip(r" (unallocated)") strips that SET of
                # characters, not the literal suffix — it can eat trailing
                # letters from the parent reference; confirm before changing.
                row[header_desc_rows] = search_results_desc.group(
                    3) + "," + search_results_desc.group(
                        5) + "," + search_results_desc.group(
                            9) + "," + search_results_desc.group(11).rstrip(
                                r" (unallocated)") + ","
            else:
                row[header_desc_rows] = search_results_desc.group(
                    3) + "," + search_results_desc.group(5) + ",,,"

        search_results_extra = re.search(search_extra, row[header_extra_rows])
        if search_results_extra:
            row[header_extra_rows] = search_results_extra.group(
                2) + "," + search_results_extra.group(
                    4) + "," + search_results_extra.group(
                        6) + "," + search_results_extra.group(
                            8) + "," + search_results_extra.group(10)
    else:
        if search_results_desc:
            row[header_desc_rows] = ",,,," + search_results_desc.group(1)

        search_results_extra = re.search(search_extra, row[header_extra_rows])
        if search_results_extra:
            row[header_extra_rows] = search_results_extra.group(2)

    # Strip the "OS:" prefix psort puts in the filename column.
    row[12] = row[12].replace('OS:', '')

    return row


def fix_line(row, report_name):
    """Drop the raw desc/extra (and related) columns that the per-report
    fixers replaced with expanded sub-fields.

    Deletions are positional and order-dependent (each `del` shifts the
    indexes of everything after it).  Unknown report names are returned
    unchanged.
    """
    if report_name == 'File System Report.csv':
        del row[9]
        del row[10]
        del row[11]
        del row[11]
        del row[10]
    elif report_name == 'Scheduled Tasks Report.csv':
        del row[9]
        del row[12]
        del row[12]
        del row[11]
    elif report_name == 'Event Log Report.csv':
        del row[9]
        del row[12]
        del row[12]
        del row[10]
    elif report_name == 'Appcompat Report.csv':
        del row[3]
        del row[3]
        del row[3]
        del row[4]
        del row[4]
        del row[4]
        del row[5]
        del row[5]
        del row[5]
        del row[5]
    elif report_name == 'MFT Report.csv':
        del row[9]
        del row[12]
        del row[12]
        del row[10]
    elif report_name == 'Prefetch Report.csv':
        del row[9]
        del row[12]
        del row[12]
        del row[10]
    return row
# Report Dictionary (by OS)
#
# Maps a report CSV filename to:
#   [[desc_column_index, [replacement desc sub-column names]],
#    [extra_column_index, [replacement extra sub-column names]],
#    row-fixing function]
# The commented-out keys are reports that currently have no fixer.

report_header_dict = {
    'Appcompat Report.csv':
    [[10, ['source', 'cached_entry_order', 'full_path', 'filename']],
     [16, ['md5_hash']], appcompat_report_fix],
    'Event Log Report.csv': [[
        10, [
            'event_id', 'EID_desc', 'record_number', 'event_level',
            'source_name', 'computer_name', 'message'
        ]
    ], [
        16, [
            'md5_hash', 'message_id', 'recovered', 'strings_parsed',
            'user_sid', 'xml_string'
        ]
    ], event_log_report_fix],
    'File System Report.csv': [[10, ['filename', 'Type']], [
        16, ['file_size', 'file_system_type', 'is_allocated', 'md5_hash']
    ], file_system_report_fix],
    'MFT Report.csv': [[
        10, [
            'File_reference', 'Attribute_name', 'Name',
            'Parent_file_reference', 'Log_info'
        ]
    ], [
        16, [
            'attribute_type', 'file_attribute_flags', 'file_system_type',
            'is_allocated', 'md5_hash'
        ]
    ], mft_report_fix],
    #    'UsnJrnl Report.csv':[],
    #    'Internet History Report.csv':[],
    'Prefetch Report.csv': [[
        10, [
            'File_name', 'Run_count', 'path', 'hash', 'volume',
            'Serial number', 'Device_path', 'Origin'
        ]
    ], [
        16, [
            'md5_hash', 'number_of_volumes', 'version', 'volume_device_paths',
            'volume_serial_numbers'
        ]
    ], prefetch_report_fix],
    #    'Registry Report.csv':[],
    'Scheduled Tasks Report.csv': [[10, ['key', 'task', 'identification']],
                                   [16,
                                    ['md5_hash']], scheduled_tasks_report_fix],
    #    'Persistence Report.csv':[],
    #    'System Information Report.csv':[],
    #    'AntiVirus Report.csv':[],
    #    'Firewall Report.csv':[],
    #    'Login Report.csv':[]
}


# Report Improvement Multi-threading
def multi_thread_report_improve(mqueue, mylogfile, report, report_name,
                                tmp_report_name):
    """Worker: run the per-report fixer over every row of `report`,
    writing the improved rows to `tmp_report_name` and then copying the
    result back over the original file.

    NOTE(review): the mqueue.put(...) wrappers enqueue the return value
    of writelines/copyfile/remove, which is always None — the queue
    appears to act only as a synchronization artifact; confirm intent.
    """
    output_list = []
    #mqueue.put(terms[1].writelines(line.replace("\n"," ").replace("\r"," ")+"\n"))
    with io.open(report, 'r', encoding='utf-8') as csvfile:
        # Naive CSV split — assumes field values contain no commas.
        for trow in csvfile:
            row = trow.split(',')
            output_list.append((report_header_dict[report_name][2](row)))
        # Print Report to file
        newreport = open(tmp_report_name, 'w', encoding='utf-8')
        for line in output_list:
            # The header row ("desc" in column 10) gets its desc/extra
            # columns replaced with the expanded sub-column names.
            if line[10] == 'desc':
                for thing in report_header_dict[report_name]:
                    if isinstance(thing, list):
                        line[thing[0]] = ','.join(thing[1])
            mqueue.put(
                newreport.writelines(
                    ','.join(fix_line(line, report_name)).replace(
                        "\n", " ").replace("\r", " ") + "\n"))
        newreport.close()

        # Only replace the original report if the temp file is non-empty.
        if os.stat(tmp_report_name).st_size != 0:
            mqueue.put(shutil.copyfile(tmp_report_name, report))
            mqueue.put(os.remove(tmp_report_name))
        print(str(report_name) + ":    Complete")
        mqueue.put(
            mylogfile.writelines(str(report_name) + ":    Complete" + "\n"))
    return


# Report Improvements Function
def report_improvements(lor, mylogfile):
    """Spawn one multi_thread_report_improve worker per improvable report
    in `lor` (list of report paths) and wait for all of them.
    """
    mqueue = queue.Queue()
    threads = []
    for report in lor:
        lonf = []
        report_name = report.split('/')[-1]
        tmp_report_name = os.path.dirname(
            report) + "/tmp_" + report_name + ".csv"
        # Avoid producing an absolute "/tmp_..." path when the report has
        # no directory component.
        if tmp_report_name[0] == '/':
            tmp_report_name = tmp_report_name[1:]
        # Only reports with a registered fixer (and that still exist on
        # disk) are improved.
        if report_name in report_header_dict:
            if os.path.exists(report):
                lonf.append([report, report_name, tmp_report_name])
        for nfile in lonf:
            threads.append(
                threading.Thread(
                    target=multi_thread_report_improve,
                    args=(mqueue, mylogfile, nfile[0], nfile[1], nfile[2])))

    [t.start() for t in threads]
    [t.join() for t in threads]
    return


# This processes the image using parser option selected and creates .plaso file
def parse_the_things(args, mylogfile, command1, db_file, unzipped_file,
                     unzipped_file_loc, csv_file):
    """Run the prepared log2timeline command (`command1`) to build the
    .plaso store, after optionally clearing previous output files, then
    clean up any temporary uncompressed source directory.
    """
    # Check if the database and supertimeline files already exists and ask to keep or delete them if they do
    if os.path.isfile(db_file):
        if query_yes_no(
                args, "\n" + db_file +
                " already exists.  Would you like to delete this file?", "no"):
            print("Removing the existing file: " + db_file)
            mylogfile.writelines("Removing the existing file: " + db_file +
                                 "\n")
            os.remove(db_file)
            # A stale SuperTimeline (and its report directory) are only
            # removed together with the database file.
            if os.path.isfile(csv_file):
                print("Removing the existing file: " + csv_file)
                mylogfile.writelines("Removing the existing file: " + csv_file
                                     + "\n")
                os.remove(csv_file)
                rpt_dir_name = dst_loc + "/Reports"
                if os.path.isdir(rpt_dir_name):
                    print("Removing the existing report directory: " +
                          rpt_dir_name)
                    mylogfile.writelines(
                        "Removing the existing report directory: " +
                        rpt_dir_name + "\n")
                    # Extended-length prefix for long Windows paths.
                    if sys.platform[0:3] == "win":
                        shutil.rmtree(u'\\\\?\\' +
                                      os.path.abspath(rpt_dir_name))
                    else:
                        shutil.rmtree(rpt_dir_name)
        else:
            print("Keeping the existing file: " + db_file)
            mylogfile.writelines("Keeping the existing file: " + db_file)
#            return

    # Process image with log2timeline
    start_dt = datetime.datetime.now()
    print("Processing started at: " + str(start_dt))
    mylogfile.writelines("Processing started at: " + str(start_dt) + "\n")
    print("Parsing image")
    mylogfile.writelines("Parsing image" + "\n")
    print("\"" + "\" \"".join(command1) + "\"")
    mylogfile.writelines("\"" + "\" \"".join(command1) + "\"" + "\n")
    ######################  Log2timeline Command Execute  ##########################
    status_marker(mylogfile,
                  subprocess.Popen(
                      command1, stdout=mylogfile, stderr=mylogfile))

    end_dt = datetime.datetime.now()
    duration01 = end_dt - start_dt
    print("Parsing ended at: " + str(end_dt))
    mylogfile.writelines("Parsing ended at: " + str(end_dt) + "\n")
    print("Parsing duration was: " + str(duration01))
    mylogfile.writelines("Parsing duration was: " + str(duration01) + "\n")
    # Removing uncompressed file(s)
    if unzipped_file:
        print("\nRemoving uncompressed files in directory: " +
              unzipped_file_loc)
        mylogfile.writelines("\nRemoving uncompressed files in directory: " +
                             unzipped_file_loc + "\n")
        if sys.platform[0:3] == "win":
            shutil.rmtree(u'\\\\?\\' + os.path.abspath(unzipped_file_loc))
        else:
            shutil.rmtree(unzipped_file_loc)

    return
Would you like to delete this file?\", \"no\"):\n            print(\"Removing the existing file: \" + csv_file)\n            mylogfile.writelines(\"Removing the existing file: \" + csv_file +\n                                 \"\\n\")\n            os.remove(csv_file)\n            rpt_dir_name = dst_loc + \"/Reports\"\n            if os.path.isdir(rpt_dir_name):\n                print(\"Removing the existing report directory: \" +\n                      rpt_dir_name)\n                mylogfile.writelines(\"Removing the existing file: \" +\n                                     rpt_dir_name + \"\\n\")\n        else:\n            print(\"Keeping the existing file: \" + csv_file)\n            mylogfile.writelines(\"Keeping the existing file: \" + csv_file)\n            return\n    command2 = [\n        psort_location, \"-o\", \"l2tcsv\", \"--status_view\", \"none\", db_file,\n        \"--logfile\", logname, \"-w\", csv_file\n    ]\n    # Create SuperTimeline\n    print(\"\\nCreating the SuperTimeline CSV file\")\n    mylogfile.writelines(\"\\nCreating the SuperTimeline CSV file\" + \"\\n\")\n    print(\"\\\"\" + \"\\\" \\\"\".join(command2) + \"\\\"\")\n    mylogfile.writelines(\"\\\"\" + \"\\\" \\\"\".join(command2) + \"\\\"\" + \"\\n\")\n    ######################  Psort Command Execute  ##########################\n    status_marker(mylogfile,\n                  subprocess.Popen(\n                      command2, stdout=mylogfile, stderr=mylogfile))\n    print(\"SuperTimeline CSV file is created\")\n    mylogfile.writelines(\"SuperTimeline CSV file is created\\n\")\n    return\n\n\ndef get_es_info(args):\n    casename = \"default\"\n    user = \"\"\n    server = \"127.0.0.1\"\n    port = \"9200\"\n\n    if args.es_kb:\n        casename = args.es_kb[0]\n    if args.es_kb_user:\n        user = args.es_kb_user[0]\n    if args.es_kb_server:\n        server = args.es_kb_server[0]\n    if args.es_kb_port:\n        port = args.es_kb_port[0]\n\n    return casename, server, 
port, user\n\n\ndef get_ts_es_info(args):\n    casename = \"default\"\n\n    if args.es_ts:\n        casename = args.es_ts[0]\n    return casename\n\n\ndef export_to_elasticsearch(mylogfile, args, db_file, psort_location, logname):\n    start_dt = datetime.datetime.now()\n    print(\"\\nProcess to export to ElasticSearch started\")\n    mylogfile.writelines(\"\\nProcess to export to ElasticSearch started\" + \"\\n\")\n    if args.es_kb:\n        casename, server, port, user = get_es_info(args)\n        output_elasticsearch(mylogfile, db_file, casename, psort_location,\n                             server, port, user, logname)\n    else:\n        casename = get_ts_es_info(args)\n        output_elasticsearch_ts(mylogfile, db_file, casename, psort_location, logname)\n    end_dt = datetime.datetime.now()\n    duration03 = end_dt - start_dt\n    print(\"\\nProcess to export to ElasticSearch completed\")\n    mylogfile.writelines(\"\\nProcess to export to ElasticSearch completed\" +\n                         \"\\n\")\n    print(\"ElasticSearch export process duration was: \" + str(duration03))\n    mylogfile.writelines(\"ElasticSearch export process duration was: \" +\n                         str(duration03) + \"\\n\")\n    return\n\n\ndef export_to_json(dst_loc, srcfilename, mylogfile, db_file, psort_location, logname):\n    # Export Data (if selected)\n    print(\"\\nProcess to create export document started\")\n    mylogfile.writelines(\"\\nProcess to create export document started\" + \"\\n\")\n    # Create the file for export\n    exportfname = create_export(dst_loc, srcfilename, mylogfile, db_file,\n                                psort_location, logname)\n    print(\"Process to create export document complete\")\n    mylogfile.writelines(\"Process to create export document complete\" + \"\\n\")\n\n    end_dt = datetime.datetime.now()\n    duration03 = end_dt - start_dt\n    print(\"Creating export document process duration was: \" + str(duration03))\n    
mylogfile.writelines(\"Creating export document process duration was: \" +\n                         str(duration03) + \"\\n\")\n    return\n\n\ndef unzip_files(dst_loc, src_loc):\n    unzipped_file_loc = dst_loc + \"/artifacts/\" + src_loc.split(\"/\")[-1][:-4]\n    print(\"Attempting to extract source file: \" + src_loc)\n    src_loc = unzip_source(src_loc, unzipped_file_loc)\n    print(\"All files extracted to folder: \" + src_loc)\n    return src_loc\n\n\n####################### END FUNCTIONS ############################\n\n\n##################  EXECTUTION SECTION ############################\ndef main():\n    # Default Parser option\n    default_parser = \"win\"\n    unzipped_file = False\n    unzipped_file_loc = \"\"\n\n    # Plaso Program Locations (default)\n    if sys.platform[0:3] == \"win\":\n        log2timeline_location = r\"plaso\\log2timeline.exe\"\n        psort_location = r\"plaso\\psort.exe\"\n    else:\n        log2timeline_location = r\"log2timeline.py\"\n        psort_location = r\"psort.py\"\n\n    # Parsing begins\n    parser_list = [\"win\", \"mft_usnjrnl\", \"lin\", \"mac\", \"android\",\"datt\"]\n\n    parser = argparse.ArgumentParser(\n        description='Cold Disk Quick Response Tool (CDQR)')\n    parser.add_argument(\n        'src_location',\n        nargs=1,\n        help='Source File location: Y:/Case/Tag009/sample.E01')\n    parser.add_argument(\n        'dst_location',\n        nargs='?',\n        default='Results',\n        help=\n        'Destination Folder location. If nothing is supplied then the default is \\'Results\\''\n    )\n    parser.add_argument(\n        '-p',\n        '--parser',\n        nargs=1,\n        help=\n        'Choose parser to use.  If nothing chosen then \\'win\\' is used.  
The parsing options are: '\n        + ', '.join(parser_list))\n    parser.add_argument(\n        '--nohash',\n        action='store_true',\n        default=False,\n        help=\n        'Do not hash all the files as part of the processing of the image')\n    parser.add_argument(\n        '--mft',\n        action='store_true',\n        default=False,\n        help=\n        'Process the MFT file (disabled by default except for DATT)')\n    parser.add_argument(\n        '--usnjrnl',\n        action='store_true',\n        default=False,\n        help=\n        'Process the USNJRNL file (disabled by default except for DATT)')\n    parser.add_argument(\n        '--max_cpu',\n        action='store_true',\n        default=False,\n        help='Use the maximum number of cpu cores to process the image')\n    parser.add_argument(\n        '--export',\n        action='store_true',\n        help='Creates line delimited json export file')\n    parser.add_argument(\n        '--artifact_filters',\n        nargs=1,\n        help='Plaso passthrough: Names of forensic artifact definitions, \\\n            provided on the command command line (comma separated). Forensic \\\n            artifacts are stored in .yaml files that are directly \\\n            pulled from the artifact definitions project. You can \\\n            also specify a custom artifacts yaml file (see \\\n            --custom_artifact_definitions). Artifact definitions \\\n            can be used to describe and quickly collect data of \\\n            interest, such as specific files or Windows Registry \\\n            keys.')\n    parser.add_argument(\n        '--artifact_filters_file',\n        nargs=1,\n        help='Plaso passthrough: Names of forensic artifact definitions, \\\n            provided in a file with one artifact name per line. Forensic \\\n            artifacts are stored in .yaml files that are directly \\\n            pulled from the artifact definitions project. 
You can \\\n            also specify a custom artifacts yaml file (see \\\n            --custom_artifact_definitions). Artifact definitions \\\n            can be used to describe and quickly collect data of \\\n            interest, such as specific files or Windows Registry \\\n            keys.')\n    parser.add_argument(\n        '--artifact_definitions',\n        nargs=1,\n        help='Plaso passthrough: Path to a directory containing artifact \\\n            definitions, which are .yaml files. Artifact definitions can \\\n            be used to describe and quickly collect data of interest, \\\n            such as specific files or Windows Registry keys.')\n    parser.add_argument(\n        '--custom_artifact_definitions',\n        nargs=1,\n        help='Plaso passthrough: Path to a file containing custom artifact \\\n        definitions, which are .yaml files. Artifact definitions can be \\\n        used to describe and quickly collect data of interest, \\\n        such as specific files or Windows Registry keys.')\n    parser.add_argument(\n        '--file_filter',\n        '-f',\n        nargs=1,\n        help='Plaso passthrough: List of files to include for targeted \\\n         collection of files to parse, one line per file path, setup is \\\n        /path|file - where each element can contain either a \\\n        variable set in the preprocessing stage or a regular \\\n        expression.')\n    parser.add_argument(\n        '--es_kb',\n        nargs=1,\n        help=\n        'Outputs Kibana format to elasticsearch database. Requires index name. Example: \\'--es_kb my_index\\''\n    )\n    parser.add_argument(\n        '--es_kb_server',\n        nargs=1,\n        help=\n        'Kibana Format Only: Exports to remote (default is 127.0.0.1) elasticsearch database. 
Requires Server name or IP address Example: \\'--es_kb_server myserver.elk.go\\' or \\'--es_kb_server 192.168.1.10\\''\n    )\n    parser.add_argument(\n        '--es_kb_port',\n        nargs=1,\n        help=\n        'Kibana Format Only: Port (default is 9200) for remote elasticsearch database. Requires port number Example: \\'--es_kb_port 9200 \\''\n    )\n    parser.add_argument(\n        '--es_kb_user',\n        nargs=1,\n        help=\n        'Kibana Format Only: Username (default is none) for remote elasticsearch database. Requires port number Example: \\'--es_kb_user skadi \\''\n    )\n    parser.add_argument(\n        '--es_ts',\n        nargs=1,\n        help=\n        'Outputs TimeSketch format to elasticsearch database. Requires index/timesketch name. Example: \\'--es_ts my_name\\''\n    )\n    parser.add_argument(\n        '--plaso_db',\n        action='store_true',\n        default=False,\n        help='Process an existing Plaso DB file. Example: artifacts.plaso')\n    parser.add_argument(\n        '-z',\n        action='store_true',\n        default=False,\n        help=\n        'Indicates the input file is a zip file and needs to be decompressed')\n    parser.add_argument(\n        '--no_dependencies_check',\n        action='store_false',\n        default=True,\n        help=\n        'Re-enables the log2timeline the dependencies check. 
It is skipped by default'\n    )\n    parser.add_argument(\n        '--process_archives',\n        action='store_true',\n        default=False,\n        help=\n        'Extract and inspect contents of archives found inside of artifacts or disk images'\n    )\n    parser.add_argument(\n        '-v', '--version', action='version', version=cdqr_version)\n    parser.add_argument(\n        '-y',\n        action=\"store_true\",\n        default=False,\n        dest='confirmAll',\n        help='Accepts all defaults on prompted questions in the program.')\n    args = parser.parse_args()\n\n    # List to help with logging\n    log_list = [cdqr_version + \"\\n\"]\n    print(cdqr_version)\n\n    # Parsing the input from the command line and building log2timeline command\n    if args:\n        # Validate log2timeline.exe and psort.exe locations\n        if sys.platform[0:3] == \"win\":\n            if not os.path.isfile(log2timeline_location):\n                log2timeline_location, psort_location = query_plaso_location()\n            # Default log2timeline command\n        command1 = [\n            log2timeline_location, \"--partition\", \"all\", \"--vss_stores\", \"all\",\n            \"--status_view\", \"linear\"\n        ]\n\n        # Do not process archives unless enabled\n        if args.process_archives:\n            command1.append(\"--process_archives\")\n\n    # Set log2timeline parsing option(s)\n        if args.parser:\n            if args.parser[0] not in parser_list:\n                print(\"ERROR: \\\"\" + args.parser[0] +\n                      \"\\\" is not a valid parser selection.\")\n                print(\"ERROR: Valid parser options are: \" +\n                      ', '.join(parser_list))\n                print(\"ERROR: Please verify your command and try again.\")\n                print(\"Exiting...\")\n                sys.exit(1)\n            parser_opt = args.parser[0]\n            if parser_opt == \"lin\" or parser_opt == \"mac\":\n                
command1 = [\n                    log2timeline_location, \"--partition\", \"all\",\n                    \"--status_view\", \"none\"\n                ]\n        else:\n            # Set Default parser\n            parser_opt = default_parser\n\n    # Determine if Plaso version is compatible\n        p_ver = plaso_version(log2timeline_location)\n        print(\"Plaso Version: \" + p_ver)\n        log_list.append(\"Plaso Version: \" + p_ver + \"\\n\")\n\n        plaso_ver = plaso_version(log2timeline_location)\n\n    # Determine if Export is being used and option is valid\n        if args.export:\n            print(\"Export data option selected\")\n            log_list.append(\"Export data option selected\\n\")\n        # add parsing options to the command\n        command1.append(\"--parsers\")\n        command1.append(get_parser_list(parser_opt, plaso_ver, args))\n        print(\"Using parser: \" + parser_opt)\n        log_list.append(\"Using parser: \" + parser_opt + \"\\n\")\n\n        # Set Hashing variable\n        if args.nohash:\n            command1.append(\"--hashers\")\n            command1.append(\"none\")\n        else:\n            command1.append(\"--hashers\")\n            command1.append(\"md5\")\n\n    # Set Number of CPU cores to use\n        if args.max_cpu:\n            num_cpus = multiprocessing.cpu_count()\n        else:\n            num_cpus = multiprocessing.cpu_count() - 3\n            if num_cpus <= 0:\n                num_cpus = 1\n        command1.append(\"--workers\")\n        command1.append(str(num_cpus))\n        print(\"Number of cpu cores to use: \" + str(num_cpus))\n        log_list.append(\"Number of cpu cores to use: \" + str(num_cpus) + \"\\n\")\n\n        # Set filter file location\n        if args.file_filter:\n            filter_file_loc = verify_file(args.file_filter[0])\n            command1.append(\"--file_filter\")\n            command1.append(filter_file_loc)\n            print(\"Filter file used: \" + filter_file_loc)\n  
          log_list.append(\"Filter file used: \" + filter_file_loc)\n\n        # Set custom artifact definitions file location\n        if args.custom_artifact_definitions:\n            custom_artifact_definitions_file = verify_file(args.custom_artifact_definitions[0])\n            command1.append(\"--custom_artifact_definitions\")\n            command1.append(custom_artifact_definitions_file)\n            print(\"Custom Artifact Definition file used: \" + custom_artifact_definitions_file)\n            log_list.append(\"Custom Artifact Definition file used: \" + custom_artifact_definitions_file)\n\n        # Set artifact definitions file location\n        if args.artifact_definitions:\n            artifact_definitions_file = verify_file(args.artifact_definitions[0])\n            command1.append(\"--artifact_definitions\")\n            command1.append(artifact_definitions_file)\n            print(\"Artifact Definition file used: \" + artifact_definitions_file)\n            log_list.append(\"Artifact Definition file used: \" + artifact_definitions_file)\n\n        # Set artifact filters file location\n        if args.artifact_filters_file:\n            artifact_filters_file = verify_file(args.artifact_filters_file[0])\n            command1.append(\"--artifact_filters_file\")\n            command1.append(artifact_filters_file)\n            print(\"Artifact Definition file used: \" + artifact_filters_file)\n            log_list.append(\"Artifact Definition file used: \" + artifact_filters_file)\n\n        # Set artifact filters\n        if args.artifact_filters:\n            artifact_filters = args.artifact_filters[0]\n            command1.append(\"--artifact_filters\")\n            command1.append(artifact_filters)\n            print(\"Artifact Definitions used: \" + artifact_filters)\n            log_list.append(\"Artifact Definitions used: \" + artifact_filters)\n\n    # Set source location/file\n        src_loc = verify_file(args.src_location[0])\n\n    # Set 
destination location/file\n        dst_loc = args.dst_location.replace(\"\\\\\\\\\",\n                                            \"/\").replace(\"\\\\\", \"/\").rstrip(\"/\")\n        if os.path.exists(dst_loc):\n            if not query_yes_no(\n                    args, \"\\n\" + dst_loc +\n                    \" already exists.  Would you like to use that directory anyway?\",\n                    \"yes\"):\n                dst_loc = dst_loc + \"_\" + datetime.datetime.now().strftime(\n                    \"%d-%b-%y_%H-%M-%S\")\n                os.makedirs(dst_loc)\n        else:\n            os.makedirs(dst_loc)\n\n        print(\"Destination Folder: \" + dst_loc)\n        log_list.append(\"Destination Folder: \" + dst_loc + \"\\n\")\n\n        if args.z:\n            unzipped_file = True\n            src_loc = unzip_files(dst_loc, src_loc)\n            unzipped_file_loc = dst_loc + \"/artifacts/\"\n        elif src_loc[-4:].lower() == \".zip\":\n            if query_yes_no(\n                    args, \"\\n\" + src_loc +\n                    \" appears to be a zip file.  
Would you like CDQR to unzip it and process the contents?\",\n                    \"yes\"):\n                unzipped_file = True\n                src_loc = unzip_files(dst_loc, src_loc)\n                unzipped_file_loc = dst_loc + \"/artifacts/\"\n\n        print(\"Source data: \" + src_loc)\n        log_list.append(\"Source data: \" + src_loc + \"\\n\")\n\n    if args.plaso_db:\n        db_file = dst_loc + \"/\" + src_loc\n    else:\n        db_file = dst_loc + \"/\" + src_loc.split(\"/\")[-1] + \".plaso\"\n\n    # Create DB, CSV and Log Filenames\n    csv_file = dst_loc + \"/\" + src_loc.split(\"/\")[-1] + \".SuperTimeline.csv\"\n    logname = dst_loc + \"/\" + src_loc.split(\"/\")[-1] + \"_log2timeline.gz\"\n    logfilename = dst_loc + \"/\" + src_loc.split(\"/\")[-1] + \".log\"\n\n    # Check to see if it's a mounted drive and update filename if so\n    if db_file == dst_loc + \"/.plaso\" or db_file[-7:] == \":.plaso\":\n        db_file = dst_loc + \"/\" + \"mounted_image.plaso\"\n        csv_file = dst_loc + \"/\" + \"mounted_image.SuperTimeline.csv\"\n        logname = dst_loc + \"/\" + \"mounted_image.gz\"\n        logfilename = logname + \".log\"\n\n    command1.append(\"--logfile\")\n    command1.append(logname)\n\n    print(\"Log File: \" + logfilename)\n    print(\"Database File: \" + db_file)\n\n    log_list.append(\"Log File: \" + logfilename + \"\\n\")\n    log_list.append(\"Database File: \" + db_file + \"\\n\")\n\n    # Todo only print this if not using ES or TS\n    if args.es_kb is None and args.es_ts is None:\n        print(\"SuperTimeline CSV File: \" + csv_file)\n        log_list.append(\"SuperTimeline CSV File: \" + csv_file + \"\\n\")\n\n    command1.append(db_file)\n    command1.append(src_loc)\n\n    if args.no_dependencies_check:\n        command1.append(\"--no_dependencies_check\")\n\n    if os.path.isfile(logfilename):\n        os.remove(logfilename)\n\n    if os.path.isfile(logname):\n        os.remove(logname)\n\n    mylogfile = 
open(logfilename, 'w')\n    mylogfile.writelines(\"\".join(log_list))\n\n    start_dt = datetime.datetime.now()\n    print(\"\\nStart time was: \" + str(start_dt))\n    mylogfile.writelines(\"\\nStart time  was: \" + str(start_dt) + \"\\n\")\n\n    # If this is plaso database file, skip parsing\n    if args.plaso_db:\n        print(\n            \"WARNING: File must be plaso database file otherwise it will not work.  Example: artifact.plaso (from CDQR)\"\n        )\n        mylogfile.writelines(\n            \"\\nWARNING: File must be plaso database file otherwise it will not work.  Example: artifact.plaso (from CDQR)\"\n            + \"\\n\")\n        db_file = src_loc\n    else:\n        parse_the_things(args, mylogfile, command1, db_file, unzipped_file,\n                         unzipped_file_loc, csv_file)\n\n    logname = dst_loc + \"/\" + src_loc.split(\"/\")[-1] + \"_psort.gz\"\n    if args.export:\n        export_to_json(dst_loc, src_loc, mylogfile, db_file, psort_location, logname)\n    elif args.es_kb or args.es_ts:\n        export_to_elasticsearch(mylogfile, args, db_file, psort_location, logname)\n    else:\n        create_supertimeline(args, mylogfile, csv_file, psort_location,\n                             db_file, logname)\n        create_reports(args, mylogfile, dst_loc, csv_file, parser_opt)\n\n    end_dt = datetime.datetime.now()\n    duration_full = end_dt - start_dt\n    print(\"\\nTotal duration was: \" + str(duration_full))\n    mylogfile.writelines(\"\\nTotal duration was: \" + str(duration_full) + \"\\n\")\n    mylogfile.close()\n\n\nif __name__ == \"__main__\":\n    main()\n"
  }
]