[
  {
    "path": ".github/issue_template.md",
    "content": "Please provide the following details.\n\n### Host System\n\n- OS :\n- Python version (`python --version`) :\n- Pip version (`pip --version`) :\n- Output of `pip freeze` : [Upload the output to GitHub gists and provide link]\n\n### Error Description\n\nPlease provide the details of the error. Try to provide the **output** and also **steps to reproduce** if required.\n"
  },
  {
    "path": ".gitignore",
    "content": "__pycache__/\nplugins/__pycache__/\n*.pyc\nlib/*.pyc\nplugins/*.pyc\nbelatiFiles/*\nlogs/*\n.directory\nlib/.directory\nplugins/.directory\nproxy.txt\nbelati.conf\nbeta/*\n#remove comment after 0.2.3-dev\nweb/db.sqlite3\n"
  },
  {
    "path": ".gitmodules",
    "content": "[submodule \"CheckMyUsername\"]\n        path = lib/CheckMyUsername\n        url = https://github.com/aancw/CheckMyUsername\n"
  },
  {
    "path": "Belati.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n\n# We need to check Dependency first\nfrom plugins.dep_check import DepCheck\n\ndep_check = DepCheck()\ndep_check.check_dependency()\n\nimport argparse\nimport datetime\nimport urllib2\nimport sys, signal, socket, re\nimport time\nimport dns.resolver\nimport tldextract\nimport shlex, subprocess\n\nfrom plugins.about_project import AboutProject\nfrom plugins.banner_grab import BannerGrab\nfrom plugins.check_domain import CheckDomain\nfrom plugins.config import Config\nfrom plugins.common_service_check import CommonServiceCheck\nfrom plugins.database import Database\nfrom plugins.gather_company import GatherCompany\nfrom plugins.git_finder import GitFinder\nfrom plugins.harvest_email import HarvestEmail\nfrom plugins.harvest_public_document import HarvestPublicDocument\nfrom plugins.json_beautifier import JsonBeautifier\nfrom plugins.logger import Logger\nfrom plugins.meta_exif_extractor import MetaExifExtractor\nfrom plugins.robots_scraper import RobotsScraper\nfrom plugins.scan_nmap import 
ScanNmap\nfrom plugins.subdomain_enum import SubdomainEnum\nfrom plugins.svn_finder import SVNFinder\nfrom plugins.updater import Updater\nfrom plugins.url_request import URLRequest\nfrom plugins.util import Util\nfrom plugins.wappalyzer import Wappalyzer\n\nfrom lib.CheckMyUsername.check_my_username import CheckMyUsername\nfrom dnsknife.scanner import Scanner\nfrom urlparse import urlparse\n\nfrom cmd2 import Cmd\nfrom tabulate import tabulate\nfrom texttable import Texttable\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\nUNDERLINE = '\\033[4m'\nENDC = '\\033[0m'\n\nlog = Logger()\nutil = Util()\n\nclass Belati(Cmd):\n    def __init__(self):\n        self.about = AboutProject()\n        self.url_req = URLRequest()\n\n        Cmd.doc_header = \"Core Commands\"\n        Cmd.prompt = \"{}belati{} > \".format(UNDERLINE, ENDC)\n        Cmd.path_complete\n\n        Cmd.__init__(self)\n\n        self.list_parameter = ['domain', 'username', 'email', 'orgcomp', 'proxy', 'proxy_file']\n        self.parameters = {}\n        self.multiple_proxy_list = []\n        self.current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n\n        self.show_banner()\n        self.conf = Config()\n        self.db = Database()\n\n\n    def show_banner(self):\n        banner = \"\"\"\n        {}\n\n         /$$$$$$$  /$$$$$$$$ /$$        /$$$$$$  /$$$$$$$$     .\n        | $$__  $$| $$_____/| $$       /$$__  $$|__  $$__/    J:L\n        | $$  \\ $$| $$      | $$      | $$  \\ $$   | $$       |:|\n        | $$$$$$$ | $$$$$   | $$      | $$$$$$$$   | $$       |:|\n        | $$__  $$| $$__/   | $$      | $$__  $$   | $$       |:|\n        | $$  \\ $$| $$      | $$      | $$  | $$   | $$       |:|\n        | $$$$$$$/| $$$$$$$$| $$$$$$$$| $$  | $$   | $$   /]  |:|  [\\ \n        |_______/ |________/|________/|__/  |__/   |__/   \\:-'\\\"\"\"'-:/\n                                        
                    \"\"III\"\"\n                                                              III\n                                                              III\n                                                              III\n                                                             (___)\n\n                                The Traditional Swiss Army Knife for OSINT\n\n        =[ {} {} by {}]=\n\n        + -- --=[ {} ]=-- -- +\n        + -- --=[ {} ]=-- -- +\n        {}\n        \"\"\"\n\n        warning_message = \"\"\"\n        {}\n        This tool is for educational purposes only.\n        Any damage you make will not affect the author.\n        Do It With Your Own Risk!\n\n        For Better Privacy, Please Use proxychains or other proxy service!\n        {}\n        \"\"\"\n\n        log.console_log(banner.format(G, self.about.__name__, self.about.__version__, self.about.__author__, self.about.__info__, self.about.__authorurl__, W))\n        log.console_log(warning_message.format(R, W))\n\n    def do_help(self, line):\n    \t'print help message'\n\n    \tprint(\"\\nCore commands\")\n    \tprint(\"==============\\n\")\n    \tprint tabulate([[\"Name\",\"Description\"],\n    \t\t[\"?\", \"Help menu\"],\n    \t\t[\"!\", \"Run OS Command\"],\n    \t\t[\"history\", \"Show command history\"],\n    \t\t[\"set\", \"Set parameters option value\"],\n    \t\t[\"show\", \"Display list available parameter option\"],\n    \t\t[\"start\", \"Start Automatic Scanning Belati\"],\n    \t\t[\"startws\", \"Start Web Server Only Mode\"],\n    \t\t[\"version\", \"Show application version number\"],\n    \t\t[\"quit\", \"Exit the application\"]],\n\t\t\theaders=\"firstrow\")\n\n\n    def do_set(self, arg, opts=None):\n    \t'''Set Variable for Belati parameters.\\nUsage: set [option] [value]\\n\\nAvailable options:\\ndomain, username, email, orgcomp, proxy, proxy_file'''\n\n    \tif not arg:\n    \t\tlog.console_log('{} Set Variable for Belati parameters.\\nUsage: set 
[option] [value]\\n\\nAvailable options:\\ndomain, username, email, orgcomp, proxy, proxy_file {}'.format(W, W))\n    \telse:\n            param = shlex.split(arg)\n            key = param[0]\n            value = param[1]\n            if key in self.list_parameter:\n                self.parameters[key] = value\n                log.console_log('{} => {}'.format(self.parameters[key], value))\n            else:\n                log.console_log(\"Available parameters: domain, username, email, orgcomp, proxy, proxy_file\")\n\n    def do_show(self, arg, opts=None):\n    \t'Show available parameter options'\n    \t\n    \tdomain_val = self.parameters['domain'] if 'domain' in self.parameters else None\n    \torgcomp = self.parameters['orgcomp'] if 'orgcomp' in self.parameters else None\n     \temail = self.parameters['email'] if 'email' in self.parameters else None\n     \tusername = self.parameters['username'] if 'username' in self.parameters else None\n     \tproxy = self.parameters['proxy'] if 'proxy' in self.parameters else None\n     \tproxy_file = self.parameters['proxy_file'] if 'proxy_file' in self.parameters else None\n     \torg_val = \"\"\n    \targ = shlex.split(arg)\n\n    \tif not arg:\n    \t\tprint(\"Please use command 'show options' to see list of option parameters\")\n\n    \telif arg[0] == \"options\":\n            print tabulate([[\"Name\",\"Value\", \"Required\", \"Description\"],\n    \t\t\t[\"domain\", domain_val, \"Yes\", \"Domain name for OSINT\"],\n    \t\t\t[\"orgcomp\", orgcomp, \"Yes\", \"Organization/Company name for OSINT\"],\n    \t\t\t[\"email\", email, \"Optional\", \"Email address for OSINT\"],\n    \t\t\t[\"username\", username, \"Optional\", \"Username for OSINT\"],\n    \t\t\t[\"proxy\", proxy, \"Optional\", \"Proxy server(e.g http://127.0.0.1:8080)\"],\n    \t\t\t[\"proxy_file\", proxy_file, \"Optional\", \"Proxy file list location\"]],\n    \t\t\theaders=\"firstrow\")\n\n    def do_startws(self, line):\n    \t'Start Belati in Web 
Server Only Mode'\n    \t\n    \tlog.console_log(\"{}[*] Entering Web Server Only Mode...{}\".format(Y,W))\n    \tself.start_web_server()\n    \tsys.exit()\n\n    def do_version(self, line):\n    \t'Check current Belati version'\n    \t\n    \tlog.console_log('{} {} by {}\\n'.format(self.about.__name__, self.about.__version__, self.about.__author__))\n    \tlog.console_log('Project URL: {}'.format(self.about.__giturl__))\n\n    def do_start(self, line):\n    \t'Start automatic scanning'\n    \tdomain = self.parameters['domain'] if 'domain' in self.parameters else None\n    \torgcomp = self.parameters['orgcomp'] if 'orgcomp' in self.parameters else None\n     \temail = self.parameters['email'] if 'email' in self.parameters else None\n     \tusername = self.parameters['username'] if 'username' in self.parameters else None\n     \tproxy = self.parameters['proxy'] if 'proxy' in self.parameters else ''\n     \tproxy_file = self.parameters['proxy_file'] if 'proxy_file' in self.parameters else ''\n\n    \tif domain is None and orgcomp is None:\n    \t\tlog.console_log(\"{}[-] Please specify domain/organization {}\".format(R,W))\n    \t\tsys.exit()\n\t\t\t\n\t\tlog.console_log(\"{}[*] Starting at: {} {}\".format(Y, self.current_time , W))\n\n        self.updater = Updater()\n        self.updater.check_update(self.about.__version__)\n\n        # Setup project\n        self.project_id = self.db.create_new_project(domain, orgcomp, self.current_time)\n        log.console_log(\"{}[+] Creating New Belati Project... 
{}\".format(G, W))\n        log.console_log(\"---------------------------------------------------------\")\n        log.console_log(\"Project ID: {}\".format(str(self.project_id)))\n        log.console_log(\"Project Domain: {}\".format(domain))\n        log.console_log(\"Project Organization/Company: {}\".format(orgcomp))\n        log.console_log(\"---------------------------------------------------------\")\n\n        if domain is not None:\n            if proxy is not '':\n                log.console_log(\"{}[*] Checking Proxy Status... {}\".format(G, W))\n                if self.check_single_proxy_status(proxy, \"http://\" + str(domain)) == 'ok':\n                    pass\n                else:\n                    log.console_log('{}[-] Please use another proxy or disable proxy! {}'.format(R, W))\n                    sys.exit()\n\n            if proxy_file is not '':\n                log.console_log(\"{}[*] Checking Proxy Status from file {}{}\".format(G, proxy_file, W))\n                self.check_multiple_proxy_status(proxy_file, \"http://\" + str(domain))\n                proxy = self.multiple_proxy_list\n\n            extract_domain = tldextract.extract(domain)\n\n            self.check_domain(self.url_req.ssl_checker(domain), proxy)\n            self.banner_grab(self.url_req.ssl_checker(domain), proxy)\n\n            if extract_domain.subdomain == \"\":\n                self.robots_scraper(self.url_req.ssl_checker(domain), proxy)\n                self.enumerate_subdomains(domain, proxy)\n                self.scan_DNS_zone(domain)\n                self.harvest_email_search(domain, proxy)\n                self.harvest_email_pgp(domain, proxy)\n            else:\n                domain = extract_domain.domain + '.' 
+ extract_domain.suffix\n                self.robots_scraper(self.url_req.ssl_checker(domain), proxy)\n                self.enumerate_subdomains(domain, proxy)\n                self.scan_DNS_zone(domain)\n                self.harvest_email_search(domain, proxy)\n                self.harvest_email_pgp(domain, proxy)\n\n            self.harvest_document(domain, proxy)\n\n        if username is not None:\n            self.username_checker(username)\n\n        if orgcomp is not None:\n            self.gather_company(orgcomp, proxy)\n\n        if email is not None:\n            log.console_log(\"This feature will be coming soon. Be patient :)\")\n\n        log.console_log(\"{}All done sir! All logs saved in {}logs{} directory and dowloaded file saved in {}belatiFiles{} {}\".format(Y, B, Y, B, Y, W))\n\n        self.start_web_server()\n\n    def check_domain(self, domain_name, proxy_address):\n        check = CheckDomain()\n\n        log.console_log(G + \"{}[*] Checking Domain Availability... {}\".format(G, W) , 0)\n        log.console_log(check.domain_checker(domain_name, proxy_address))\n        \n        log.console_log(\"{}[*] Checking URL Alive... {}\".format(G, W), 0)\n        log.console_log(check.alive_check(domain_name, proxy_address))\n\n        log.console_log(\"{}[*] Perfoming Whois... {}\".format(G, W))\n        whois_result = check.whois_domain(domain_name)\n        log.console_log(whois_result)\n        email = re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+\\s*', str(whois_result))\n\n        # JSON Beautifier\n        json_bf = JsonBeautifier()\n        json_whois = json_bf.beautifier(str(whois_result))\n        self.db.insert_domain_result(self.project_id, util.strip_scheme(domain_name), str(json_whois), util.clean_list_string(email))\n\n    def banner_grab(self, domain_name, proxy_address):\n        banner = BannerGrab()\n        log.console_log(\"{}[*] Perfoming HTTP Banner Grabbing... 
{}\".format(G, W))\n        banner_info = banner.show_banner(domain_name, proxy_address)\n        log.console_log(banner_info)\n        self.db.insert_banner(domain_name, self.project_id, str(banner_info))\n\n    def enumerate_subdomains(self, domain_name, proxy):\n        log.console_log(\"{}[*] Perfoming Subdomains Enumeration...{}\".format(G, W))\n        sub_enum = SubdomainEnum()\n        log.console_log(\"{}[+] Grabbing data from dnsdumpster...{}\\n\".format(B, W))\n        dnsdumpster = sub_enum.scan_dnsdumpster(domain_name)\n        subdomain_list = []\n        data_table = [[\"Domain\", \"IP\", \"Provider\", \"Country\"]]\n        for entry in dnsdumpster['dns_records']['host']:\n            data_table.extend([[entry['domain'], entry['ip'], entry['provider'], entry['country']]])\n            subdomain_list.append(entry['domain'])\n\n        log.console_log( tabulate(data_table, headers='firstrow') )\n\n        log.console_log(\"{}[+] Grabbing data from crt.sh...{}\\n\".format(B, W))\n        crt_list = sub_enum.scan_crtsh(domain_name, proxy)\n        \n        if crt_list is not None:\n            log.console_log(\"\\n\".join(crt_list))\n            subdomain_list = list(set(subdomain_list + crt_list))\n        \n        log.console_log(\"{}[+] Grabbing data from findsubdomains.com...{}\\n\".format(B, W))\n        findsubdomains_list = sub_enum.scan_findsubdomainsCom(domain_name,proxy)\n        \n        if findsubdomains_list is not None:\n            log.console_log(\"\\n\".join(findsubdomains_list))\n            subdomain_list = list(set(subdomain_list + findsubdomains_list))\n\n        subdomain_ip_list = []\n\n        for subdomain in subdomain_list:\n            self.banner_grab(self.url_req.ssl_checker(subdomain), proxy)\n            self.robots_scraper(self.url_req.ssl_checker(subdomain), proxy)\n            self.wappalyzing_webpage(subdomain)\n            self.public_git_finder(subdomain, proxy)\n            self.public_svn_finder(subdomain, 
proxy)\n            try:\n                subdomain_ip_list.append(socket.gethostbyname(subdomain))\n                self.db.update_subdomain_ip(self.project_id, subdomain, str(socket.gethostbyname(subdomain)))\n            except socket.gaierror:\n                pass\n\n        subdomain_ip_listFix = list(set(subdomain_ip_list))\n\n        # check common service port TODO\n        #for ipaddress in subdomain_ip_listFix:\n            #self.common_service_check(ipaddress)\n\n        for ipaddress in subdomain_ip_listFix:\n            self.service_scanning(ipaddress)\n\n    def wappalyzing_webpage(self, domain):\n        log.console_log(\"{}[*] Wapplyzing on domain {}{}\".format(G, domain, W))\n        wappalyzing = Wappalyzer()\n        targeturl = self.url_req.ssl_checker(domain)\n        try:\n            data = wappalyzing.run_wappalyze(targeturl)\n            self.db.insert_wappalyzing(self.project_id, domain, data)\n        except urllib2.URLError as exc:\n            log.console_log('URL Error: {0}'.format(str(exc)))\n        except urllib2.HTTPError as exc:\n            log.console_log('HTTP Error: {0}'.format(str(exc)))\n        except Exception as exc:\n            log.console_log('Unknown error: {0}'.format(str(exc)))\n\n    def service_scanning(self, ipaddress):\n        scan_nm = ScanNmap()\n        log.console_log(\"{}[*] Perfoming Nmap Full Scan on IP {}{}\".format(G, ipaddress, W))\n        log.console_log(\"{}[*] nmap -sS -A -Pn {}{}\".format(G, ipaddress, W))\n        scan_nm.run_scanning(ipaddress)\n\n    def scan_DNS_zone(self, domain_name):\n        log.console_log(\"{}[*] Perfoming DNS Zone Scanning... 
{}\".format(G, W))\n        log.console_log(\"{}[*] Please wait, maximum timeout for checking is 1 minutes {}\".format(G, W))\n        signal.signal(signal.SIGALRM, self.timeLimitHandler)\n        signal.alarm(60)\n        try:\n            scan_list = str(list(Scanner(domain_name).scan()))\n            ns_record_list = []\n            mx_record_list = []\n            log.console_log(\"{}{}{}\".format(G, scan_list.replace(\",\",\"\\n\"), W))\n            log.console_log(\"{}DNS Server:{}\".format(G, W))\n            for ns in dns.resolver.query(domain_name, 'NS'):\n                log.console_log(G + ns.to_text() + W)\n                ns_record_list.append(ns.to_text())\n\n            log.console_log(\"{}MX Record:{}\".format(G, W))\n            for ns in dns.resolver.query(domain_name, 'MX'):\n                log.console_log(\"{}{}{}\".format(G, ns.to_text(), W))\n                mx_record_list.append(ns.to_text())\n\n            self.db.update_dns_zone(self.project_id, domain_name, util.clean_list_string(ns_record_list), util.clean_list_string(mx_record_list))\n\n        except Exception, exc:\n            print(\"{}[*] No response from server... SKIP!{}\".format(R, W))\n\n    def harvest_email_search(self, domain_name, proxy_address):\n        log.console_log(\"{}[*] Perfoming Email Harvest from Google Search...{}\".format(G, W) )\n        harvest = HarvestEmail()\n        harvest_result = harvest.crawl_search(domain_name, proxy_address)\n        try:\n            log.console_log(\"{}[*] Found {} emails on domain {}{}\".format(Y, str(len(harvest_result)), domain_name, W))\n            log.console_log(\"{}{}{}\".format(R, '\\n'.join(harvest_result), W))\n            self.db.insert_email_result(self.project_id, util.clean_list_string(harvest_result))\n        except Exception, exc:\n            log.console_log(\"{}[-] Not found or Unavailable. 
{}{}\".format(R, str(harvest_result), W ))\n\n    def harvest_email_pgp(self, domain_name, proxy_address):\n        log.console_log(\"{}[*] Perfoming Email Harvest from PGP Server...{}\".format(G, W) )\n        harvest = HarvestEmail()\n        harvest_result = harvest.crawl_pgp_mit_edu(domain_name, proxy_address)\n        try:\n            log.console_log(\"{}[*] Found {} emails on domain {}{}\".format(Y, str(len(harvest_result)), domain_name, W))\n            log.console_log(\"{}{}{}\".format(R, '\\n'.join(harvest_result), W))\n            self.db.update_pgp_email(self.project_id, util.clean_list_string(harvest_result))\n        except Exception, exc:\n            log.console_log(\"{}[-] Not found or Unavailable. {}{}\".format(R, str(harvest_result), W ))\n\n    def harvest_document(self, domain_name, proxy_address):\n        log.console_log(\"{}[*] Perfoming Public Document Harvest from Google... {}\".format(G, W))\n        public_doc = HarvestPublicDocument()\n        public_doc.init_crawl(domain_name, proxy_address, self.project_id)\n\n    def username_checker(self, username):\n        log.console_log(\"{}[*] Perfoming Username Availability Checker... 
{}\".format(G, W))\n        user_check = CheckMyUsername()\n        username_status_result = user_check.check_username_availability(username)\n\n        for result in username_status_result:\n            log.console_log(G + \"[+] \" + result[0] + \" => \" + result[1] + \": \" + result[2])\n\n    def check_single_proxy_status(self, proxy_address, domain_check):\n        try:\n            parse = urlparse(proxy_address)\n            proxy_scheme = parse.scheme\n            proxy = str(parse.hostname) + ':' + str(parse.port)\n            proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy})\n            opener = urllib2.build_opener(proxy_handler)\n            opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36')]\n            urllib2.install_opener(opener)\n            req = urllib2.Request(domain_check)\n            start_time = time.time()\n            sock = urllib2.urlopen(req)\n            end_time = time.time()\n            diff_time = round(end_time - start_time, 3)\n            log.console_log(Y + \"{}[+] {} OK! 
Response Time : {}s\".format(Y, proxy_address, str(diff_time), W ))\n            return 'ok'\n        except urllib2.HTTPError, e:\n            print('Error code: ' + str(e.code))\n            return e.code\n        except Exception, detail:\n            print('ERROR ' +  str(detail))\n            return 1\n\n    def check_multiple_proxy_status(self, file_location, domain_check):\n        with open(file_location) as data:\n            text = [line.rstrip('\\n') for line in data]\n            for proxy in text:\n                if self.check_single_proxy_status(str(proxy), str(domain_check)) == 'ok':\n                     self.multiple_proxy_list.append(proxy)\n\n    def public_git_finder(self, domain, proxy_address):\n        log.console_log(\"{}[*] Checking Public GIT Directory on domain {}{}\".format(G, domain, W))\n        git_finder = GitFinder()\n        if git_finder.check_git(domain, proxy_address) == True:\n            log.console_log(\"{}[+] Gotcha! You are in luck, boy![{}/.git/]{}\".format(Y, domain, W))\n            self.db.update_git_finder(self.project_id, domain, \"Yes\")\n\n    def public_svn_finder(self, domain, proxy_address):\n        log.console_log(\"{}[*] Checking Public SVN Directory on domain {}{}\".format(G, domain, W))\n        svn_finder = SVNFinder()\n        if svn_finder.check_svn(domain, proxy_address) == 403:\n            log.console_log(\"{}[+] Um... Forbidden :( {}\".format(Y, W))\n        if svn_finder.check_svn(domain, proxy_address) == 200:\n            log.console_log(\"{}[+] Gotcha! 
You are in luck, boy![{}/.svn/]{}\".format(Y, domain, W))\n            self.db.update_svn_finder(self.project_id, domain, \"Yes\")\n\n    def robots_scraper(self, domain, proxy_address):\n        scraper = RobotsScraper()\n        data = scraper.check_robots(domain, proxy_address)\n        if data is not None and isinstance(data, int) == False and data.code == 200:\n            log.console_log(\"{}[+] Found interesting robots.txt[ {} ] =>{}\".format(Y, domain, W))\n            log.console_log(data.read())\n            self.db.insert_robots_txt(self.project_id, domain, str(data.read()))\n\n    def gather_company(self, company_name, proxy_address):\n        log.console_log(\"{}[+] Gathering Company Employee {} -> {}\".format(G, W, company_name))\n        gather_company = GatherCompany()\n        gather_company.crawl_company_employee(company_name, proxy_address, self.project_id)\n\n    def start_web_server(self):\n        log.console_log(\"{}Starting Django Web Server at http://127.0.0.1:8000/{}\".format(Y, W))\n        py_bin = self.conf.get_config(\"Environment\", \"py_bin\")\n        command = \"{} web/manage.py runserver 0.0.0.0:8000\".format(py_bin)\n        process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE)\n        while True:\n            output = process.stdout.readline()\n            if output == '' and process.poll() is not None:\n                break\n            if output:\n                log.console_log(output.strip())\n        rc = process.poll()\n        return rc\n\n    def complete_set(self, text, line, start_index, end_index):\n        if text:\n            return [\n                param for param in self.list_parameter\n                if param.startswith(text)\n            ]\n        else:\n            return self.list_parameter\n\n    def common_service_check(self, host):\n        log.console_log(\"{}[*] Checking Common Service Check on host {}{}\".format(G, host, W))\n        service_check = CommonServiceCheck()\n        
service_check.check_available_service(host)\n\n    def timeLimitHandler(self, signum, frame):\n        print(\"No Response...\")\n\nif __name__ == '__main__':\n    BelatiApp = Belati()\n    BelatiApp.cmdloop()\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "Changelog:\n\nv0.2.0-dev:\n\n- Add Gather Public Company Employee\n- Add SVN Finder\n- Update URL Request\n- Rework Code\n- Fix small bug\n- Update Harvest Public Document Regex\n- Add version for updater\n\nv0.2.1-dev:\n\n- Add Belati Configuration Wizard - [Core]\n- Add Database Support( On Progress )\n- Rework Code\n- Update Database System\n- Update Creating Project Info\n- Update Sublist3r\n- Update Gather Company Plugin\n- Update README\n\nv0.2.2-dev:\n\n- Add Django Web Management\n- Update Auto Configuration\n- Add auto start Django\n- Update output result\n- Update Gather Company Info\n\nv0.2.3-dev\n\n- Add Metadata/exif info for document\n- Update Database Schema\n- Auto Update system\n- Update URL File validation checker\n- Clean Output Result\n- Update Banner\n- Check git control status for update checker\n- Add Web Server Only Mode\n- Rework Code\n\nv0.2.4\n\n- Migrating argument parse to interactive command line shell\n- Implementing command line shell mode\n- Remove git branch version from Dockerfile\n- Update Dependencies Checker, more accurate with module version comparison \n- Migrating sublist3r to manual checking dnsdumpster, crtsh and other service will coming soon\n- Fixing bug for stability and improvement"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to Belati\n\nBelati welcomes contribution from everyone.\n\n# How to contribute\n\nI ❤️ pull requests. If you'd like to fix a bug, contribute a feature or just correct a typo, please feel free to do so. Belati has so many [TODO](https://github.com/aancw/Belati/issues/12) :)\n\n## Getting Started\n\nTo start working on Belati, first fork the repo, then clone it:\n\n```\ngit clone git@github.com:your-username/Belati.git\n```\n\n*Note: replace \"your-username\" with your GitHub handle*\n\n\n### On Progress...."
  },
  {
    "path": "CONTRIBUTORS.md",
    "content": "# Belati Contributors\n\n* **[Chandrapal](https://github.com/Chan9390)**\n\n\t* Changed webserver's ip from loopback\n\t* Making docker version of Belati"
  },
  {
    "path": "Dockerfile",
    "content": "FROM debian:stretch-slim\nLABEL MAINTAINER \"Chandrapal <bnchandrapal@protonmail.com>\"\n\nRUN cd /home \\\n    && apt-get update \\\n    && apt-get install -y git python python-pip nmap exiftool \\\n    && git clone https://github.com/aancw/Belati.git \\\n    && cd Belati \\\n    && git submodule update --init --recursive --remote \\\n    && pip install --upgrade --force-reinstall -r requirements.txt \\\n    && echo 'alias belati=\"python /home/Belati/Belati.py\"' >> ~/.bashrc\n\nWORKDIR /home/Belati\n\nEXPOSE 8000"
  },
  {
    "path": "LICENSE",
    "content": "                    GNU GENERAL PUBLIC LICENSE\n                       Version 2, June 1991\n\n Copyright (C) 1989, 1991 Free Software Foundation, Inc., <http://fsf.org/>\n 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n                            Preamble\n\n  The licenses for most software are designed to take away your\nfreedom to share and change it.  By contrast, the GNU General Public\nLicense is intended to guarantee your freedom to share and change free\nsoftware--to make sure the software is free for all its users.  This\nGeneral Public License applies to most of the Free Software\nFoundation's software and to any other program whose authors commit to\nusing it.  (Some other Free Software Foundation software is covered by\nthe GNU Lesser General Public License instead.)  You can apply it to\nyour programs, too.\n\n  When we speak of free software, we are referring to freedom, not\nprice.  Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthis service if you wish), that you receive source code or can get it\nif you want it, that you can change the software or use pieces of it\nin new free programs; and that you know you can do these things.\n\n  To protect your rights, we need to make restrictions that forbid\nanyone to deny you these rights or to ask you to surrender the rights.\nThese restrictions translate to certain responsibilities for you if you\ndistribute copies of the software, or if you modify it.\n\n  For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must give the recipients all the rights that\nyou have.  You must make sure that they, too, receive or can get the\nsource code.  
And you must show them these terms so they know their\nrights.\n\n  We protect your rights with two steps: (1) copyright the software, and\n(2) offer you this license which gives you legal permission to copy,\ndistribute and/or modify the software.\n\n  Also, for each author's protection and ours, we want to make certain\nthat everyone understands that there is no warranty for this free\nsoftware.  If the software is modified by someone else and passed on, we\nwant its recipients to know that what they have is not the original, so\nthat any problems introduced by others will not reflect on the original\nauthors' reputations.\n\n  Finally, any free program is threatened constantly by software\npatents.  We wish to avoid the danger that redistributors of a free\nprogram will individually obtain patent licenses, in effect making the\nprogram proprietary.  To prevent this, we have made it clear that any\npatent must be licensed for everyone's free use or not licensed at all.\n\n  The precise terms and conditions for copying, distribution and\nmodification follow.\n\n                    GNU GENERAL PUBLIC LICENSE\n   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION\n\n  0. This License applies to any program or other work which contains\na notice placed by the copyright holder saying it may be distributed\nunder the terms of this General Public License.  The \"Program\", below,\nrefers to any such program or work, and a \"work based on the Program\"\nmeans either the Program or any derivative work under copyright law:\nthat is to say, a work containing the Program or a portion of it,\neither verbatim or with modifications and/or translated into another\nlanguage.  (Hereinafter, translation is included without limitation in\nthe term \"modification\".)  Each licensee is addressed as \"you\".\n\nActivities other than copying, distribution and modification are not\ncovered by this License; they are outside its scope.  
The act of\nrunning the Program is not restricted, and the output from the Program\nis covered only if its contents constitute a work based on the\nProgram (independent of having been made by running the Program).\nWhether that is true depends on what the Program does.\n\n  1. You may copy and distribute verbatim copies of the Program's\nsource code as you receive it, in any medium, provided that you\nconspicuously and appropriately publish on each copy an appropriate\ncopyright notice and disclaimer of warranty; keep intact all the\nnotices that refer to this License and to the absence of any warranty;\nand give any other recipients of the Program a copy of this License\nalong with the Program.\n\nYou may charge a fee for the physical act of transferring a copy, and\nyou may at your option offer warranty protection in exchange for a fee.\n\n  2. You may modify your copy or copies of the Program or any portion\nof it, thus forming a work based on the Program, and copy and\ndistribute such modifications or work under the terms of Section 1\nabove, provided that you also meet all of these conditions:\n\n    a) You must cause the modified files to carry prominent notices\n    stating that you changed the files and the date of any change.\n\n    b) You must cause any work that you distribute or publish, that in\n    whole or in part contains or is derived from the Program or any\n    part thereof, to be licensed as a whole at no charge to all third\n    parties under the terms of this License.\n\n    c) If the modified program normally reads commands interactively\n    when run, you must cause it, when started running for such\n    interactive use in the most ordinary way, to print or display an\n    announcement including an appropriate copyright notice and a\n    notice that there is no warranty (or else, saying that you provide\n    a warranty) and that users may redistribute the program under\n    these conditions, and telling the user how to view a copy of this\n  
  License.  (Exception: if the Program itself is interactive but\n    does not normally print such an announcement, your work based on\n    the Program is not required to print an announcement.)\n\nThese requirements apply to the modified work as a whole.  If\nidentifiable sections of that work are not derived from the Program,\nand can be reasonably considered independent and separate works in\nthemselves, then this License, and its terms, do not apply to those\nsections when you distribute them as separate works.  But when you\ndistribute the same sections as part of a whole which is a work based\non the Program, the distribution of the whole must be on the terms of\nthis License, whose permissions for other licensees extend to the\nentire whole, and thus to each and every part regardless of who wrote it.\n\nThus, it is not the intent of this section to claim rights or contest\nyour rights to work written entirely by you; rather, the intent is to\nexercise the right to control the distribution of derivative or\ncollective works based on the Program.\n\nIn addition, mere aggregation of another work not based on the Program\nwith the Program (or with a work based on the Program) on a volume of\na storage or distribution medium does not bring the other work under\nthe scope of this License.\n\n  3. 
You may copy and distribute the Program (or a work based on it,\nunder Section 2) in object code or executable form under the terms of\nSections 1 and 2 above provided that you also do one of the following:\n\n    a) Accompany it with the complete corresponding machine-readable\n    source code, which must be distributed under the terms of Sections\n    1 and 2 above on a medium customarily used for software interchange; or,\n\n    b) Accompany it with a written offer, valid for at least three\n    years, to give any third party, for a charge no more than your\n    cost of physically performing source distribution, a complete\n    machine-readable copy of the corresponding source code, to be\n    distributed under the terms of Sections 1 and 2 above on a medium\n    customarily used for software interchange; or,\n\n    c) Accompany it with the information you received as to the offer\n    to distribute corresponding source code.  (This alternative is\n    allowed only for noncommercial distribution and only if you\n    received the program in object code or executable form with such\n    an offer, in accord with Subsection b above.)\n\nThe source code for a work means the preferred form of the work for\nmaking modifications to it.  For an executable work, complete source\ncode means all the source code for all modules it contains, plus any\nassociated interface definition files, plus the scripts used to\ncontrol compilation and installation of the executable.  
However, as a\nspecial exception, the source code distributed need not include\nanything that is normally distributed (in either source or binary\nform) with the major components (compiler, kernel, and so on) of the\noperating system on which the executable runs, unless that component\nitself accompanies the executable.\n\nIf distribution of executable or object code is made by offering\naccess to copy from a designated place, then offering equivalent\naccess to copy the source code from the same place counts as\ndistribution of the source code, even though third parties are not\ncompelled to copy the source along with the object code.\n\n  4. You may not copy, modify, sublicense, or distribute the Program\nexcept as expressly provided under this License.  Any attempt\notherwise to copy, modify, sublicense or distribute the Program is\nvoid, and will automatically terminate your rights under this License.\nHowever, parties who have received copies, or rights, from you under\nthis License will not have their licenses terminated so long as such\nparties remain in full compliance.\n\n  5. You are not required to accept this License, since you have not\nsigned it.  However, nothing else grants you permission to modify or\ndistribute the Program or its derivative works.  These actions are\nprohibited by law if you do not accept this License.  Therefore, by\nmodifying or distributing the Program (or any work based on the\nProgram), you indicate your acceptance of this License to do so, and\nall its terms and conditions for copying, distributing or modifying\nthe Program or works based on it.\n\n  6. Each time you redistribute the Program (or any work based on the\nProgram), the recipient automatically receives a license from the\noriginal licensor to copy, distribute or modify the Program subject to\nthese terms and conditions.  
You may not impose any further\nrestrictions on the recipients' exercise of the rights granted herein.\nYou are not responsible for enforcing compliance by third parties to\nthis License.\n\n  7. If, as a consequence of a court judgment or allegation of patent\ninfringement or for any other reason (not limited to patent issues),\nconditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License.  If you cannot\ndistribute so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you\nmay not distribute the Program at all.  For example, if a patent\nlicense would not permit royalty-free redistribution of the Program by\nall those who receive copies directly or indirectly through you, then\nthe only way you could satisfy both it and this License would be to\nrefrain entirely from distribution of the Program.\n\nIf any portion of this section is held invalid or unenforceable under\nany particular circumstance, the balance of the section is intended to\napply and the section as a whole is intended to apply in other\ncircumstances.\n\nIt is not the purpose of this section to induce you to infringe any\npatents or other property right claims or to contest validity of any\nsuch claims; this section has the sole purpose of protecting the\nintegrity of the free software distribution system, which is\nimplemented by public license practices.  Many people have made\ngenerous contributions to the wide range of software distributed\nthrough that system in reliance on consistent application of that\nsystem; it is up to the author/donor to decide if he or she is willing\nto distribute software through any other system and a licensee cannot\nimpose that choice.\n\nThis section is intended to make thoroughly clear what is believed to\nbe a consequence of the rest of this License.\n\n  8. 
If the distribution and/or use of the Program is restricted in\ncertain countries either by patents or by copyrighted interfaces, the\noriginal copyright holder who places the Program under this License\nmay add an explicit geographical distribution limitation excluding\nthose countries, so that distribution is permitted only in or among\ncountries not thus excluded.  In such case, this License incorporates\nthe limitation as if written in the body of this License.\n\n  9. The Free Software Foundation may publish revised and/or new versions\nof the General Public License from time to time.  Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\nEach version is given a distinguishing version number.  If the Program\nspecifies a version number of this License which applies to it and \"any\nlater version\", you have the option of following the terms and conditions\neither of that version or of any later version published by the Free\nSoftware Foundation.  If the Program does not specify a version number of\nthis License, you may choose any version ever published by the Free Software\nFoundation.\n\n  10. If you wish to incorporate parts of the Program into other free\nprograms whose distribution conditions are different, write to the author\nto ask for permission.  For software which is copyrighted by the Free\nSoftware Foundation, write to the Free Software Foundation; we sometimes\nmake exceptions for this.  Our decision will be guided by the two goals\nof preserving the free status of all derivatives of our free software and\nof promoting the sharing and reuse of software generally.\n\n                            NO WARRANTY\n\n  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY\nFOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  
EXCEPT WHEN\nOTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES\nPROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED\nOR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS\nTO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE\nPROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,\nREPAIR OR CORRECTION.\n\n  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR\nREDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,\nINCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING\nOUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED\nTO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY\nYOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER\nPROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGES.\n\n                     END OF TERMS AND CONDITIONS\n\n            How to Apply These Terms to Your New Programs\n\n  If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n  To do so, attach the following notices to the program.  
It is safest\nto attach them to the start of each source file to most effectively\nconvey the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n    {description}\n    Copyright (C) {year}  {fullname}\n\n    This program is free software; you can redistribute it and/or modify\n    it under the terms of the GNU General Public License as published by\n    the Free Software Foundation; either version 2 of the License, or\n    (at your option) any later version.\n\n    This program is distributed in the hope that it will be useful,\n    but WITHOUT ANY WARRANTY; without even the implied warranty of\n    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n    GNU General Public License for more details.\n\n    You should have received a copy of the GNU General Public License along\n    with this program; if not, write to the Free Software Foundation, Inc.,\n    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n\nAlso add information on how to contact you by electronic and paper mail.\n\nIf the program is interactive, make it output a short notice like this\nwhen it starts in an interactive mode:\n\n    Gnomovision version 69, Copyright (C) year name of author\n    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n    This is free software, and you are welcome to redistribute it\n    under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License.  Of course, the commands you use may\nbe called something other than `show w' and `show c'; they could even be\nmouse-clicks or menu items--whatever suits your program.\n\nYou should also get your employer (if you work as a programmer) or your\nschool, if any, to sign a \"copyright disclaimer\" for the program, if\nnecessary.  
Here is a sample; alter the names:\n\n  Yoyodyne, Inc., hereby disclaims all copyright interest in the program\n  `Gnomovision' (which makes passes at compilers) written by James Hacker.\n\n  {signature of Ty Coon}, 1 April 1989\n  Ty Coon, President of Vice\n\nThis General Public License does not permit incorporating your program into\nproprietary programs.  If your program is a subroutine library, you may\nconsider it more useful to permit linking proprietary applications with the\nlibrary.  If this is what you want to do, use the GNU Lesser General\nPublic License instead of this License.\n"
  },
  {
    "path": "README.md",
    "content": "# Belati\n\n![Belati](/images/Belati-logo.png?raw=true \"Belati Logo\")\n\n\n[![Awesome OSINT](https://img.shields.io/badge/awesome-osint-brightgreen.svg)](https://github.com/jivoi/awesome-osint)\n[![OSINT Framework](https://img.shields.io/badge/osint-framework-brightgreen.svg)](http://osintframework.com)\n[![n0where](https://img.shields.io/badge/n0where-top%20100-lightgrey.svg)](https://n0where.net/best-cybersecurity-tools/)\n[![ToolsWatch](https://img.shields.io/badge/Tools-Watch-brightgreen.svg)](http://www.toolswatch.org/2017/07/belati-v-0-2-2-dev-swiss-army-knife-for-osint/)\n[![BlackArch Scanner](https://img.shields.io/badge/BlackArch-Scanner-red.svg)](https://blackarch.org/scanner.html)\n[![Echo Ezine 31](https://img.shields.io/badge/Echo-Ezine%2031-yellow.svg)](http://ezine.echo.or.id/issue31/005.txt)\n\n\n### Belati - The Traditional Swiss Army Knife For OSINT\n\nBelati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. This tools is inspired by Foca and Datasploit for OSINT :)\n\n## Current Version\n\nv0.2.4\n\n## Belati In Action\n\n[![Belati In Action 0.24-stable Preview](https://img.youtube.com/vi/yRSln6BSo-c/0.jpg)](https://www.youtube.com/watch?v=yRSln6BSo-c)\n\n## Why I Made this?\nJust for learning stuff and OSINT purpose. 
Correct me if i'm wrong\n\n## What Belati can do?\n- Interactive command line shell\n- Whois(Indonesian TLD Support)\n- Banner Grabbing\n- Subdomain Enumeration\n- Service Scanning for all Subdomain Machine\n- Web Appalyzer Support\n- DNS mapping / Zone Scanning\n- Mail Harvester from Website & Search Engine\n- Mail Harvester from MIT PGP Public Key Server\n- Scrapping Public Document for Domain from Search Engine\n- Fake and Random User Agent ( Prevent from blocking )\n- Proxy Support for Harvesting Emails and Documents\n- Public Git Finder in domain/subdomain\n- Public SVN Finder in domain/subdomain\n- Robot.txt Scraper in domain/subdomain\n- Gather Public Company Info & Employee\n- SQLite3 Database Support for storing Belati Results\n- Setup Wizard/Configuration for Belati\n- Django Web Management\n- Webserver only mode\n- Auto Dependency Checker\n- Auto Update system\n- Document Metadata/Exif Extractor\n- Document Author Metadata\n- Graph Visualization( On Progress )\n\n## TODO\n\nPlease see Belati TODO list here -> https://github.com/aancw/Belati/issues/12\n\n## Library\n\n- python-whois\n- Sublist3r\n- Subbrute\n\n## Requirements\n\n- nmap\n- git\n- sqlite3\n- exiftool\n\n## Install/Usage\n\n```\ngit clone https://github.com/aancw/Belati.git\ncd Belati\ngit submodule update --init --recursive --remote\npip install --upgrade pip\npip install -r requirements.txt #please use pip with python v2\nsudo su\npython Belati.py --help\n```\n\n## Docker Installation\n\n- Download Dockerfile:\n\n```bash\nwget https://raw.githubusercontent.com/aancw/Belati/master/Dockerfile\n```\n\n- Execute the following command to create a Docker image locally:\n  ```bash\n  docker build -t belati . 
#dot\n  ```\n\n- To create a container from the image, execute:\n  ```bash\n  docker run -p 8000:8000 -it belati /bin/bash\n  ```\n\n- Running Belati\n    ```bash\n    belati -h\n    ```\n\n\nFor more info, please refer to this guide: https://github.com/espi0n/Dockerfiles/blob/master/Belati/README.md\n\n## Tested On\n\n- Ubuntu 16.04 x86_64\n- Arch Linux x86_64\n- CentOS 7\n- Debian Jessie\n- MacOS\n\n## Python Requirements\n\nThis tool not compatible with Python 3. I need to migrate this later. So use python v2.7 instead!\n\n## Why Need Root Privilege?\n\nI've try to avoid using Root Privilege, but nmap need Root Privilege. You can add sudo or other way to run nmap without root privilege. It's your choice ;)\n\nReference -> https://secwiki.org/w/Running_nmap_as_an_unprivileged_user\n\nDon't worry. Belati still running well when you are run with normal user ;)\n\n## Dependencies\n\n- urllib2\n- dnspython\n- requests\n- argparse\n- texttable\n- python-geoip-geolite2\n- python-geoip\n- dnsknife\n- termcolor\n- colorama\n- validators\n- tqdm\n- tldextract\n- fake-useragent\n- python-wappalyzer\n- future\n- beautifulsoup4\n- python-whois\n- futures\n- django\n- pyexifinfo\n- cmd2\n- tabulate\n\n## Missing Dependencies?\n\nIf you are seeing this\n\n```\n$ python Belati.py\n\n            You are missing a module required for Belati. 
In order to continue using Belati, please install them with:\n\n            `pip install --upgrade --force-reinstall -r requirements.txt`\n\n            or manually install missing modules with:\n\n            `pip install --upgrade --force-reinstall dnspython requests termcolor colorama future beautifulsoup4 futures`\n```\n\nand this\n\n```\nYou are using pip version 8.1.2, however version 9.0.1 is available.\nYou should consider upgrading via the 'pip install --upgrade pip' command.\n```        \n\nPlease upgrade pip version and follow the instructions:\n\n```\npip install --upgrade pip\n```\n\n## System Dependencies\n\nFor CentOS/Fedora user, please install this:\n\n```\nyum install gcc gmp gmp-devel python-devel\n```\n\nFor Debian/Ubuntu user, please install this:\n\n```\nsudo apt-get install nmap git sqlite3 exiftool\n```\n\n\n## Notice\n\nThis tool is for educational purposes only. Any damage you make will not affect the author. Do It With Your Own Risk!\n\n## Feedback/Suggestion\n\nFeel free to create Issue in this repository or email me at `cacaddv [at] gmail.com` . Your feedback and suggestion is useful for Belati development progress :)\n\n## Contribution\n\nBelati welcomes contribution from everyone. Please see [CONTRIBUTING.md](https://github.com/aancw/Belati/blob/master/CONTRIBUTING.md)\n\n## Contributors\n\nPlease see [CONTRIBUTORS.md](https://github.com/aancw/Belati/blob/master/CONTRIBUTORS.md) and please add your name for credit in that file :)\n\n## Thanks To\n\nThanks to PyWhois Library, Sublist3r, MailHarvester, Emingoo for being part of my code. 
Also thanks to Hispagatos, Infosec-ninjas, eCHo, RNDC( Research and development center ) and all other people who are inspiring this project :)\n\n## Publications\n\nEcho Ezine 31 : http://ezine.echo.or.id/issue31/005.txt - Belati : Collecting Public Data & Public Document for OSINT Purpose - Petruknisme\n\nIDSECCONF 2017 : https://www.slideshare.net/idsecconf/belati-the-traditional-swiss-army-knife-for-osint - Belati: The Traditional Swiss Army Knife for OSINT\n\n\n## License\n\n**Author:** Aan Wahyu( https://petruknisme.com )\n\nBelati is licensed under GPL V2. You can use, modify, or redistribute this tool under the terms of GNU General Public License (GPLv2). Please see [LICENSE](https://github.com/aancw/Belati/blob/master/LICENSE) for the full license text.\n"
  },
  {
    "path": "lib/__init__.py",
    "content": ""
  },
  {
    "path": "lib/pywhois/.hg_archival.txt",
    "content": "repo: ea0e45971cea31656dfa687dd701a201929ad830\nnode: ccad96890edda4b701762d22129f4436f111566d\nbranch: default\nlatesttag: null\nlatesttagdistance: 95\nchangessincelatesttag: 111\n"
  },
  {
    "path": "lib/pywhois/.hgignore",
    "content": "^.eggs$\n^python_whois.egg-info$\n\\.pyc$\n\\.swp$\n"
  },
  {
    "path": "lib/pywhois/MANIFEST.in",
    "content": "include whois/data/tlds.txt\ninclude README.rst\n"
  },
  {
    "path": "lib/pywhois/README.rst",
    "content": "Goal\n====\n\n-  Create a simple importable Python module which will produce parsed\n   WHOIS data for a given domain.\n-  Able to extract data for all the popular TLDs (com, org, net, ...)\n-  Query a WHOIS server directly instead of going through an\n   intermediate web service like many others do.\n-  Works with Python 2 & 3\n\n\n\nExample\n=======\n\n.. sourcecode:: python\n\n    >>> import whois\n    >>> w = whois.whois('webscraping.com')\n    >>> w.expiration_date  # dates converted to datetime object\n    datetime.datetime(2013, 6, 26, 0, 0)\n    >>> w.text  # the content downloaded from whois server\n    u'\\nWhois Server Version 2.0\\n\\nDomain names in the .com and .net \n    ...'\n\n    >>> print w  # print values of all found attributes\n    creation_date: 2004-06-26 00:00:00\n    domain_name: [u'WEBSCRAPING.COM', u'WEBSCRAPING.COM']\n    emails: [u'WEBSCRAPING.COM@domainsbyproxy.com', u'WEBSCRAPING.COM@domainsbyproxy.com']\n    expiration_date: 2013-06-26 00:00:00\n    ...\n\n\n\nInstall\n=======\n\nInstall from pypi:\n\n.. sourcecode:: bash\n\n    pip install python-whois\n\nOr checkout latest version from repository:\n\n.. sourcecode:: bash\n\n    hg clone https://bitbucket.org/richardpenman/pywhois\n\nNote that then you will need to manually install the futures module, which allows supporting both Python 2 & 3:\n\n\n.. 
sourcecode:: bash\n\n    pip install futures\n\n\n\n\nChangelog\n=========\n\n0.6 - 2016-03-02:\n\n* support added for python 3\n* updated TLD list\n\n0.5 - 2015-09-05:\n\n* added native client, which now handles whois requests by default\n* added pretty formatting to string representation\n* return None instead of raising KeyError when an attribute does not exist\n* new TLD's: .mobi, .io, .kg, .su, .biz\n\n0.4 - 2015-08-13:\n\n* new TLD's: .de, .nl, .ca, .be\n* migrated to bitbucket\n* added socket timeout\n\n0.3 - 2015-03-31:\n\n* improved datetime parsing with python-dateutil when available\n* base WhoisEntry class inherits from dict\n* fixed TLD's: .org, .info\n\n\n\nContact\n=======\n\nYou can post ideas or patches here:\nhttps://bitbucket.org/richardpenman/pywhois/issues\n\nThanks to the many who have sent patches for additional domains!\n"
  },
  {
    "path": "lib/pywhois/__init__.py",
    "content": ""
  },
  {
    "path": "lib/pywhois/setup.py",
    "content": "import sys, os\nimport setuptools\n\nversion = '0.6.3'\n\nsetuptools.setup(\n    name='python-whois',\n    version=version,\n    description=\"Whois querying and parsing of domain registration information.\",\n    long_description='',\n    install_requires=[\n        'future',\n    ],\n    classifiers=[\n        'Environment :: Web Environment',\n        'Intended Audience :: Developers',\n        'License :: OSI Approved :: MIT License',\n        'Operating System :: OS Independent',\n        'Programming Language :: Python',\n        'Topic :: Internet :: WWW/HTTP'\n    ],\n    keywords='whois, python',\n    author='Richard Penman',\n    author_email='richard@webscraping.com',\n    url='https://bitbucket.org/richardpenman/pywhois',\n    license='MIT',\n    packages=['whois'],\n    package_dir={'whois':'whois'},\n    extras_require={\n        'better date conversion': [\"python-dateutil\"]\n    },\n    test_suite='nose.collector',\n    tests_require=['nose', 'simplejson'],\n    include_package_data=True,\n    zip_safe=False\n)\n"
  },
  {
    "path": "lib/pywhois/test/samples/expected/digg.com",
    "content": "{\"domain_name\": \"DIGG.COM\", \"expiration_date\": \"2010-02-20 00:00:00\", \"updated_date\": \"2007-03-13 00:00:00\", \"status\": [\"clientDeleteProhibited\", \"clientRenewProhibited\", \"clientTransferProhibited\", \"clientUpdateProhibited\"], \"creation_date\": \"2000-02-20 00:00:00\"}"
  },
  {
    "path": "lib/pywhois/test/samples/expected/google.com",
    "content": "{\"domain_name\": [\"GOOGLE.COM\", \"google.com\"], \"expiration_date\": \"2011-09-14 00:00:00\", \"updated_date\": \"2006-04-10 00:00:00\", \"status\": [\"clientDeleteProhibited\", \"clientTransferProhibited\", \"clientUpdateProhibited\"], \"creation_date\": \"1997-09-15 00:00:00\"}"
  },
  {
    "path": "lib/pywhois/test/samples/expected/imdb.com",
    "content": "{\"domain_name\": \"IMDB.COM\", \"expiration_date\": \"2016-01-04 00:00:00\", \"updated_date\": \"2008-03-28 00:00:00\", \"status\": \"clientTransferProhibited\", \"creation_date\": \"1996-01-05 00:00:00\"}"
  },
  {
    "path": "lib/pywhois/test/samples/expected/microsoft.com",
    "content": "{\"domain_name\": \"MICROSOFT.COM\", \"expiration_date\": \"2014-05-03 00:00:00\", \"updated_date\": \"2006-10-10 00:00:00\", \"status\": [\"clientDeleteProhibited\", \"clientTransferProhibited\", \"clientUpdateProhibited\"], \"creation_date\": \"1991-05-02 00:00:00\"}"
  },
  {
    "path": "lib/pywhois/test/samples/expected/reddit.com",
    "content": "{\"domain_name\": \"REDDIT.COM\", \"expiration_date\": \"2009-04-29 00:00:00\", \"updated_date\": \"2008-06-04 00:00:00\", \"status\": [\"clientDeleteProhibited\", \"clientTransferProhibited\", \"clientUpdateProhibited\"], \"creation_date\": \"2005-04-29 00:00:00\"}"
  },
  {
    "path": "lib/pywhois/test/samples/expected/urlowl.com",
    "content": "{\"domain_name\": [\"URLOWL.COM\", \"urlowl.com\"], \"expiration_date\": \"2009-04-14 00:00:00\", \"updated_date\": \"2008-04-14 00:00:00\", \"status\": \"ok\", \"creation_date\": \"2008-04-14 00:00:00\"}"
  },
  {
    "path": "lib/pywhois/test/samples/whois/digg.com",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Domain Name: DIGG.COM\n   Registrar: GODADDY.COM, INC.\n   Whois Server: whois.godaddy.com\n   Referral URL: http://registrar.godaddy.com\n   Name Server: UDNS1.ULTRADNS.NET\n   Name Server: UDNS2.ULTRADNS.NET\n   Status: clientDeleteProhibited\n   Status: clientRenewProhibited\n   Status: clientTransferProhibited\n   Status: clientUpdateProhibited\n   Updated Date: 13-mar-2007\n   Creation Date: 20-feb-2000\n   Expiration Date: 20-feb-2010\n\n>>> Last update of whois database: Thu, 26 Jun 2008 21:39:08 EDT <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.The data contained in GoDaddy.com, Inc.'s WhoIs database,\nwhile believed by the company to be reliable, is provided \"as is\"\nwith no guarantee or warranties regarding its accuracy.  This\ninformation is provided for the sole purpose of assisting you\nin obtaining information about domain name registration records.\nAny use of this data for any other purpose is expressly forbidden without the prior written\npermission of GoDaddy.com, Inc.  By submitting an inquiry,\nyou agree to these terms of usage and limitations of warranty.  
In particular,\nyou agree not to use this data to allow, enable, or otherwise make possible,\ndissemination or collection of this data, in part or in its entirety, for any\npurpose, such as the transmission of unsolicited advertising and\nand solicitations of any kind, including spam.  You further agree\nnot to use this data to enable high volume, automated or robotic electronic\nprocesses designed to collect or compile this data for any purpose,\nincluding mining this data for your own personal or commercial purposes. \n\nPlease note: the registrant of the domain name is specified\nin the \"registrant\" field.  In most cases, GoDaddy.com, Inc. \nis not the registrant of domain names listed in this database.\n\n\nRegistrant:\n   Domains by Proxy, Inc.\n\n   Registered through: GoDaddy.com, Inc. (http://www.godaddy.com)\n   Domain Name: DIGG.COM\n\n   Domain servers in listed order:\n      UDNS1.ULTRADNS.NET\n      UDNS2.ULTRADNS.NET\n\n\n   For complete domain details go to:\n   http://who.godaddy.com/whoischeck.aspx?Domain=DIGG.COM\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/google.com",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Server Name: GOOGLE.COM.ZZZZZ.GET.LAID.AT.WWW.SWINGINGCOMMUNITY.COM\n   IP Address: 69.41.185.195\n   Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM\n   Whois Server: whois.itsyourdomain.com\n   Referral URL: http://www.itsyourdomain.com\n\n   Server Name: GOOGLE.COM.ZOMBIED.AND.HACKED.BY.WWW.WEB-HACK.COM\n   IP Address: 217.107.217.167\n   Registrar: ONLINENIC, INC.\n   Whois Server: whois.35.com\n   Referral URL: http://www.OnlineNIC.com\n\n   Server Name: GOOGLE.COM.YAHOO.COM.MYSPACE.COM.YOUTUBE.COM.FACEBOOK.COM.THEYSUCK.DNSABOUT.COM\n   IP Address: 72.52.190.30\n   Registrar: GODADDY.COM, INC.\n   Whois Server: whois.godaddy.com\n   Referral URL: http://registrar.godaddy.com\n\n   Server Name: GOOGLE.COM.WORDT.DOOR.VEEL.WHTERS.GEBRUIKT.SERVERTJE.NET\n   IP Address: 62.41.27.144\n   Registrar: KEY-SYSTEMS GMBH\n   Whois Server: whois.rrpproxy.net\n   Referral URL: http://www.key-systems.net\n\n   Server Name: GOOGLE.COM.VN\n   Registrar: ONLINENIC, INC.\n   Whois Server: whois.35.com\n   Referral URL: http://www.OnlineNIC.com\n\n   Server Name: GOOGLE.COM.UY\n   Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. D/B/A PUBLICDOMAINREGISTRY.COM\n   Whois Server: whois.PublicDomainRegistry.com\n   Referral URL: http://www.PublicDomainRegistry.com\n\n   Server Name: GOOGLE.COM.UA\n   Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. D/B/A PUBLICDOMAINREGISTRY.COM\n   Whois Server: whois.PublicDomainRegistry.com\n   Referral URL: http://www.PublicDomainRegistry.com\n\n   Server Name: GOOGLE.COM.TW\n   Registrar: WEB COMMERCE COMMUNICATIONS LIMITED DBA WEBNIC.CC\n   Whois Server: whois.webnic.cc\n   Referral URL: http://www.webnic.cc\n\n   Server Name: GOOGLE.COM.TR\n   Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. 
D/B/A PUBLICDOMAINREGISTRY.COM\n   Whois Server: whois.PublicDomainRegistry.com\n   Referral URL: http://www.PublicDomainRegistry.com\n\n   Server Name: GOOGLE.COM.SUCKS.FIND.CRACKZ.WITH.SEARCH.GULLI.COM\n   IP Address: 80.190.192.24\n   Registrar: EPAG DOMAINSERVICES GMBH\n   Whois Server: whois.enterprice.net\n   Referral URL: http://www.enterprice.net\n\n   Server Name: GOOGLE.COM.SPROSIUYANDEKSA.RU\n   Registrar: MELBOURNE IT, LTD. D/B/A INTERNET NAMES WORLDWIDE\n   Whois Server: whois.melbourneit.com\n   Referral URL: http://www.melbourneit.com\n\n   Server Name: GOOGLE.COM.SERVES.PR0N.FOR.ALLIYAH.NET\n   IP Address: 84.255.209.69\n   Registrar: GODADDY.COM, INC.\n   Whois Server: whois.godaddy.com\n   Referral URL: http://registrar.godaddy.com\n\n   Server Name: GOOGLE.COM.SA\n   Registrar: OMNIS NETWORK, LLC\n   Whois Server: whois.omnis.com\n   Referral URL: http://domains.omnis.com\n\n   Server Name: GOOGLE.COM.PLZ.GIVE.A.PR8.TO.AUDIOTRACKER.NET\n   IP Address: 213.251.184.30\n   Registrar: OVH\n   Whois Server: whois.ovh.com\n   Referral URL: http://www.ovh.com\n\n   Server Name: GOOGLE.COM.MX\n   Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. 
D/B/A PUBLICDOMAINREGISTRY.COM\n   Whois Server: whois.PublicDomainRegistry.com\n   Referral URL: http://www.PublicDomainRegistry.com\n\n   Server Name: GOOGLE.COM.IS.NOT.HOSTED.BY.ACTIVEDOMAINDNS.NET\n   IP Address: 217.148.161.5\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Server Name: GOOGLE.COM.IS.HOSTED.ON.PROFITHOSTING.NET\n   IP Address: 66.49.213.213\n   Registrar: NAME.COM LLC\n   Whois Server: whois.name.com\n   Referral URL: http://www.name.com\n\n   Server Name: GOOGLE.COM.IS.APPROVED.BY.NUMEA.COM\n   IP Address: 213.228.0.43\n   Registrar: GANDI SAS\n   Whois Server: whois.gandi.net\n   Referral URL: http://www.gandi.net\n\n   Server Name: GOOGLE.COM.HAS.LESS.FREE.PORN.IN.ITS.SEARCH.ENGINE.THAN.SECZY.COM\n   IP Address: 209.187.114.130\n   Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM\n   Whois Server: whois.itsyourdomain.com\n   Referral URL: http://www.itsyourdomain.com\n\n   Server Name: GOOGLE.COM.DO\n   Registrar: GODADDY.COM, INC.\n   Whois Server: whois.godaddy.com\n   Referral URL: http://registrar.godaddy.com\n\n   Server Name: GOOGLE.COM.COLLEGELEARNER.COM\n   IP Address: 72.14.207.99\n   IP Address: 64.233.187.99\n   IP Address: 64.233.167.99\n   Registrar: GODADDY.COM, INC.\n   Whois Server: whois.godaddy.com\n   Referral URL: http://registrar.godaddy.com\n\n   Server Name: GOOGLE.COM.CO\n   Registrar: NAMESECURE.COM\n   Whois Server: whois.namesecure.com\n   Referral URL: http://www.namesecure.com\n\n   Server Name: GOOGLE.COM.BR\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Server Name: GOOGLE.COM.BEYONDWHOIS.COM\n   IP Address: 203.36.226.2\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Server Name: GOOGLE.COM.AU\n   Registrar: PLANETDOMAIN PTY LTD.\n   Whois Server: whois.planetdomain.com\n   Referral URL: http://www.planetdomain.com\n\n   Server 
Name: GOOGLE.COM.ACQUIRED.BY.CALITEC.NET\n   IP Address: 85.190.27.2\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Domain Name: GOOGLE.COM\n   Registrar: MARKMONITOR INC.\n   Whois Server: whois.markmonitor.com\n   Referral URL: http://www.markmonitor.com\n   Name Server: NS1.GOOGLE.COM\n   Name Server: NS2.GOOGLE.COM\n   Name Server: NS3.GOOGLE.COM\n   Name Server: NS4.GOOGLE.COM\n   Status: clientDeleteProhibited\n   Status: clientTransferProhibited\n   Status: clientUpdateProhibited\n   Updated Date: 10-apr-2006\n   Creation Date: 15-sep-1997\n   Expiration Date: 14-sep-2011\n\n>>> Last update of whois database: Thu, 26 Jun 2008 21:39:39 EDT <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.\nMarkMonitor.com - The Leader in Corporate Domain Management\n----------------------------------------------------------\nFor Global Domain Consolidation, Research & Intelligence,\nand Enterprise DNS, go to: www.markmonitor.com\n----------------------------------------------------------\n\nThe Data in MarkMonitor.com's WHOIS database is provided by MarkMonitor.com\nfor information purposes, and to assist persons in obtaining information\nabout or related to a domain name registration record.  MarkMonitor.com\ndoes not guarantee its accuracy.  
By submitting a WHOIS query, you agree\nthat you will use this Data only for lawful purposes and that, under no\ncircumstances will you use this Data to: (1) allow, enable, or otherwise\nsupport the transmission of mass unsolicited, commercial advertising or\nsolicitations via e-mail (spam); or  (2) enable high volume, automated,\nelectronic processes that apply to MarkMonitor.com (or its systems).\nMarkMonitor.com reserves the right to modify these terms at any time.\nBy submitting this query, you agree to abide by this policy.\n\nRegistrant:\n        Dns Admin\n        Google Inc.\n        Please contact contact-admin@google.com 1600 Amphitheatre Parkway\n         Mountain View CA 94043\n        US\n        dns-admin@google.com +1.6502530000 Fax: +1.6506188571\n\n    Domain Name: google.com\n\n        Registrar Name: Markmonitor.com\n        Registrar Whois: whois.markmonitor.com\n        Registrar Homepage: http://www.markmonitor.com\n\n    Administrative Contact:\n        DNS Admin\n        Google Inc.\n        1600 Amphitheatre Parkway\n         Mountain View CA 94043\n        US\n        dns-admin@google.com +1.6506234000 Fax: +1.6506188571\n    Technical Contact, Zone Contact:\n        DNS Admin\n        Google Inc.\n        2400 E. Bayshore Pkwy\n         Mountain View CA 94043\n        US\n        dns-admin@google.com +1.6503300100 Fax: +1.6506181499\n\n    Created on..............: 1997-09-15.\n    Expires on..............: 2011-09-13.\n    Record last updated on..: 2008-06-08.\n\n    Domain servers in listed order:\n\n    ns4.google.com\n    ns3.google.com\n    ns2.google.com\n    ns1.google.com\n    \n\nMarkMonitor.com - The Leader in Corporate Domain Management\n----------------------------------------------------------\nFor Global Domain Consolidation, Research & Intelligence,\nand Enterprise DNS, go to: www.markmonitor.com\n----------------------------------------------------------\n--\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/imdb.com",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Server Name: IMDB.COM.MORE.INFO.AT.WWW.BEYONDWHOIS.COM\n   IP Address: 203.36.226.2\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Domain Name: IMDB.COM\n   Registrar: NETWORK SOLUTIONS, LLC.\n   Whois Server: whois.networksolutions.com\n   Referral URL: http://www.networksolutions.com\n   Name Server: UDNS1.ULTRADNS.NET\n   Name Server: UDNS2.ULTRADNS.NET\n   Status: clientTransferProhibited\n   Updated Date: 28-mar-2008\n   Creation Date: 05-jan-1996\n   Expiration Date: 04-jan-2016\n\n>>> Last update of whois database: Thu, 26 Jun 2008 21:40:25 EDT <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.NOTICE AND TERMS OF USE: You are not authorized to access or query our WHOIS\ndatabase through the use of high-volume, automated, electronic processes. The\nData in Network Solutions' WHOIS database is provided by Network Solutions for information\npurposes only, and to assist persons in obtaining information about or related\nto a domain name registration record. 
Network Solutions does not guarantee its accuracy.\nBy submitting a WHOIS query, you agree to abide by the following terms of use:\nYou agree that you may use this Data only for lawful purposes and that under no\ncircumstances will you use this Data to: (1) allow, enable, or otherwise support\nthe transmission of mass unsolicited, commercial advertising or solicitations\nvia e-mail, telephone, or facsimile; or (2) enable high volume, automated,\nelectronic processes that apply to Network Solutions (or its computer systems). The\ncompilation, repackaging, dissemination or other use of this Data is expressly\nprohibited without the prior written consent of Network Solutions. You agree not to use\nhigh-volume, automated, electronic processes to access or query the WHOIS\ndatabase. Network Solutions reserves the right to terminate your access to the WHOIS\ndatabase in its sole discretion, including without limitation, for excessive\nquerying of the WHOIS database or for failure to otherwise abide by this policy.\nNetwork Solutions reserves the right to modify these terms at any time.\n\nGet a FREE domain name registration, transfer, or renewal with any annual hosting package.\n\nhttp://www.networksolutions.com\n\nVisit AboutUs.org for more information about IMDB.COM\n<a href=\"http://www.aboutus.org/IMDB.COM\">AboutUs: IMDB.COM </a>\n\n\n\n\nRegistrant:\nIMDb.com, Inc.\n   Legal Dept, PO Box 81226\n   Seattle, WA 98108\n   US\n\n   Domain Name: IMDB.COM\n\n   ------------------------------------------------------------------------\n   Promote your business to millions of viewers for only $1 a month\n   Learn how you can get an Enhanced Business Listing here for your domain name.\n   Learn more at http://www.NetworkSolutions.com/\n   ------------------------------------------------------------------------\n\n   Administrative Contact, Technical Contact:\n      Hostmaster, IMDb\t\thostmaster@imdb.com\n      IMDb.com, Inc.\n      Legal Dept, PO Box 81226\n      Seattle, 
WA 98108\n      US\n      +1.2062664064 fax: +1.2062667010\n\n\n   Record expires on 04-Jan-2016.\n   Record created on 05-Jan-1996.\n   Database last updated on 26-Jun-2008 21:38:42 EDT.\n\n   Domain servers in listed order:\n\n   UDNS1.ULTRADNS.NET           \n   UDNS2.ULTRADNS.NET           \n\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/microsoft.com",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Server Name: MICROSOFT.COM.ZZZZZZ.MORE.DETAILS.AT.WWW.BEYONDWHOIS.COM\n   IP Address: 203.36.226.2\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Server Name: MICROSOFT.COM.ZZZZZ.GET.LAID.AT.WWW.SWINGINGCOMMUNITY.COM\n   IP Address: 69.41.185.194\n   Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM\n   Whois Server: whois.itsyourdomain.com\n   Referral URL: http://www.itsyourdomain.com\n\n   Server Name: MICROSOFT.COM.ZZZOMBIED.AND.HACKED.BY.WWW.WEB-HACK.COM\n   IP Address: 217.107.217.167\n   Registrar: ONLINENIC, INC.\n   Whois Server: whois.35.com\n   Referral URL: http://www.OnlineNIC.com\n\n   Server Name: MICROSOFT.COM.ZZZ.IS.0WNED.AND.HAX0RED.BY.SUB7.NET\n   IP Address: 207.44.240.96\n   Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM\n   Whois Server: whois.itsyourdomain.com\n   Referral URL: http://www.itsyourdomain.com\n\n   Server Name: MICROSOFT.COM.WILL.LIVE.FOREVER.BECOUSE.UNIXSUCKS.COM\n   IP Address: 185.3.4.7\n   Registrar: MELBOURNE IT, LTD. 
D/B/A INTERNET NAMES WORLDWIDE\n   Whois Server: whois.melbourneit.com\n   Referral URL: http://www.melbourneit.com\n\n   Server Name: MICROSOFT.COM.WILL.BE.SLAPPED.IN.THE.FACE.BY.MY.BLUE.VEINED.SPANNER.NET\n   IP Address: 216.127.80.46\n   Registrar: COMPUTER SERVICES LANGENBACH GMBH DBA JOKER.COM\n   Whois Server: whois.joker.com\n   Referral URL: http://www.joker.com\n\n   Server Name: MICROSOFT.COM.WILL.BE.BEATEN.WITH.MY.SPANNER.NET\n   IP Address: 216.127.80.46\n   Registrar: COMPUTER SERVICES LANGENBACH GMBH DBA JOKER.COM\n   Whois Server: whois.joker.com\n   Referral URL: http://www.joker.com\n\n   Server Name: MICROSOFT.COM.WAREZ.AT.TOPLIST.GULLI.COM\n   IP Address: 80.190.192.33\n   Registrar: EPAG DOMAINSERVICES GMBH\n   Whois Server: whois.enterprice.net\n   Referral URL: http://www.enterprice.net\n\n   Server Name: MICROSOFT.COM.USERS.SHOULD.HOST.WITH.UNIX.AT.ITSHOSTED.COM\n   IP Address: 74.52.88.132\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Server Name: MICROSOFT.COM.TOTALLY.SUCKS.S3U.NET\n   IP Address: 207.208.13.22\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Server Name: MICROSOFT.COM.SOFTWARE.IS.NOT.USED.AT.REG.RU\n   Registrar: MELBOURNE IT, LTD. 
D/B/A INTERNET NAMES WORLDWIDE\n   Whois Server: whois.melbourneit.com\n   Referral URL: http://www.melbourneit.com\n\n   Server Name: MICROSOFT.COM.SHOULD.GIVE.UP.BECAUSE.LINUXISGOD.COM\n   IP Address: 65.160.248.13\n   Registrar: GKG.NET, INC.\n   Whois Server: whois.gkg.net\n   Referral URL: http://www.gkg.net\n\n   Server Name: MICROSOFT.COM.RAWKZ.MUH.WERLD.MENTALFLOSS.CA\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Server Name: MICROSOFT.COM.OHMYGODITBURNS.COM\n   IP Address: 216.158.63.6\n   Registrar: DOTSTER, INC.\n   Whois Server: whois.dotster.com\n   Referral URL: http://www.dotster.com\n\n   Server Name: MICROSOFT.COM.MORE.INFO.AT.WWW.BEYONDWHOIS.COM\n   IP Address: 203.36.226.2\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Server Name: MICROSOFT.COM.LOVES.ME.KOSMAL.NET\n   IP Address: 65.75.198.123\n   Registrar: GODADDY.COM, INC.\n   Whois Server: whois.godaddy.com\n   Referral URL: http://registrar.godaddy.com\n\n   Server Name: MICROSOFT.COM.LIVES.AT.SHAUNEWING.COM\n   IP Address: 216.40.250.172\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Server Name: MICROSOFT.COM.IS.NOT.YEPPA.ORG\n   Registrar: OVH\n   Whois Server: whois.ovh.com\n   Referral URL: http://www.ovh.com\n\n   Server Name: MICROSOFT.COM.IS.NOT.HOSTED.BY.ACTIVEDOMAINDNS.NET\n   IP Address: 217.148.161.5\n   Registrar: ENOM, INC.\n   Whois Server: whois.enom.com\n   Referral URL: http://www.enom.com\n\n   Server Name: MICROSOFT.COM.IS.IN.BED.WITH.CURTYV.COM\n   IP Address: 216.55.187.193\n   Registrar: ABACUS AMERICA, INC. 
DBA NAMES4EVER\n   Whois Server: whois.names4ever.com\n   Referral URL: http://www.names4ever.com\n\n   Server Name: MICROSOFT.COM.IS.HOSTED.ON.PROFITHOSTING.NET\n   IP Address: 66.49.213.213\n   Registrar: NAME.COM LLC\n   Whois Server: whois.name.com\n   Referral URL: http://www.name.com\n\n   Server Name: MICROSOFT.COM.IS.GOD.BECOUSE.UNIXSUCKS.COM\n   IP Address: 161.16.56.24\n   Registrar: MELBOURNE IT, LTD. D/B/A INTERNET NAMES WORLDWIDE\n   Whois Server: whois.melbourneit.com\n   Referral URL: http://www.melbourneit.com\n\n   Server Name: MICROSOFT.COM.IS.A.STEAMING.HEAP.OF.FUCKING-BULLSHIT.NET\n   IP Address: 63.99.165.11\n   Registrar: THE NAME IT CORPORATION DBA NAMESERVICES.NET\n   Whois Server: whois.aitdomains.com\n   Referral URL: http://www.aitdomains.com\n\n   Server Name: MICROSOFT.COM.IS.A.MESS.TIMPORTER.CO.UK\n   Registrar: MELBOURNE IT, LTD. D/B/A INTERNET NAMES WORLDWIDE\n   Whois Server: whois.melbourneit.com\n   Referral URL: http://www.melbourneit.com\n\n   Server Name: MICROSOFT.COM.HAS.ITS.OWN.CRACKLAB.COM\n   IP Address: 209.26.95.44\n   Registrar: DOTSTER, INC.\n   Whois Server: whois.dotster.com\n   Referral URL: http://www.dotster.com\n\n   Server Name: MICROSOFT.COM.HAS.A.PRESENT.COMING.FROM.HUGHESMISSILES.COM\n   IP Address: 66.154.11.27\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Server Name: MICROSOFT.COM.FILLS.ME.WITH.BELLIGERENCE.NET\n   IP Address: 130.58.82.232\n   Registrar: CRONON AG BERLIN, NIEDERLASSUNG REGENSBURG\n   Whois Server: whois.tmagnic.net\n   Referral URL: http://nsi-robo.tmag.de\n\n   Server Name: MICROSOFT.COM.CAN.GO.FUCK.ITSELF.AT.SECZY.COM\n   IP Address: 209.187.114.147\n   Registrar: INNERWISE, INC. 
D/B/A ITSYOURDOMAIN.COM\n   Whois Server: whois.itsyourdomain.com\n   Referral URL: http://www.itsyourdomain.com\n\n   Server Name: MICROSOFT.COM.ARE.GODDAMN.PIGFUCKERS.NET.NS-NOT-IN-SERVICE.COM\n   IP Address: 216.127.80.46\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n\n   Server Name: MICROSOFT.COM.AND.MINDSUCK.BOTH.SUCK.HUGE.ONES.AT.EXEGETE.NET\n   IP Address: 63.241.136.53\n   Registrar: DOTSTER, INC.\n   Whois Server: whois.dotster.com\n   Referral URL: http://www.dotster.com\n\n   Domain Name: MICROSOFT.COM\n   Registrar: TUCOWS INC.\n   Whois Server: whois.tucows.com\n   Referral URL: http://domainhelp.opensrs.net\n   Name Server: NS1.MSFT.NET\n   Name Server: NS2.MSFT.NET\n   Name Server: NS3.MSFT.NET\n   Name Server: NS4.MSFT.NET\n   Name Server: NS5.MSFT.NET\n   Status: clientDeleteProhibited\n   Status: clientTransferProhibited\n   Status: clientUpdateProhibited\n   Updated Date: 10-oct-2006\n   Creation Date: 02-may-1991\n   Expiration Date: 03-may-2014\n\n>>> Last update of whois database: Thu, 26 Jun 2008 21:39:39 EDT <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  
Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. 
\n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.Registrant:\n Microsoft Corporation\n One Microsoft Way\n Redmond, WA 98052\n US\n\n Domain name: MICROSOFT.COM\n\n\n Administrative Contact:\n    Administrator, Domain  domains@microsoft.com\n    One Microsoft Way\n    Redmond, WA 98052\n    US\n    +1.4258828080\n Technical Contact:\n    Hostmaster, MSN  msnhst@microsoft.com\n    One Microsoft Way\n    Redmond, WA 98052\n    US\n    +1.4258828080\n\n\n Registration Service Provider:\n    DBMS VeriSign, dbms-support@verisign.com\n    800-579-2848 x4\n    Please contact DBMS VeriSign for domain updates, DNS/Nameserver\n    changes, and general domain support questions.\n\n\n Registrar of Record: TUCOWS, INC.\n Record last updated on 15-Nov-2007.\n Record expires on 03-May-2014.\n Record created on 02-May-1991.\n\n Registrar Domain Name Help Center:\n    http://domainhelp.tucows.com\n\n Domain servers in listed order:\n    NS2.MSFT.NET   \n    NS4.MSFT.NET   \n    NS1.MSFT.NET   \n    NS5.MSFT.NET   \n    NS3.MSFT.NET   \n\n\n Domain status: clientDeleteProhibited\n                clientTransferProhibited\n                clientUpdateProhibited\n\nThe Data in the Tucows Registrar WHOIS database is provided to you by Tucows\nfor information purposes only, and may be used to assist you in obtaining\ninformation about or related to a domain name's registration record.\n\nTucows makes this information available \"as is,\" and does not guarantee its\naccuracy.\n\nBy submitting a WHOIS query, you agree that you will use this data only for\nlawful purposes and that, under no circumstances will you use this data to:\na) allow, enable, or otherwise support the transmission by e-mail,\ntelephone, or facsimile of mass, unsolicited, commercial advertising or\nsolicitations to entities other than the data recipient's own existing\ncustomers; or (b) enable high volume, automated, electronic processes that\nsend queries or data to the systems of any 
Registry Operator or\nICANN-Accredited registrar, except as reasonably necessary to register\ndomain names or modify existing registrations.\n\nThe compilation, repackaging, dissemination or other use of this Data is\nexpressly prohibited without the prior written consent of Tucows.\n\nTucows reserves the right to terminate your access to the Tucows WHOIS\ndatabase in its sole discretion, including without limitation, for excessive\nquerying of the WHOIS database or for failure to otherwise abide by this\npolicy.\n\nTucows reserves the right to modify these terms at any time.\n\nBy submitting this query, you agree to abide by these terms.\n\nNOTE: THE WHOIS DATABASE IS A CONTACT DATABASE ONLY.  LACK OF A DOMAIN\nRECORD DOES NOT SIGNIFY DOMAIN AVAILABILITY.\n\n\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/reddit.com",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Domain Name: REDDIT.COM\n   Registrar: DSTR ACQUISITION PA I, LLC DBA DOMAINBANK.COM\n   Whois Server: rs.domainbank.net\n   Referral URL: http://www.domainbank.net\n   Name Server: ASIA1.AKAM.NET\n   Name Server: ASIA9.AKAM.NET\n   Name Server: AUS2.AKAM.NET\n   Name Server: NS1-1.AKAM.NET\n   Name Server: NS1-195.AKAM.NET\n   Name Server: USE4.AKAM.NET\n   Name Server: USW3.AKAM.NET\n   Name Server: USW5.AKAM.NET\n   Status: clientDeleteProhibited\n   Status: clientTransferProhibited\n   Status: clientUpdateProhibited\n   Updated Date: 04-jun-2008\n   Creation Date: 29-apr-2005\n   Expiration Date: 29-apr-2009\n\n>>> Last update of whois database: Fri, 27 Jun 2008 01:39:54 UTC <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.\nThe information in this whois database is provided for the sole\npurpose of assisting you in obtaining information about domain\nname registration records. This information is available \"as is,\"\nand we do not guarantee its accuracy. By submitting a whois\nquery, you agree that you will use this data only for lawful\npurposes and that, under no circumstances will you use this data\nto: (1) enable high volume, automated, electronic processes that\nstress or load this whois database system providing you this\ninformation; or (2) allow,enable, or otherwise support the\ntransmission of mass, unsolicited, commercial advertising or\nsolicitations via facsimile, electronic mail, or by telephone to\nentitites other than your own existing customers.  
The\ncompilation, repackaging, dissemination or other use of this data\nis expressly prohibited without prior written consent from this\ncompany. We reserve the right to modify these terms at any\ntime. By submitting an inquiry, you agree to these terms of usage\nand limitations of warranty.  Please limit your queries to 10 per\nminute and one connection.\n\n   Domain Services Provided By:\n      Domain Bank, support@domainbank.com\n      http:///www.domainbank.com\n\nRegistrant:\n   CONDENET INC\n   Four Times Square\n   New York, NY  10036\n   US\n\n   Registrar: DOMAINBANK\n   Domain Name: REDDIT.COM\n      Created on: 29-APR-05\n      Expires on: 29-APR-09\n      Last Updated on: 04-JUN-08\n\n   Administrative Contact:\n      ,   domain_admin@advancemags.com\n      Advance Magazine Group\n      4 Times Square\n      23rd Floor\n      New York, New York  10036\n      US\n      2122862860\n\n   Technical Contact:\n      ,   domains@advancemags.com\n      Advance Magazine Group\n      1201 N. Market St\n      Wilmington, DE  19801\n      US\n      3028304630\n\n\n   Domain servers in listed order:\n      ASIA1.AKAM.NET \n      ASIA9.AKAM.NET \n      AUS2.AKAM.NET \n      NS1-1.AKAM.NET \n      NS1-195.AKAM.NET \n      USE4.AKAM.NET \n      USW3.AKAM.NET \n      USW5.AKAM.NET \n\nEnd of Whois Information\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/shazow.net",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Domain Name: SHAZOW.NET\n   Registrar: NEW DREAM NETWORK, LLC\n   Whois Server: whois.dreamhost.com\n   Referral URL: http://www.dreamhost.com\n   Name Server: NS1.DREAMHOST.COM\n   Name Server: NS2.DREAMHOST.COM\n   Name Server: NS3.DREAMHOST.COM\n   Status: ok\n   Updated Date: 08-aug-2007\n   Creation Date: 13-sep-2003\n   Expiration Date: 13-sep-2009\n\n>>> Last update of whois database: Thu, 26 Jun 2008 21:39:08 EDT <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.\nLegal Stuff:\n\nThe information in DreamHost's whois database is to be used for\ninformational purposes only, and to obtain information on a\ndomain name registration. 
DreamHost does not guarantee its\naccuracy.\n\nYou are not authorized to query or access DreamHost's whois\ndatabase using high-volume, automated means without written\npermission from DreamHost.\n\nYou are not authorized to query or access DreamHost's whois\ndatabase in order to facilitate illegal activities, or to\nfacilitate the use of unsolicited bulk email, telephone, or\nfacsimile communications.\n\nYou are not authorized to collect, repackage, or redistribute the\ninformation in DreamHost's whois database.\n\nDreamHost may, at its sole discretion, restrict your access to\nthe whois database at any time, with or without notice. DreamHost\nmay modify these Terms of Service at any time, with or without\nnotice.\n\n+++++++++++++++++++++++++++++++++++++++++++\n\n   Domain Name: shazow.net\n\n   Registrant Contact:\n      shazow.net Private Registrant         shazow.net@proxy.dreamhost.com\n      DreamHost Web Hosting\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Administrative Contact:\n      shazow.net Private Registrant         shazow.net@proxy.dreamhost.com\n      DreamHost Web Hosting\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Technical Contact:\n      shazow.net Private Registrant         shazow.net@proxy.dreamhost.com\n      DreamHost Web Hosting\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Billing Contact:\n      shazow.net Private Registrant         shazow.net@proxy.dreamhost.com\n      DreamHost Web Hosting\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Record created on 2003-09-12 21:43:11.\n   Record expires on 2009-09-12 21:43:11.\n\n   Domain servers in listed order:\n\n      ns1.dreamhost.com\n      ns2.dreamhost.com\n      ns3.dreamhost.com\n\nDreamHost whois server terms of service: http://whois.dreamhost.com/terms.html\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/slashdot.org",
    "content": "NOTICE: Access to .ORG WHOIS information is provided to assist persons in \r\ndetermining the contents of a domain name registration record in the Public Interest Registry\r\nregistry database. The data in this record is provided by Public Interest Registry\r\nfor informational purposes only, and Public Interest Registry does not guarantee its \r\naccuracy.  This service is intended only for query-based access.  You agree \r\nthat you will use this data only for lawful purposes and that, under no \r\ncircumstances will you use this data to: (a) allow, enable, or otherwise \r\nsupport the transmission by e-mail, telephone, or facsimile of mass \r\nunsolicited, commercial advertising or solicitations to entities other than \r\nthe data recipient's own existing customers; or (b) enable high volume, \r\nautomated, electronic processes that send queries or data to the systems of \r\nRegistry Operator or any ICANN-Accredited Registrar, except as reasonably \r\nnecessary to register domain names or modify existing registrations.  All \r\nrights reserved. Public Interest Registry reserves the right to modify these terms at any \r\ntime. By submitting this query, you agree to abide by this policy. \r\n\r\nDomain ID:D2289308-LROR\r\nDomain Name:SLASHDOT.ORG\r\nCreated On:05-Oct-1997 04:00:00 UTC\r\nLast Updated On:23-Jun-2008 20:00:11 UTC\r\nExpiration Date:04-Oct-2008 04:00:00 UTC\r\nSponsoring Registrar:Tucows Inc. 
(R11-LROR)\r\nStatus:OK\r\nRegistrant ID:tuIIldggGKu3HogX\r\nRegistrant Name:DNS Administration\r\nRegistrant Organization:SourceForge, Inc.\r\nRegistrant Street1:650 Castro St.\r\nRegistrant Street2:Suite 450\r\nRegistrant Street3:\r\nRegistrant City:Mountain View\r\nRegistrant State/Province:CA\r\nRegistrant Postal Code:94041\r\nRegistrant Country:US\r\nRegistrant Phone:+1.6506942100\r\nRegistrant Phone Ext.:\r\nRegistrant FAX:\r\nRegistrant FAX Ext.:\r\nRegistrant Email:dns-admin@corp.sourceforge.com\r\nAdmin ID:tupyrGGXKEFJLdE5\r\nAdmin Name:DNS Administration\r\nAdmin Organization:SourceForge, Inc.\r\nAdmin Street1:650 Castro St.\r\nAdmin Street2:Suite 450\r\nAdmin Street3:\r\nAdmin City:Mountain View\r\nAdmin State/Province:CA\r\nAdmin Postal Code:94041\r\nAdmin Country:US\r\nAdmin Phone:+1.6506942100\r\nAdmin Phone Ext.:\r\nAdmin FAX:\r\nAdmin FAX Ext.:\r\nAdmin Email:dns-admin@corp.sourceforge.com\r\nTech ID:tuLQk02WUyJi47SS\r\nTech Name:DNS Technical\r\nTech Organization:SourceForge, Inc.\r\nTech Street1:650 Castro St.\r\nTech Street2:Suite 450\r\nTech Street3:\r\nTech City:Mountain View\r\nTech State/Province:CA\r\nTech Postal Code:94041\r\nTech Country:US\r\nTech Phone:+1.6506942100\r\nTech Phone Ext.:\r\nTech FAX:\r\nTech FAX Ext.:\r\nTech Email:dns-tech@corp.sourceforge.com\r\nName Server:NS-1.CH3.SOURCEFORGE.COM\r\nName Server:NS-2.CH3.SOURCEFORGE.COM\r\nName Server:NS-3.CORP.SOURCEFORGE.COM\r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\nName Server: \r\n\r\n\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/squatter.net",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Domain Name: SQUATTER.NET\n   Registrar: DOMAINDISCOVER\n   Whois Server: whois.domaindiscover.com\n   Referral URL: http://www.domaindiscover.com\n   Name Server: NS1.SBRACK.COM\n   Name Server: NS2.SBRACK.COM\n   Status: clientTransferProhibited\n   Updated Date: 07-nov-2007\n   Creation Date: 06-nov-1999\n   Expiration Date: 06-nov-2008\n\n>>> Last update of whois database: Thu, 26 Jun 2008 21:40:25 EDT <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.\nThis WHOIS database is provided for information purposes only. We do\nnot guarantee the accuracy of this data. The following uses of this \nsystem are expressly prohibited: (1) use of this system for unlawful \npurposes; (2) use of this system to collect information used in the \nmass transmission of unsolicited commercial messages in any medium; \n(3) use of high volume, automated, electronic processes against this \ndatabase. 
By submitting this query, you agree to abide by this \npolicy.\n\nRegistrant:\n   CustomPC\n   4047 N Bayberry St\n   Wichita, KS 67226-2418\n   US\n\n   Domain Name: SQUATTER.NET\n\n   Administrative Contact:\n      CustomPC\n      Derryl Brack\n      4047 N Bayberry St\n      Wichita, KS 67226-2418\n      US\n      3166402868\n      dbrack@cpcsales.com\n\n   Technical Contact, Zone Contact:\n      CustomPC\n      Brack, Derryl\n      4047 N Bayberry St\n      Wichita, KS 67226-2418\n      US\n      316-683-5010\n      316-683-5010 [fax]\n      brack@cpcsales.com\n\n   Domain created on 06-Nov-1999\n   Domain expires on 06-Nov-2008\n   Last updated on 05-Nov-2007\n\n   Domain servers in listed order:\n\n      NS1.SBRACK.COM              \n      NS2.SBRACK.COM              \n\nDomain registration and hosting powered by DomainDiscover\nAs low as $9/year, including FREE: responsive toll-free support, \nURL/frame/email forwarding, easy management system, and full featured DNS.\n\n"
  },
  {
    "path": "lib/pywhois/test/samples/whois/urlowl.com",
    "content": "\nWhois Server Version 2.0\n\nDomain names in the .com and .net domains can now be registered\nwith many different competing registrars. Go to http://www.internic.net\nfor detailed information.\n\n   Domain Name: URLOWL.COM\n   Registrar: NEW DREAM NETWORK, LLC\n   Whois Server: whois.dreamhost.com\n   Referral URL: http://www.dreamhost.com\n   Name Server: NS1.LINODE.COM\n   Name Server: NS2.LINODE.COM\n   Status: ok\n   Updated Date: 14-apr-2008\n   Creation Date: 14-apr-2008\n   Expiration Date: 14-apr-2009\n\n>>> Last update of whois database: Sun, 31 Aug 2008 00:18:23 UTC <<<\n\nNOTICE: The expiration date displayed in this record is the date the \nregistrar's sponsorship of the domain name registration in the registry is \ncurrently set to expire. This date does not necessarily reflect the expiration \ndate of the domain name registrant's agreement with the sponsoring \nregistrar.  Users may consult the sponsoring registrar's Whois database to \nview the registrar's reported date of expiration for this registration.\n\nTERMS OF USE: You are not authorized to access or query our Whois \ndatabase through the use of electronic processes that are high-volume and \nautomated except as reasonably necessary to register domain names or \nmodify existing registrations; the Data in VeriSign Global Registry \nServices' (\"VeriSign\") Whois database is provided by VeriSign for \ninformation purposes only, and to assist persons in obtaining information \nabout or related to a domain name registration record. VeriSign does not \nguarantee its accuracy. 
By submitting a Whois query, you agree to abide \nby the following terms of use: You agree that you may use this Data only \nfor lawful purposes and that under no circumstances will you use this Data \nto: (1) allow, enable, or otherwise support the transmission of mass \nunsolicited, commercial advertising or solicitations via e-mail, telephone, \nor facsimile; or (2) enable high volume, automated, electronic processes \nthat apply to VeriSign (or its computer systems). The compilation, \nrepackaging, dissemination or other use of this Data is expressly \nprohibited without the prior written consent of VeriSign. You agree not to \nuse electronic processes that are automated and high-volume to access or \nquery the Whois database except as reasonably necessary to register \ndomain names or modify existing registrations. VeriSign reserves the right \nto restrict your access to the Whois database in its sole discretion to ensure \noperational stability.  VeriSign may restrict or terminate your access to the \nWhois database for failure to abide by these terms of use. VeriSign \nreserves the right to modify these terms at any time. \n\nThe Registry database contains ONLY .COM, .NET, .EDU domains and\nRegistrars.\nLegal Stuff:\n\nThe information in DreamHost's whois database is to be used for\ninformational purposes only, and to obtain information on a\ndomain name registration. 
DreamHost does not guarantee its\naccuracy.\n\nYou are not authorized to query or access DreamHost's whois\ndatabase using high-volume, automated means without written\npermission from DreamHost.\n\nYou are not authorized to query or access DreamHost's whois\ndatabase in order to facilitate illegal activities, or to\nfacilitate the use of unsolicited bulk email, telephone, or\nfacsimile communications.\n\nYou are not authorized to collect, repackage, or redistribute the\ninformation in DreamHost's whois database.\n\nDreamHost may, at its sole discretion, restrict your access to\nthe whois database at any time, with or without notice. DreamHost\nmay modify these Terms of Service at any time, with or without\nnotice.\n\n+++++++++++++++++++++++++++++++++++++++++++\n\n   Domain Name: urlowl.com\n\n   Registrant Contact:\n      urlowl.com Private Registrant         urlowl.com@proxy.dreamhost.com\n      A Happy DreamHost Customer\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Administrative Contact:\n      urlowl.com Private Registrant         urlowl.com@proxy.dreamhost.com\n      A Happy DreamHost Customer\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Technical Contact:\n      urlowl.com Private Registrant         urlowl.com@proxy.dreamhost.com\n      A Happy DreamHost Customer\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Billing Contact:\n      urlowl.com Private Registrant         urlowl.com@proxy.dreamhost.com\n      A Happy DreamHost Customer\n      417 Associated Rd #324\n      Brea, CA 92821\n      US\n      +1.2139471032\n\n   Record created on 2008-04-14 14:34:20.\n   Record expires on 2009-04-14 14:34:20.\n\n   Domain servers in listed order:\n\n      ns1.linode.com\n      ns2.linode.com\n\nDreamHost whois server terms of service: http://whois.dreamhost.com/terms.html\n"
  },
  {
    "path": "lib/pywhois/test/test_main.py",
    "content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nimport unittest\nfrom whois import extract_domain\n\n\nclass TestExtractDomain(unittest.TestCase):\n    def test_simple_ascii_domain(self):\n        url = 'google.com'\n        domain = url\n        self.assertEqual(domain, extract_domain(url))\n\n    def test_ascii_with_schema_path_and_query(self):\n        url = 'https://www.google.com/search?q=why+is+domain+whois+such+a+mess'\n        domain = 'www.google.com'\n        self.assertEqual(domain, extract_domain(url))\n\n    def test_simple_unicode_domain(self):\n        url = 'http://нарояци.com/'\n        domain = 'нарояци.com'\n        self.assertEqual(domain, extract_domain(url))\n\n    def test_unicode_domain_and_tld(self):\n        url = 'http://россия.рф/'\n        domain = 'россия.рф'\n        self.assertEqual(domain, extract_domain(url))\n"
  },
  {
    "path": "lib/pywhois/test/test_nicclient.py",
    "content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nimport unittest\nfrom whois.whois import NICClient\n\n\nclass TestNICClient(unittest.TestCase):\n    def setUp(self):\n        self.client = NICClient()\n\n    def test_choose_server(self):\n        domain = 'рнидс.срб'\n        chosen = self.client.choose_server(domain)\n        suffix = domain.split('.')[-1].encode('idna').decode('utf-8')\n        correct = '{}.whois-servers.net'.format(suffix)\n        self.assertEqual(chosen, correct)\n"
  },
  {
    "path": "lib/pywhois/test/test_parser.py",
    "content": "from __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nimport unittest\n\nimport os\nimport sys\nsys.path.append('../')\n\nimport datetime\n\nimport simplejson\nfrom glob import glob\n\nfrom whois.parser import WhoisEntry, cast_date\n\nclass TestParser(unittest.TestCase):\n    def test_com_expiration(self):\n        data = \"\"\"\n        Status: ok\n        Updated Date: 14-apr-2008\n        Creation Date: 14-apr-2008\n        Expiration Date: 14-apr-2009\n        \n        >>> Last update of whois database: Sun, 31 Aug 2008 00:18:23 UTC <<<\n        \"\"\"\n        w = WhoisEntry.load('urlowl.com', data)\n        expires = w.expiration_date.strftime('%Y-%m-%d')\n        self.assertEqual(expires, '2009-04-14')\n\n    def test_cast_date(self):\n        dates = ['14-apr-2008', '2008-04-14']\n        for d in dates:\n            r = cast_date(d).strftime('%Y-%m-%d')\n            self.assertEqual(r, '2008-04-14')\n\n    def test_com_allsamples(self):\n        \"\"\"\n        Iterate over all of the sample/whois/*.com files, read the data,\n        parse it, and compare to the expected values in sample/expected/.\n        Only keys defined in keys_to_test will be tested.\n        \n        To generate fresh expected value dumps, see NOTE below.\n        \"\"\"\n        keys_to_test = ['domain_name', 'expiration_date', 'updated_date',\n                        'creation_date', 'status']\n        fail = 0\n        total = 0\n        for path in glob('test/samples/whois/*.com'):\n            # Parse whois data\n            domain = os.path.basename(path)\n            with open(path) as whois_fp:\n                data = whois_fp.read()\n            \n            w = WhoisEntry.load(domain, data)\n            results = {key: w.get(key) for key in keys_to_test}\n\n     
       # NOTE: Toggle condition below to write expected results from the\n            # parse results This will overwrite the existing expected results.\n            # Only do this if you've manually confirmed that the parser is\n            # generating correct values at its current state.\n            if False:\n                def date2str4json(obj):\n                    if isinstance(obj, datetime.datetime):\n                        return str(obj)\n                    raise TypeError(\n                            '{} is not JSON serializable'.format(repr(obj)))\n                outfile_name = os.path.join('test/samples/expected/', domain)\n                with open(outfile_name, 'w') as outfil:\n                    expected_results = simplejson.dump(results, outfil,\n                                                       default=date2str4json)\n                continue\n\n            # Load expected result\n            with open(os.path.join('test/samples/expected/', domain)) as infil:\n                expected_results = simplejson.load(infil)\n            \n            # Compare each key\n            for key in results:\n                total += 1\n                result = results.get(key)\n                if isinstance(result, datetime.datetime):\n                    result = str(result)\n                expected = expected_results.get(key)\n                if expected != result:\n                    print(\"%s \\t(%s):\\t %s != %s\" % (domain, key, result, expected))\n                    fail += 1\n            \n        if fail:\n            self.fail(\"%d/%d sample whois attributes were not parsed properly!\"\n                      % (fail, total))\n\n\n    def test_ca_parse(self):\n        data = \"\"\"\n        Domain name:           testdomain.ca\n        Domain status:         registered\n        Creation date:         2000/11/20\n        Expiry date:           2020/03/08\n        Updated date:          2016/04/29\n        DNSSEC:                
Unsigned\n\n        Registrar:\n            Name:              Webnames.ca Inc.\n            Number:            70\n\n        Registrant:\n            Name:              Test Industries\n\n        Administrative contact:\n            Name:              Test Person1\n            Postal address:    Test Address\n                               Test City, TestVille\n            Phone:             +1.1235434123x123\n            Fax:               +1.123434123\n            Email:             testperson1@testcompany.ca\n\n        Technical contact:\n            Name:              Test Persion2\n            Postal address:    Other TestAddress\n                               TestTown OCAS Canada\n            Phone:             +1.09876545123\n            Fax:               +1.12312993873\n            Email:             testpersion2@testcompany.ca\n\n        Name servers:\n            ns1.testserver1.net\n            ns2.testserver2.net\n        \"\"\"\n        results = WhoisEntry.load('testcompany.ca', data)\n        expected_results = {\n            \"updated_date\": \"2016-04-29 00:00:00\", \n            \"registrant_name\": [\n                \"Webnames.ca Inc.\", \n                \"Test Industries\", \n                \"Test Person1\", \n                \"Test Persion2\"\n            ], \n            \"fax\": [\n                \"+1.123434123\", \n                \"+1.12312993873\"\n            ], \n            \"dnssec\": \"Unsigned\", \n            \"registrant_number\": \"70\", \n            \"expiration_date\": \"2020-03-08 00:00:00\", \n            \"domain_name\": \"testdomain.ca\", \n            \"creation_date\": \"2000-11-20 00:00:00\", \n            \"phone\": [\n                \"+1.1235434123x123\", \n                \"+1.09876545123\"\n            ], \n            \"domain_status\": \"registered\", \n            \"emails\": [\n                \"testperson1@testcompany.ca\", \n                \"testpersion2@testcompany.ca\"\n            ]\n        }\n     
   \n        fail = 0\n        total = 0\n\n        # Compare each key\n        for key in expected_results:\n            total += 1\n            result = results.get(key)\n            if isinstance(result, datetime.datetime):\n                result = str(result)\n            expected = expected_results.get(key)\n            if expected != result:\n                print(\"%s \\t(%s):\\t %s != %s\" % (domain, key, result, expected))\n                fail += 1\n        if fail:\n            self.fail(\"%d/%d sample whois attributes were not parsed properly!\"\n                      % (fail, total))\n\n\n\n        \n\nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "lib/pywhois/test/test_query.py",
    "content": "# coding=utf-8\n\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nimport unittest\nfrom whois import whois\n\n\nclass TestQuery(unittest.TestCase):\n    def test_simple_ascii_domain(self):\n        domain = 'google.com'\n        whois(domain)\n\n    def test_simple_unicode_domain(self):\n        domain = 'нарояци.com'\n        whois(domain)\n\n    def test_unicode_domain_and_tld(self):\n        domain = 'россия.рф'\n        whois(domain)\n"
  },
  {
    "path": "lib/pywhois/whois/__init__.py",
    "content": "from __future__ import print_function\nfrom __future__ import absolute_import\nfrom __future__ import unicode_literals\nfrom __future__ import division\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nimport re\nimport sys\nimport os\nimport subprocess\nimport socket\nfrom .parser import WhoisEntry\nfrom .whois import NICClient\n\n\ndef whois(url, command=False):\n    # clean domain to expose netloc\n    ip_match = re.match(r\"^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$\", url)\n    if ip_match:\n        domain = url\n        try:\n            result = socket.gethostbyaddr(url)\n        except socket.herror as e:\n            pass\n        else:\n            domain = result[0]\n    else:\n        domain = extract_domain(url)\n    if command:\n        # try native whois command\n        r = subprocess.Popen(['whois', domain], stdout=subprocess.PIPE)\n        text = r.stdout.read()\n    else:\n        # try builtin client\n        nic_client = NICClient()\n        text = nic_client.whois_lookup(None, domain, 0)\n    return WhoisEntry.load(domain, text)\n\n\ndef extract_domain(url):\n    \"\"\"Extract the domain from the given URL\n\n    >>> extract_domain('http://www.google.com.au/tos.html')\n    'google.com.au'\n    >>> extract_domain('www.webscraping.com')\n    'webscraping.com'\n    >>> extract_domain('198.252.206.140')\n    'stackoverflow.com'\n    >>> extract_domain('102.112.2O7.net')\n    '2o7.net'\n    >>> extract_domain('1-0-1-1-1-0-1-1-1-1-1-1-1-.0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info')\n    '0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info'\n    \"\"\"\n    if re.match(r'\\d+\\.\\d+\\.\\d+\\.\\d+', url):\n        # this is an IP address\n        return socket.gethostbyaddr(url)[0]\n\n    tlds_path = os.path.join(os.getcwd(), os.path.dirname(__file__), 'data', 'tlds.txt')\n    with open(tlds_path) as tlds_fil:\n        suffixes = [line.lower().encode('utf-8')\n   
                 for line in (x.strip() for x in tlds_fil)\n                    if not line.startswith('#')]\n    suff = 'xn--p1ai'\n\n    if not isinstance(url, str):\n        url = url.decode('utf-8')\n    url = re.sub('^.*://', '', url)\n    url = url.split('/')[0].lower().encode('idna')\n\n    domain = []\n    for section in url.split(b'.'):\n        if section in suffixes:\n            domain.append(section)\n        else:\n            domain = [section]\n    return b'.'.join(domain).decode('idna')\n\n\nif __name__ == '__main__':\n    try:\n        url = sys.argv[1]\n    except IndexError:\n        print('Usage: %s url' % sys.argv[0])\n    else:\n        print(whois(url))\n"
  },
  {
    "path": "lib/pywhois/whois/data/tlds.txt",
    "content": "# Version 2016011500, Last Updated Fri Jan 15 07:07:01 2016 UTC\nAAA\nAARP\nABB\nABBOTT\nABOGADO\nAC\nACADEMY\nACCENTURE\nACCOUNTANT\nACCOUNTANTS\nACO\nACTIVE\nACTOR\nAD\nADS\nADULT\nAE\nAEG\nAERO\nAF\nAFL\nAG\nAGENCY\nAI\nAIG\nAIRFORCE\nAIRTEL\nAL\nALLFINANZ\nALSACE\nAM\nAMICA\nAMSTERDAM\nANALYTICS\nANDROID\nAO\nAPARTMENTS\nAPP\nAPPLE\nAQ\nAQUARELLE\nAR\nARAMCO\nARCHI\nARMY\nARPA\nARTE\nAS\nASIA\nASSOCIATES\nAT\nATTORNEY\nAU\nAUCTION\nAUDI\nAUDIO\nAUTHOR\nAUTO\nAUTOS\nAW\nAX\nAXA\nAZ\nAZURE\nBA\nBAIDU\nBAND\nBANK\nBAR\nBARCELONA\nBARCLAYCARD\nBARCLAYS\nBARGAINS\nBAUHAUS\nBAYERN\nBB\nBBC\nBBVA\nBCN\nBD\nBE\nBEATS\nBEER\nBENTLEY\nBERLIN\nBEST\nBET\nBF\nBG\nBH\nBHARTI\nBI\nBIBLE\nBID\nBIKE\nBING\nBINGO\nBIO\nBIZ\nBJ\nBLACK\nBLACKFRIDAY\nBLOOMBERG\nBLUE\nBM\nBMS\nBMW\nBN\nBNL\nBNPPARIBAS\nBO\nBOATS\nBOEHRINGER\nBOM\nBOND\nBOO\nBOOK\nBOOTS\nBOSCH\nBOSTIK\nBOT\nBOUTIQUE\nBR\nBRADESCO\nBRIDGESTONE\nBROADWAY\nBROKER\nBROTHER\nBRUSSELS\nBS\nBT\nBUDAPEST\nBUGATTI\nBUILD\nBUILDERS\nBUSINESS\nBUY\nBUZZ\nBV\nBW\nBY\nBZ\nBZH\nCA\nCAB\nCAFE\nCAL\nCALL\nCAMERA\nCAMP\nCANCERRESEARCH\nCANON\nCAPETOWN\nCAPITAL\nCAR\nCARAVAN\nCARDS\nCARE\nCAREER\nCAREERS\nCARS\nCARTIER\nCASA\nCASH\nCASINO\nCAT\nCATERING\nCBA\nCBN\nCC\nCD\nCEB\nCENTER\nCEO\nCERN\nCF\nCFA\nCFD\nCG\nCH\nCHANEL\nCHANNEL\nCHAT\nCHEAP\nCHLOE\nCHRISTMAS\nCHROME\nCHURCH\nCI\nCIPRIANI\nCIRCLE\nCISCO\nCITIC\nCITY\nCITYEATS\nCK\nCL\nCLAIMS\nCLEANING\nCLICK\nCLINIC\nCLINIQUE\nCLOTHING\nCLOUD\nCLUB\nCLUBMED\nCM\nCN\nCO\nCOACH\nCODES\nCOFFEE\nCOLLEGE\nCOLOGNE\nCOM\nCOMMBANK\nCOMMUNITY\nCOMPANY\nCOMPARE\nCOMPUTER\nCOMSEC\nCONDOS\nCONSTRUCTION\nCONSULTING\nCONTACT\nCONTRACTORS\nCOOKING\nCOOL\nCOOP\nCORSICA\nCOUNTRY\nCOUPONS\nCOURSES\nCR\nCREDIT\nCREDITCARD\nCREDITUNION\nCRICKET\nCROWN\nCRS\nCRUISES\nCSC\nCU\nCUISINELLA\nCV\nCW\nCX\nCY\nCYMRU\nCYOU\nCZ\nDABUR\nDAD\nDANCE\nDATE\nDATING\nDATSUN\nDAY\nDCLK\nDE\nDEALER\nDEALS\nDEGREE\nDELIVERY\nDELL\nDELTA\nDEMOCRAT\nDENTAL\nDENTIST\nDESI\nDESIGN\nDEV\nDIAMONDS\nDIET\nDIGI
TAL\nDIRECT\nDIRECTORY\nDISCOUNT\nDJ\nDK\nDM\nDNP\nDO\nDOCS\nDOG\nDOHA\nDOMAINS\nDOOSAN\nDOWNLOAD\nDRIVE\nDUBAI\nDURBAN\nDVAG\nDZ\nEARTH\nEAT\nEC\nEDU\nEDUCATION\nEE\nEG\nEMAIL\nEMERCK\nENERGY\nENGINEER\nENGINEERING\nENTERPRISES\nEPSON\nEQUIPMENT\nER\nERNI\nES\nESQ\nESTATE\nET\nEU\nEUROVISION\nEUS\nEVENTS\nEVERBANK\nEXCHANGE\nEXPERT\nEXPOSED\nEXPRESS\nFAGE\nFAIL\nFAIRWINDS\nFAITH\nFAMILY\nFAN\nFANS\nFARM\nFASHION\nFAST\nFEEDBACK\nFERRERO\nFI\nFILM\nFINAL\nFINANCE\nFINANCIAL\nFIRESTONE\nFIRMDALE\nFISH\nFISHING\nFIT\nFITNESS\nFJ\nFK\nFLIGHTS\nFLORIST\nFLOWERS\nFLSMIDTH\nFLY\nFM\nFO\nFOO\nFOOTBALL\nFORD\nFOREX\nFORSALE\nFORUM\nFOUNDATION\nFOX\nFR\nFRESENIUS\nFRL\nFROGANS\nFUND\nFURNITURE\nFUTBOL\nFYI\nGA\nGAL\nGALLERY\nGAME\nGARDEN\nGB\nGBIZ\nGD\nGDN\nGE\nGEA\nGENT\nGENTING\nGF\nGG\nGGEE\nGH\nGI\nGIFT\nGIFTS\nGIVES\nGIVING\nGL\nGLASS\nGLE\nGLOBAL\nGLOBO\nGM\nGMAIL\nGMO\nGMX\nGN\nGOLD\nGOLDPOINT\nGOLF\nGOO\nGOOG\nGOOGLE\nGOP\nGOT\nGOV\nGP\nGQ\nGR\nGRAINGER\nGRAPHICS\nGRATIS\nGREEN\nGRIPE\nGROUP\nGS\nGT\nGU\nGUCCI\nGUGE\nGUIDE\nGUITARS\nGURU\nGW\nGY\nHAMBURG\nHANGOUT\nHAUS\nHEALTHCARE\nHELP\nHERE\nHERMES\nHIPHOP\nHITACHI\nHIV\nHK\nHM\nHN\nHOCKEY\nHOLDINGS\nHOLIDAY\nHOMEDEPOT\nHOMES\nHONDA\nHORSE\nHOST\nHOSTING\nHOTELES\nHOTMAIL\nHOUSE\nHOW\nHR\nHSBC\nHT\nHU\nHYUNDAI\nIBM\nICBC\nICE\nICU\nID\nIE\nIFM\nIINET\nIL\nIM\nIMMO\nIMMOBILIEN\nIN\nINDUSTRIES\nINFINITI\nINFO\nING\nINK\nINSTITUTE\nINSURANCE\nINSURE\nINT\nINTERNATIONAL\nINVESTMENTS\nIO\nIPIRANGA\nIQ\nIR\nIRISH\nIS\nISELECT\nIST\nISTANBUL\nIT\nITAU\nIWC\nJAGUAR\nJAVA\nJCB\nJE\nJETZT\nJEWELRY\nJLC\nJLL\nJM\nJMP\nJO\nJOBS\nJOBURG\nJOT\nJOY\nJP\nJPRS\nJUEGOS\nKAUFEN\nKDDI\nKE\nKFH\nKG\nKH\nKI\nKIA\nKIM\nKINDER\nKITCHEN\nKIWI\nKM\nKN\nKOELN\nKOMATSU\nKP\nKPN\nKR\nKRD\nKRED\nKW\nKY\nKYOTO\nKZ\nLA\nLACAIXA\nLAMBORGHINI\nLAMER\nLANCASTER\nLAND\nLANDROVER\nLASALLE\nLAT\nLATROBE\nLAW\nLAWYER\nLB\nLC\nLDS\nLEASE\nLECLERC\nLEGAL\nLEXUS\nLGBT\nLI\nLIAISON\nLIDL\nLIFE\nLIFESTYLE\nLIGHTING\nLIKE\nLIMITED\nLIMO\nLINCOLN\nLINDE\nLINK\
nLIVE\nLIVING\nLIXIL\nLK\nLOAN\nLOANS\nLOL\nLONDON\nLOTTE\nLOTTO\nLOVE\nLR\nLS\nLT\nLTD\nLTDA\nLU\nLUPIN\nLUXE\nLUXURY\nLV\nLY\nMA\nMADRID\nMAIF\nMAISON\nMAKEUP\nMAN\nMANAGEMENT\nMANGO\nMARKET\nMARKETING\nMARKETS\nMARRIOTT\nMBA\nMC\nMD\nME\nMED\nMEDIA\nMEET\nMELBOURNE\nMEME\nMEMORIAL\nMEN\nMENU\nMEO\nMG\nMH\nMIAMI\nMICROSOFT\nMIL\nMINI\nMK\nML\nMM\nMMA\nMN\nMO\nMOBI\nMOBILY\nMODA\nMOE\nMOI\nMOM\nMONASH\nMONEY\nMONTBLANC\nMORMON\nMORTGAGE\nMOSCOW\nMOTORCYCLES\nMOV\nMOVIE\nMOVISTAR\nMP\nMQ\nMR\nMS\nMT\nMTN\nMTPC\nMTR\nMU\nMUSEUM\nMUTUELLE\nMV\nMW\nMX\nMY\nMZ\nNA\nNADEX\nNAGOYA\nNAME\nNAVY\nNC\nNE\nNEC\nNET\nNETBANK\nNETWORK\nNEUSTAR\nNEW\nNEWS\nNEXUS\nNF\nNG\nNGO\nNHK\nNI\nNICO\nNINJA\nNISSAN\nNL\nNO\nNOKIA\nNORTON\nNOWRUZ\nNP\nNR\nNRA\nNRW\nNTT\nNU\nNYC\nNZ\nOBI\nOFFICE\nOKINAWA\nOM\nOMEGA\nONE\nONG\nONL\nONLINE\nOOO\nORACLE\nORANGE\nOR\nORG\nORGANIC\nORIGINS\nOSAKA\nOTSUKA\nOVH\nPA\nPAGE\nPANERAI\nPARIS\nPARS\nPARTNERS\nPARTS\nPARTY\nPE\nPET\nPF\nPG\nPH\nPHARMACY\nPHILIPS\nPHOTO\nPHOTOGRAPHY\nPHOTOS\nPHYSIO\nPIAGET\nPICS\nPICTET\nPICTURES\nPID\nPIN\nPING\nPINK\nPIZZA\nPK\nPL\nPLACE\nPLAY\nPLAYSTATION\nPLUMBING\nPLUS\nPM\nPN\nPOHL\nPOKER\nPORN\nPOST\nPR\nPRAXI\nPRESS\nPRO\nPROD\nPRODUCTIONS\nPROF\nPROMO\nPROPERTIES\nPROPERTY\nPROTECTION\nPS\nPT\nPUB\nPW\nPY\nQA\nQPON\nQUEBEC\nRACING\nRE\nREAD\nREALTOR\nREALTY\nRECIPES\nRED\nREDSTONE\nREDUMBRELLA\nREHAB\nREISE\nREISEN\nREIT\nREN\nRENT\nRENTALS\nREPAIR\nREPORT\nREPUBLICAN\nREST\nRESTAURANT\nREVIEW\nREVIEWS\nREXROTH\nRICH\nRICOH\nRIO\nRIP\nRO\nROCHER\nROCKS\nRODEO\nROOM\nRS\nRSVP\nRU\nRUHR\nRUN\nRW\nRWE\nRYUKYU\nSA\nSAARLAND\nSAFE\nSAFETY\nSAKURA\nSALE\nSALON\nSAMSUNG\nSANDVIK\nSANDVIKCOROMANT\nSANOFI\nSAP\nSAPO\nSARL\nSAS\nSAXO\nSB\nSBS\nSC\nSCA\nSCB\nSCHAEFFLER\nSCHMIDT\nSCHOLARSHIPS\nSCHOOL\nSCHULE\nSCHWARZ\nSCIENCE\nSCOR\nSCOT\nSD\nSE\nSEAT\nSECURITY\nSEEK\nSELECT\nSENER\nSERVICES\nSEVEN\nSEW\nSEX\nSEXY\nSFR\nSG\nSH\nSHARP\nSHELL\nSHIA\nSHIKSHA\nSHOES\nSHOW\nSHRIRAM\nSI\nSINGLES\nSITE\nSJ\nSK\nSKI\nSKIN\nSKY\nSKYPE\
nSL\nSM\nSMILE\nSN\nSNCF\nSO\nSOCCER\nSOCIAL\nSOFTWARE\nSOHU\nSOLAR\nSOLUTIONS\nSONY\nSOY\nSPACE\nSPIEGEL\nSPREADBETTING\nSR\nSRL\nST\nSTADA\nSTAR\nSTARHUB\nSTATEFARM\nSTATOIL\nSTC\nSTCGROUP\nSTOCKHOLM\nSTORAGE\nSTUDIO\nSTUDY\nSTYLE\nSU\nSUCKS\nSUPPLIES\nSUPPLY\nSUPPORT\nSURF\nSURGERY\nSUZUKI\nSV\nSWATCH\nSWISS\nSX\nSY\nSYDNEY\nSYMANTEC\nSYSTEMS\nSZ\nTAB\nTAIPEI\nTATAMOTORS\nTATAR\nTATTOO\nTAX\nTAXI\nTC\nTCI\nTD\nTEAM\nTECH\nTECHNOLOGY\nTEL\nTELEFONICA\nTEMASEK\nTENNIS\nTF\nTG\nTH\nTHD\nTHEATER\nTHEATRE\nTICKETS\nTIENDA\nTIPS\nTIRES\nTIROL\nTJ\nTK\nTL\nTM\nTN\nTO\nTODAY\nTOKYO\nTOOLS\nTOP\nTORAY\nTOSHIBA\nTOURS\nTOWN\nTOYOTA\nTOYS\nTR\nTRADE\nTRADING\nTRAINING\nTRAVEL\nTRAVELERS\nTRAVELERSINSURANCE\nTRUST\nTRV\nTT\nTUBE\nTUI\nTUSHU\nTV\nTW\nTZ\nUA\nUBS\nUG\nUK\nUNIVERSITY\nUNO\nUOL\nUS\nUY\nUZ\nVA\nVACATIONS\nVANA\nVC\nVE\nVEGAS\nVENTURES\nVERISIGN\nVERSICHERUNG\nVET\nVG\nVI\nVIAJES\nVIDEO\nVILLAS\nVIN\nVIP\nVIRGIN\nVISION\nVISTA\nVISTAPRINT\nVIVA\nVLAANDEREN\nVN\nVODKA\nVOLKSWAGEN\nVOTE\nVOTING\nVOTO\nVOYAGE\nVU\nWALES\nWALTER\nWANG\nWANGGOU\nWATCH\nWATCHES\nWEATHER\nWEBCAM\nWEBER\nWEBSITE\nWED\nWEDDING\nWEIR\nWF\nWHOSWHO\nWIEN\nWIKI\nWILLIAMHILL\nWIN\nWINDOWS\nWINE\nWME\nWORK\nWORKS\nWORLD\nWS\nWTC\nWTF\nXBOX\nXEROX\nXIN\nXN--11B4C3D\nXN--1QQW23A\nXN--30RR7Y\nXN--3BST00M\nXN--3DS443G\nXN--3E0B707E\nXN--3PXU8K\nXN--42C2D9A\nXN--45BRJ9C\nXN--45Q11C\nXN--4GBRIM\nXN--55QW42G\nXN--55QX5D\nXN--6FRZ82G\nXN--6QQ986B3XL\nXN--80ADXHKS\nXN--80AO21A\nXN--80ASEHDB\nXN--80ASWG\nXN--90A3AC\nXN--90AIS\nXN--9DBQ2A\nXN--9ET52U\nXN--B4W605FERD\nXN--C1AVG\nXN--C2BR7G\nXN--CG4BKI\nXN--CLCHC0EA0B2G2A9GCD\nXN--CZR694B\nXN--CZRS0T\nXN--CZRU2D\nXN--D1ACJ3B\nXN--D1ALF\nXN--ECKVDTC9D\nXN--EFVY88H\nXN--ESTV75G\nXN--FHBEI\nXN--FIQ228C5HS\nXN--FIQ64B\nXN--FIQS8S\nXN--FIQZ9S\nXN--FJQ720A\nXN--FLW351E\nXN--FPCRJ9C3D\nXN--FZC2C9E2C\nXN--GECRJ9C\nXN--H2BRJ9C\nXN--HXT814E\nXN--I1B6B1A6A2E\nXN--IMR513N\nXN--IO0A7I\nXN--J1AEF\nXN--J1AMH\nXN--J6W193G\nXN--JLQ61U9W7B\nXN--KCRX77D1X4A\nXN--KPRW13D\nXN--
KPRY57D\nXN--KPU716F\nXN--KPUT3I\nXN--L1ACC\nXN--LGBBAT1AD8J\nXN--MGB9AWBF\nXN--MGBA3A3EJT\nXN--MGBA3A4F16A\nXN--MGBAAM7A8H\nXN--MGBAB2BD\nXN--MGBAYH7GPA\nXN--MGBB9FBPOB\nXN--MGBBH1A71E\nXN--MGBC0A9AZCG\nXN--MGBERP4A5D4AR\nXN--MGBPL2FH\nXN--MGBT3DHD\nXN--MGBTX2B\nXN--MGBX4CD0AB\nXN--MK1BU44C\nXN--MXTQ1M\nXN--NGBC5AZD\nXN--NGBE9E0A\nXN--NODE\nXN--NQV7F\nXN--NQV7FS00EMA\nXN--NYQY26A\nXN--O3CW4H\nXN--OGBPF8FL\nXN--P1ACF\nXN--P1AI\nXN--PBT977C\nXN--PGBS0DH\nXN--PSSY2U\nXN--Q9JYB4C\nXN--QCKA1PMC\nXN--QXAM\nXN--RHQV96G\nXN--S9BRJ9C\nXN--SES554G\nXN--T60B56A\nXN--TCKWE\nXN--UNUP4Y\nXN--VERMGENSBERATER-CTB\nXN--VERMGENSBERATUNG-PWB\nXN--VHQUV\nXN--VUQ861B\nXN--WGBH1C\nXN--WGBL6A\nXN--XHQ521B\nXN--XKC2AL3HYE2A\nXN--XKC2DL3A5EE0H\nXN--Y9A3AQ\nXN--YFRO4I67O\nXN--YGBI2AMMX\nXN--ZFR164B\nXPERIA\nXXX\nXYZ\nYACHTS\nYAMAXUN\nYANDEX\nYE\nYODOBASHI\nYOGA\nYOKOHAMA\nYOUTUBE\nYT\nZA\nZARA\nZERO\nZIP\nZM\nZONE\nZUERICH\nZW\n"
  },
  {
    "path": "lib/pywhois/whois/parser.py",
    "content": "# coding=utf-8\n# parser.py - Module for parsing whois response data\n# Copyright (c) 2008 Andrey Petrov\n#\n# This module is part of pywhois and is released under\n# the MIT license: http://www.opensource.org/licenses/mit-license.php\n\nfrom __future__ import absolute_import\nfrom __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nfrom builtins import str\nfrom past.builtins import basestring\n\nimport json\nfrom datetime import datetime\nimport re\ntry:\n    import dateutil.parser as dp\n    from .time_zones import tz_data\n    DATEUTIL = True\nexcept ImportError:\n    DATEUTIL = False\n\nEMAIL_REGEX = \"[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\"\n\nKNOWN_FORMATS = [\n    '%d-%b-%Y', \t\t\t\t# 02-jan-2000\n    '%Y-%m-%d', \t\t\t\t# 2000-01-02\n    '%d.%m.%Y', \t\t\t\t# 2.1.2000\n    '%Y.%m.%d',                 # 2000.01.02\n    '%Y/%m/%d',                 # 2000/01/02\n    '%d/%m/%Y',                 # 02/01/2013\n    '%Y. %m. %d.',              # 2000. 01. 
02.\n    '%Y.%m.%d %H:%M:%S',        # 2014.03.08 10:28:24\n    '%d-%b-%Y %H:%M:%S %Z',\t\t# 24-Jul-2009 13:20:03 UTC\n    '%a %b %d %H:%M:%S %Z %Y',  # Tue Jun 21 23:59:59 GMT 2011\n    '%Y-%m-%dT%H:%M:%SZ',       # 2007-01-26T19:10:31Z\n    '%Y-%m-%dT%H:%M:%S%z',      # 2013-12-06T08:17:22-0800\n    '%Y-%m-%d %H:%M:%SZ',       # 2000-08-22 18:55:20Z\n    '%Y-%m-%d %H:%M:%S',        # 2000-08-22 18:55:20\n    '%d %b %Y %H:%M:%S',        # 08 Apr 2013 05:44:00\n    '%d/%m/%Y %H:%M:%S',     # 23/04/2015 12:00:07 EEST\n    '%d/%m/%Y %H:%M:%S %Z',     # 23/04/2015 12:00:07 EEST\n    '%d/%m/%Y %H:%M:%S.%f %Z',  # 23/04/2015 12:00:07.619546 EEST\n]\n\n\nclass PywhoisError(Exception):\n    pass\n\n\ndef datetime_parse(s):\n    for known_format in KNOWN_FORMATS:\n        try:\n            s = datetime.strptime(s, known_format)\n            break\n        except ValueError as e:\n            pass  # Wrong format, keep trying\n    return s\n\ndef cast_date(s, dayfirst=False, yearfirst=False):\n    \"\"\"Convert any date string found in WHOIS to a datetime object.\n    \"\"\"\n    if DATEUTIL:\n        try:\n            return dp.parse(\n                s,\n                tzinfos=tz_data,\n                dayfirst=dayfirst,\n                yearfirst=yearfirst\n            ).replace(tzinfo=None)\n        except Exception:\n            return datetime_parse(s)\n    else:\n        return datetime_parse(s)\n\n\nclass WhoisEntry(dict):\n    \"\"\"Base class for parsing a Whois entries.\n    \"\"\"\n    # regular expressions to extract domain data from whois profile\n    # child classes will override this\n    _regex = {\n        'domain_name':          'Domain Name: *(.+)',\n        'registrar':            'Registrar: *(.+)',\n        'whois_server':         'Whois Server: *(.+)',\n        'referral_url':         'Referral URL: *(.+)',  # http url of whois_server\n        'updated_date':         'Updated Date: *(.+)',\n        'creation_date':        'Creation Date: *(.+)',\n   
     'expiration_date':      'Expir\\w+ Date: *(.+)',\n        'name_servers':         'Name Server: *(.+)',  # list of name servers\n        'status':               'Status: *(.+)',  # list of statuses\n        'emails':               EMAIL_REGEX,  # list of email s\n        'dnssec':               'dnssec: *([\\S]+)',\n        'name':                 'Registrant Name: *(.+)',\n        'org':                  'Registrant\\s*Organization: *(.+)',\n        'address':              'Registrant Street: *(.+)',\n        'city':                 'Registrant City: *(.+)',\n        'state':                'Registrant State/Province: *(.+)',\n        'zipcode':              'Registrant Postal Code: *(.+)',\n        'country':              'Registrant Country: *(.+)',\n    }\n    dayfirst = False\n    yearfirst = False\n\n    def __init__(self, domain, text, regex=None):\n        if 'This TLD has no whois server, but you can access the whois database at' in text:\n            raise PywhoisError(text)\n        else:\n            self.domain = domain\n            self.text = text\n            if regex is not None:\n                self._regex = regex\n            self.parse()\n\n    def parse(self):\n        \"\"\"The first time an attribute is called it will be calculated here.\n        The attribute is then set to be accessed directly by subsequent calls.\n        \"\"\"\n        for attr, regex in list(self._regex.items()):\n            if regex:\n                values = []\n                for value in re.findall(regex, self.text, re.IGNORECASE):\n                    value = value.strip()\n                    if value and isinstance(value, basestring) and not value.isdigit():\n                        # try casting to date format\n                        value = cast_date(\n                            value,\n                            dayfirst=self.dayfirst,\n                            yearfirst=self.yearfirst)\n                    if value and value not in values:\n     
                   # avoid duplicates\n                        values.append(value)\n                if values and attr in ('registrar', 'whois_server', 'referral_url'):\n                    values = values[-1] # ignore junk\n                if len(values) == 1:\n                    values = values[0]\n                elif not values:\n                    values = None\n\n                self[attr] = values\n\n\n    def __setitem__(self, name, value):\n        super(WhoisEntry, self).__setitem__(name, value)\n\n\n    def __getattr__(self, name):\n        return self.get(name)\n\n\n    def __str__(self):\n        handler = lambda e: str(e)\n        return json.dumps(self, indent=2, default=handler)\n\n    def __getstate__(self):\n        return self.__dict__\n\n    def __setstate__(self, state):\n        self.__dict__ = state\n\n    @staticmethod\n    def load(domain, text):\n        \"\"\"Given whois output in ``text``, return an instance of ``WhoisEntry``\n        that represents its parsed contents.\n        \"\"\"\n        if text.strip() == 'No whois server is known for this kind of object.':\n            raise PywhoisError(text)\n\n        if domain.endswith('.com'):\n            return WhoisCom(domain, text)\n        elif domain.endswith('.net'):\n            return WhoisNet(domain, text)\n        elif domain.endswith('.org'):\n            return WhoisOrg(domain, text)\n        elif domain.endswith('.name'):\n            return WhoisName(domain, text)\n        elif domain.endswith('.me'):\n            return WhoisMe(domain, text)\n        elif domain.endswith('.au'):\n            return WhoisAU(domain, text)\n        elif domain.endswith('.ru'):\n            return WhoisRu(domain, text)\n        elif domain.endswith('.us'):\n            return WhoisUs(domain, text)\n        elif domain.endswith('.uk'):\n            return WhoisUk(domain, text)\n        elif domain.endswith('.fr'):\n            return WhoisFr(domain, text)\n        elif 
domain.endswith('.nl'):\n            return WhoisNl(domain, text)\n        elif domain.endswith('.fi'):\n            return WhoisFi(domain, text)\n        elif domain.endswith('.jp'):\n            return WhoisJp(domain, text)\n        elif domain.endswith('.pl'):\n            return WhoisPl(domain, text)\n        elif domain.endswith('.br'):\n            return WhoisBr(domain, text)\n        elif domain.endswith('.eu'):\n            return WhoisEu(domain, text)\n        elif domain.endswith('.ee'):\n            return WhoisEe(domain, text)\n        elif domain.endswith('.kr'):\n            return WhoisKr(domain, text)\n        elif domain.endswith('.pt'):\n            return WhoisPt(domain, text)\n        elif domain.endswith('.bg'):\n            return WhoisBg(domain, text)\n        elif domain.endswith('.de'):\n            return WhoisDe(domain, text)\n        elif domain.endswith('.at'):\n            return WhoisAt(domain, text)\n        elif domain.endswith('.ca'):\n            return WhoisCa(domain, text)\n        elif domain.endswith('.be'):\n            return WhoisBe(domain, text)\n        elif domain.endswith('.рф'):\n            return WhoisRf(domain, text)\n        elif domain.endswith('.info'):\n            return WhoisInfo(domain, text)\n        elif domain.endswith('.su'):\n            return WhoisSu(domain, text)\n        elif domain.endswith('.kg'):\n            return WhoisKg(domain, text)\n        elif domain.endswith('.io'):\n            return WhoisIo(domain, text)\n        elif domain.endswith('.biz'):\n            return WhoisBiz(domain, text)\n        elif domain.endswith('.mobi'):\n            return WhoisMobi(domain, text)\n        elif domain.endswith('.ch'):\n            return WhoisChLi(domain, text)\n        elif domain.endswith('.li'):\n            return WhoisChLi(domain, text)\n        elif domain.endswith('.id'):\n            return WhoisID(domain, text)\n        else:\n            return WhoisEntry(domain, text)\n\n\nclass 
WhoisCom(WhoisEntry):\n    \"\"\"Whois parser for .com domains\n    \"\"\"\n    def __init__(self, domain, text):\n        if 'No match for \"' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text)\n\n\nclass WhoisNet(WhoisEntry):\n    \"\"\"Whois parser for .net domains\n    \"\"\"\n    def __init__(self, domain, text):\n        if 'No match for \"' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text)\n\n\nclass WhoisOrg(WhoisEntry):\n    \"\"\"Whois parser for .org domains\n    \"\"\"\n    regex = {\n        'domain_name':      'Domain Name: *(.+)',\n        'registrar':        'Registrar: *(.+)',\n        'whois_server':     'Whois Server: *(.+)', # empty usually\n        'referral_url':     'Referral URL: *(.+)', # http url of whois_server: empty usually\n        'updated_date':     'Updated Date: *(.+)',\n        'creation_date':    'Creation Date: *(.+)',\n        'expiration_date':  'Registry Expiry Date: *(.+)',\n        'name_servers':     'Name Server: *(.+)', # list of name servers\n        'status':           'Status: *(.+)', # list of statuses\n        'emails':           EMAIL_REGEX, # list of email addresses\n    }\n\n    def __init__(self, domain, text):\n        if text.strip() == 'NOT FOUND':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text)\n\n\nclass WhoisRu(WhoisEntry):\n    \"\"\"Whois parser for .ru domains\n    \"\"\"\n    regex = {\n        'domain_name': 'domain: *(.+)',\n        'registrar': 'registrar: *(.+)',\n        'creation_date': 'created: *(.+)',\n        'expiration_date': 'paid-till: *(.+)',\n        'name_servers': 'nserver: *(.+)',  # list of name servers\n        'status': 'state: *(.+)',  # list of statuses\n        'emails': EMAIL_REGEX,  # list of email addresses\n        'org': 'org: *(.+)'\n    }\n\n    def __init__(self, domain, text):\n        
if text.strip() == 'No entries found':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisNl(WhoisEntry):\n    \"\"\"Whois parser for .nl domains\n    \"\"\"\n    regex = {\n        'name': None,\n        'address': None,\n        'zip_code': None,\n        'city': None,\n        'country': None\n    }\n\n    def __init__(self, domain, text):\n        if text.endswith('is free'):\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n        match = re.compile('Registrar:(.*?)DNSSEC', re.DOTALL).search(text)\n        if match:\n            lines = [line.strip() for line in match.groups()[0].strip().splitlines()]\n            self['name'] = lines[0]\n            self['address'] = lines[1]\n            if len(lines) == 4:\n                self['zip_code'], _, self['city'] = lines[2].partition(' ')\n            self['country'] = lines[-1]\n\n\n\nclass WhoisName(WhoisEntry):\n    \"\"\"Whois parser for .name domains\n    \"\"\"\n    regex = {\n        'domain_name_id':  'Domain Name ID: *(.+)',\n        'domain_name':     'Domain Name: *(.+)',\n        'registrar_id':    'Sponsoring Registrar ID: *(.+)',\n        'registrar':       'Sponsoring Registrar: *(.+)',\n        'registrant_id':   'Registrant ID: *(.+)',\n        'admin_id':        'Admin ID: *(.+)',\n        'technical_id':    'Tech ID: *(.+)',\n        'billing_id':      'Billing ID: *(.+)',\n        'creation_date':   'Created On: *(.+)',\n        'expiration_date': 'Expires On: *(.+)',\n        'updated_date':    'Updated On: *(.+)',\n        'name_server_ids': 'Name Server ID: *(.+)',  # list of name server ids\n        'name_servers':    'Name Server: *(.+)',  # list of name servers\n        'status':          'Domain Status: *(.+)',  # list of statuses\n    }\n\n    def __init__(self, domain, text):\n        if 'No match for ' in text:\n            raise 
PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisUs(WhoisEntry):\n    \"\"\"Whois parser for .us domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'Domain Name: *(.+)',\n        'domain__id':                     'Domain ID: *(.+)',\n        'registrar':                      'Sponsoring Registrar: *(.+)',\n        'registrar_id':                   'Sponsoring Registrar IANA ID: *(.+)',\n        'registrar_url':                  'Registrar URL \\(registration services\\): *(.+)',\n        'status':                         'Domain Status: *(.+)',  # list of statuses\n        'registrant_id':                  'Registrant ID: *(.+)',\n        'registrant_name':                'Registrant Name: *(.+)',\n        'registrant_address1':            'Registrant Address1: *(.+)',\n        'registrant_address2':            'Registrant Address2: *(.+)',\n        'registrant_city':                'Registrant City: *(.+)',\n        'registrant_state_province':      'Registrant State/Province: *(.+)',\n        'registrant_postal_code':         'Registrant Postal Code: *(.+)',\n        'registrant_country':             'Registrant Country: *(.+)',\n        'registrant_country_code':        'Registrant Country Code: *(.+)',\n        'registrant_phone_number':        'Registrant Phone Number: *(.+)',\n        'registrant_email':               'Registrant Email: *(.+)',\n        'registrant_application_purpose': 'Registrant Application Purpose: *(.+)',\n        'registrant_nexus_category':      'Registrant Nexus Category: *(.+)',\n        'admin_id':                       'Administrative Contact ID: *(.+)',\n        'admin_name':                     'Administrative Contact Name: *(.+)',\n        'admin_address1':                 'Administrative Contact Address1: *(.+)',\n        'admin_address2':                 'Administrative Contact Address2: *(.+)',\n        'admin_city':                   
  'Administrative Contact City: *(.+)',\n        'admin_state_province':           'Administrative Contact State/Province: *(.+)',\n        'admin_postal_code':              'Administrative Contact Postal Code: *(.+)',\n        'admin_country':                  'Administrative Contact Country: *(.+)',\n        'admin_country_code':             'Administrative Contact Country Code: *(.+)',\n        'admin_phone_number':             'Administrative Contact Phone Number: *(.+)',\n        'admin_email':                    'Administrative Contact Email: *(.+)',\n        'admin_application_purpose':      'Administrative Application Purpose: *(.+)',\n        'admin_nexus_category':           'Administrative Nexus Category: *(.+)',\n        'billing_id':                     'Billing Contact ID: *(.+)',\n        'billing_name':                   'Billing Contact Name: *(.+)',\n        'billing_address1':               'Billing Contact Address1: *(.+)',\n        'billing_address2':               'Billing Contact Address2: *(.+)',\n        'billing_city':                   'Billing Contact City: *(.+)',\n        'billing_state_province':         'Billing Contact State/Province: *(.+)',\n        'billing_postal_code':            'Billing Contact Postal Code: *(.+)',\n        'billing_country':                'Billing Contact Country: *(.+)',\n        'billing_country_code':           'Billing Contact Country Code: *(.+)',\n        'billing_phone_number':           'Billing Contact Phone Number: *(.+)',\n        'billing_email':                  'Billing Contact Email: *(.+)',\n        'billing_application_purpose':    'Billing Application Purpose: *(.+)',\n        'billing_nexus_category':         'Billing Nexus Category: *(.+)',\n        'tech_id':                        'Technical Contact ID: *(.+)',\n        'tech_name':                      'Technical Contact Name: *(.+)',\n        'tech_address1':                  'Technical Contact Address1: *(.+)',\n        
'tech_address2':                  'Technical Contact Address2: *(.+)',\n        'tech_city':                      'Technical Contact City: *(.+)',\n        'tech_state_province':            'Technical Contact State/Province: *(.+)',\n        'tech_postal_code':               'Technical Contact Postal Code: *(.+)',\n        'tech_country':                   'Technical Contact Country: *(.+)',\n        'tech_country_code':              'Technical Contact Country Code: *(.+)',\n        'tech_phone_number':              'Technical Contact Phone Number: *(.+)',\n        'tech_email':                     'Technical Contact Email: *(.+)',\n        'tech_application_purpose':       'Technical Application Purpose: *(.+)',\n        'tech_nexus_category':            'Technical Nexus Category: *(.+)',\n        'name_servers':                   'Name Server: *(.+)',  # list of name servers\n        'created_by_registrar':           'Created by Registrar: *(.+)',\n        'last_updated_by_registrar':      'Last Updated by Registrar: *(.+)',\n        'creation_date':                  'Domain Registration Date: *(.+)',\n        'expiration_date':                'Domain Expiration Date: *(.+)',\n        'updated_date':                   'Domain Last Updated Date: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'Not found:' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisPl(WhoisEntry):\n    \"\"\"Whois parser for .pl domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'DOMAIN NAME: *(.+)\\n',\n        'registrar':                      'REGISTRAR:\\n\\s*(.+)',\n        'registrar_url':                  'URL: *(.+)',        # not available\n        'status':                         'Registration status:\\n\\s*(.+)',  # not available\n        'registrant_name':                'Registrant:\\n\\s*(.+)',   # not available\n        'creation_date':   
               'created: *(.+)\\n',\n        'expiration_date':                'renewal date: *(.+)',\n        'updated_date':                   'last modified: *(.+)\\n',\n    }\n\n    def __init__(self, domain, text):\n        if 'No information available about domain name' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisCa(WhoisEntry):\n    \"\"\"Whois parser for .ca domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'Domain name: *(.+)',\n        'registrant_name':                '(?<!Domain) Name: *(.+)',\n        'registrant_number':              'Number: *(.+)\\n',\n        'domain_status':                  'Domain status: *(.+)',\n        'emails':                         'Email: *(.+)',\n        'updated_date':                   'Updated Date: *(.+)',\n        'creation_date':                  'Creation Date: *(.+)',\n        'expiration_date':                'Expiry Date: *(.+)',\n        'phone':                          'Phone: *(.+)',\n        'fax':                            'Fax: *(.+)',\n        'dnssec':                         'dnssec: *([\\S]+)'\n    }\n\n    def __init__(self, domain, text):\n        if 'Domain status:         available' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisMe(WhoisEntry):\n    \"\"\"Whois parser for .me domains\n    \"\"\"\n    regex = {\n        'domain_id':                   'Domain ID:(.+)',\n        'domain_name':                 'Domain Name:(.+)',\n        'creation_date':               'Domain Create Date:(.+)',\n        'updated_date':                'Domain Last Updated Date:(.+)',\n        'expiration_date':             'Domain Expiration Date:(.+)',\n        'transfer_date':               'Last Transferred Date:(.+)',\n        'trademark_name':              'Trademark Name:(.+)',\n        
'trademark_country':           'Trademark Country:(.+)',\n        'trademark_number':            'Trademark Number:(.+)',\n        'trademark_application_date':  'Date Trademark Applied For:(.+)',\n        'trademark_registration_date': 'Date Trademark Registered:(.+)',\n        'registrar':                   'Sponsoring Registrar:(.+)',\n        'created_by':                  'Created by:(.+)',\n        'updated_by':                  'Last Updated by Registrar:(.+)',\n        'status':                      'Domain Status:(.+)',  # list of statuses\n        'registrant_id':               'Registrant ID:(.+)',\n        'registrant_name':             'Registrant Name:(.+)',\n        'registrant_org':              'Registrant Organization:(.+)',\n        'registrant_address':          'Registrant Address:(.+)',\n        'registrant_address2':         'Registrant Address2:(.+)',\n        'registrant_address3':         'Registrant Address3:(.+)',\n        'registrant_city':             'Registrant City:(.+)',\n        'registrant_state_province':   'Registrant State/Province:(.+)',\n        'registrant_country':          'Registrant Country/Economy:(.+)',\n        'registrant_postal_code':      'Registrant Postal Code:(.+)',\n        'registrant_phone':            'Registrant Phone:(.+)',\n        'registrant_phone_ext':        'Registrant Phone Ext\\.:(.+)',\n        'registrant_fax':              'Registrant FAX:(.+)',\n        'registrant_fax_ext':          'Registrant FAX Ext\\.:(.+)',\n        'registrant_email':            'Registrant E-mail:(.+)',\n        'admin_id':                    'Admin ID:(.+)',\n        'admin_name':                  'Admin Name:(.+)',\n        'admin_org':                   'Admin Organization:(.+)',\n        'admin_address':               'Admin Address:(.+)',\n        'admin_address2':              'Admin Address2:(.+)',\n        'admin_address3':              'Admin Address3:(.+)',\n        'admin_city':                  'Admin 
City:(.+)',\n        'admin_state_province':        'Admin State/Province:(.+)',\n        'admin_country':               'Admin Country/Economy:(.+)',\n        'admin_postal_code':           'Admin Postal Code:(.+)',\n        'admin_phone':                 'Admin Phone:(.+)',\n        'admin_phone_ext':             'Admin Phone Ext\\.:(.+)',\n        'admin_fax':                   'Admin FAX:(.+)',\n        'admin_fax_ext':               'Admin FAX Ext\\.:(.+)',\n        'admin_email':                 'Admin E-mail:(.+)',\n        'tech_id':                     'Tech ID:(.+)',\n        'tech_name':                   'Tech Name:(.+)',\n        'tech_org':                    'Tech Organization:(.+)',\n        'tech_address':                'Tech Address:(.+)',\n        'tech_address2':               'Tech Address2:(.+)',\n        'tech_address3':               'Tech Address3:(.+)',\n        'tech_city':                   'Tech City:(.+)',\n        'tech_state_province':         'Tech State/Province:(.+)',\n        'tech_country':                'Tech Country/Economy:(.+)',\n        'tech_postal_code':            'Tech Postal Code:(.+)',\n        'tech_phone':                  'Tech Phone:(.+)',\n        'tech_phone_ext':              'Tech Phone Ext\\.:(.+)',\n        'tech_fax':                    'Tech FAX:(.+)',\n        'tech_fax_ext':                'Tech FAX Ext\\.:(.+)',\n        'tech_email':                  'Tech E-mail:(.+)',\n        'name_servers':                'Nameservers:(.+)',  # list of name servers\n    }\n\n    def __init__(self, domain, text):\n        if 'NOT FOUND' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisUk(WhoisEntry):\n    \"\"\"Whois parser for .uk domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'Domain name:\\n\\s*(.+)',\n        'registrar':                      'Registrar:\\n\\s*(.+)',\n        
'registrar_url':                  'URL: *(.+)',\n        'status':                         'Registration status:\\n\\s*(.+)',  # list of statuses\n        'registrant_name':                'Registrant:\\n\\s*(.+)',\n        'creation_date':                  'Registered on: *(.+)',\n        'expiration_date':                'Expiry date: *(.+)',\n        'updated_date':                   'Last updated: *(.+)',\n        'name_servers':                   'Name servers: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'No match for ' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisFr(WhoisEntry):\n    \"\"\"Whois parser for .fr domains\n    \"\"\"\n    regex = {\n        'domain_name': 'domain: *(.+)',\n        'registrar': 'registrar: *(.+)',\n        'creation_date': 'created: *(.+)',\n        'expiration_date': 'Expir\\w+ Date:\\s?(.+)',\n        'name_servers': 'nserver: *(.+)',  # list of name servers\n        'status': 'status: *(.+)',  # list of statuses\n        'emails': EMAIL_REGEX,  # list of email addresses\n        'updated_date': 'last-update: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'No entries found' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisFi(WhoisEntry):\n    \"\"\"Whois parser for .fi domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'domain\\.*: *([\\S]+)',\n        'name':                           'descr\\.*: *([\\S\\ ]+)',\n        'address':                        'address\\.*: *([\\S\\ ]+)',\n        'phone':                          'phone\\.*: *([\\S\\ ]+)',\n        'status':                         'status\\.*: *([\\S]+)',  # list of statuses\n        'creation_date':                  'created\\.*: *([\\S]+)',\n        'updated_date':                   'modified\\.*: 
*([\\S]+)',\n        'expiration_date':                'expires\\.*: *([\\S]+)',\n        'name_servers':                   'nserver\\.*: *([\\S]+) \\[\\S+\\]',  # list of name servers\n        'name_server_statuses':           'nserver\\.*: *([\\S]+) \\[\\S+\\]',  # list of name servers and statuses\n        'dnssec':                         'dnssec\\.*: *([\\S]+)',\n\n    }\n\n    def __init__(self, domain, text):\n        if 'Domain not ' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisJp(WhoisEntry):\n    \"\"\"Whois parser for .jp domains\n    \"\"\"\n    regex = {\n        'domain_name': 'a\\. \\[Domain Name\\]\\s*(.+)',\n        'registrant_org': 'g\\. \\[Organization\\](.+)',\n        'creation_date': r'\\[Registered Date\\]\\s*(.+)',\n        'name_servers': 'p\\. \\[Name Server\\]\\s*(.+)',  # list of name servers\n        'updated_date':  '\\[Last Update\\]\\s?(.+)',\n        'status': '\\[State\\]\\s*(.+)',  # list of statuses\n    }\n\n    def __init__(self, domain, text):\n        if 'No match!!' 
in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisAU(WhoisEntry):\n    \"\"\"Whois parser for .au domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'Domain Name: *(.+)\\n',\n        'last_modified':\t\t\t      'Last Modified: *(.+)\\n',\n        'registrar':                      'Registrar Name: *(.+)\\n',\n        'status':                         'Status: *(.+)',\n        'registrant_name':                'Registrant: *(.+)',\n        'name_servers':                   'Name Server: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if text.strip() == 'No Data Found':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisEu(WhoisEntry):\n    \"\"\"Whois parser for .eu domains\n    \"\"\"\n    regex = {\n        'domain_name': r'Domain: *([^\\n\\r]+)',\n        'tech_name': r'Technical: *Name: *([^\\n\\r]+)',\n        'tech_org': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *([^\\n\\r]+)',\n        'tech_phone': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *[^\\n\\r]+\\s*Language: *[^\\n\\r]+\\s*Phone: *([^\\n\\r]+)',\n        'tech_fax': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *[^\\n\\r]+\\s*Language: *[^\\n\\r]+\\s*Phone: *[^\\n\\r]+\\s*Fax: *([^\\n\\r]+)',\n        'tech_email': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *[^\\n\\r]+\\s*Language: *[^\\n\\r]+\\s*Phone: *[^\\n\\r]+\\s*Fax: *[^\\n\\r]+\\s*Email: *([^\\n\\r]+)',\n        'registrar': r'Registrar: *Name: *([^\\n\\r]+)',\n        'name_servers': r'Name servers: *([^\\n\\r]+)\\s*([^\\n\\r]*)',  # list of name servers\n    }\n\n    def __init__(self, domain, text):\n        if text.strip() == 'Status: AVAILABLE':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisEe(WhoisEntry):\n    
\"\"\"Whois parser for .ee domains\n    \"\"\"\n    regex = {\n        'domain_name': r'Domain: *[\\n\\r]+\\s*name: *([^\\n\\r]+)',\n        'status': r'Domain: *[\\n\\r]+\\s*name: *[^\\n\\r]+\\sstatus: *([^\\n\\r]+)',\n        'registered': r'Domain: *[\\n\\r]+\\s*name: *[^\\n\\r]+\\sstatus: *[^\\n\\r]+\\sregistered: *([^\\n\\r]+)',\n        'changed': r'Domain: *[\\n\\r]+\\s*name: *[^\\n\\r]+\\sstatus: *[^\\n\\r]+\\sregistered: *[^\\n\\r]+\\schanged: *([^\\n\\r]+)',\n        'expire': r'Domain: *[\\n\\r]+\\s*name: *[^\\n\\r]+\\sstatus: *[^\\n\\r]+\\sregistered: *[^\\n\\r]+\\schanged: *[^\\n\\r]+\\sexpire: *([^\\n\\r]+)',\n\n        # 'tech_name': r'Technical: *Name: *([^\\n\\r]+)',\n        # 'tech_org': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *([^\\n\\r]+)',\n        # 'tech_phone': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *[^\\n\\r]+\\s*Language: *[^\\n\\r]+\\s*Phone: *([^\\n\\r]+)',\n        # 'tech_fax': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *[^\\n\\r]+\\s*Language: *[^\\n\\r]+\\s*Phone: *[^\\n\\r]+\\s*Fax: *([^\\n\\r]+)',\n        # 'tech_email': r'Technical: *Name: *[^\\n\\r]+\\s*Organisation: *[^\\n\\r]+\\s*Language: *[^\\n\\r]+\\s*Phone: *[^\\n\\r]+\\s*Fax: *[^\\n\\r]+\\s*Email: *([^\\n\\r]+)',\n        'registrar': r'Registrar: *[\\n\\r]+\\s*name: *([^\\n\\r]+)',\n        'name_servers': r'nserver: *(.*)',  # list of name servers\n    }\n\n    def __init__(self, domain, text):\n        if text.strip() == 'Domain not found':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisBr(WhoisEntry):\n    \"\"\"Whois parser for .br domains\n    \"\"\"\n    regex = {\n        'domain':                        'domain: *(.+)\\n',\n        'owner':                         'owner: *([\\S ]+)',\n        'ownerid':                       'ownerid: *(.+)',\n        'country':                       'country: *(.+)',\n        'owner_c':                       
'owner-c: *(.+)',\n        'admin_c':                       'admin-c: *(.+)',\n        'tech_c':                        'tech-c: *(.+)',\n        'billing_c':                     'billing-c: *(.+)',\n        'nserver':                       'nserver: *(.+)',\n        'nsstat':                        'nsstat: *(.+)',\n        'nslastaa':                      'nslastaa: *(.+)',\n        'saci':                          'saci: *(.+)',\n        'created':                       'created: *(.+)',\n        'expires':                       'expires: *(.+)',\n        'changed':                       'changed: *(.+)',\n        'status':                        'status: *(.+)',\n        'nic_hdl_br':                    'nic-hdl-br: *(.+)',\n        'person':                        'person: *([\\S ]+)',\n        'email':                         'e-mail: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n\n        if 'Not found:' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisKr(WhoisEntry):\n    \"\"\"Whois parser for .kr domains\n    \"\"\"\n    regex = {\n        'domain_name': 'Domain Name\\s*: *(.+)',\n        'registrant_org': 'Registrant\\s*: *(.+)',\n        'registrant_address': 'Registrant Address\\s*: *(.+)',\n        'registrant_zip': 'Registrant Zip Code\\s*: *(.+)',\n        'admin_name': 'Administrative Contact\\(AC\\)\\s*: *(.+)',\n        'admin_email': 'AC E-Mail\\s*: *(.+)',\n        'admin_phone': 'AC Phone Number\\s*: *(.+)',\n        'creation_date': 'Registered Date\\s*: *(.+)',\n        'updated_date':  'Last updated Date\\s*: *(.+)',\n        'expiration_date':  'Expiration Date\\s*: *(.+)',\n        'registrar':  'Authorized Agency\\s*: *(.+)',\n        'name_servers': 'Host Name\\s*: *(.+)',  # list of name servers\n    }\n\n    def __init__(self, domain, text):\n        if text.endswith(' no match'):\n            raise PywhoisError(text)\n        
else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisPt(WhoisEntry):\n    \"\"\"Whois parser for .pt domains\n    \"\"\"\n    regex = {\n        'domain_name': 'domain name: *(.+)',\n        'creation_date': 'creation date \\(dd\\/mm\\/yyyy\\): *(.+)',\n        'expiration_date': 'expiration date \\(dd\\/mm\\/yyyy\\): *(.+)',\n        'name_servers': '\\tNS\\t(.+).',  # list of name servers\n        'status': 'status: *(.+)',  # list of statuses\n        'emails': EMAIL_REGEX,  # list of email addresses\n    }\n\n    def __init__(self, domain, text):\n        if text.strip() == 'No entries found':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisBg(WhoisEntry):\n    \"\"\"Whois parser for .bg domains\n    \"\"\"\n    regex = {\n        'expiration_date': 'expires at: *(.+)',\n    }\n\n    dayfirst = True\n\n    def __init__(self, domain, text):\n        if 'does not exist in database!' 
in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisDe(WhoisEntry):\n    \"\"\"Whois parser for .de domains\n    \"\"\"\n    regex = {\n        'name': 'name: *(.+)',\n        'org': 'Organisation: *(.+)',\n        'address': 'Address: *(.+)',\n        'zipcode': 'PostalCode: *(.+)',\n        'city': 'City: *(.+)',\n        'country_code': 'CountryCode: *(.+)',\n        'phone': 'Phone: *(.+)',\n        'fax': 'Fax: *(.+)'\n    }\n\n    def __init__(self, domain, text):\n        if 'Status: free' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\nclass WhoisAt(WhoisEntry):\n    \"\"\"Whois parser for .at domains\n    \"\"\"\n    regex = {\n        'name': 'personname: *(.+)',\n        'org': 'organization: *(.+)',\n        'address': 'street address: *(.+)',\n        'zipcode': 'postal code: *(.+)',\n        'city': 'city: *(.+)',\n        'country': 'country: *(.+)',\n        'phone': 'phone: *(.+)',\n        'fax': 'fax-no: *(.+)',\n        'changed': 'changed: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'Status: free' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\nclass WhoisBe(WhoisEntry):\n    \"\"\"Whois parser for .be domains\n    \"\"\"\n    regex = {\n        'name': 'Name: *(.+)',\n        'org': 'Organisation: *(.+)',\n        'phone': 'Phone: *(.+)',\n        'fax': 'Fax: *(.+)',\n        'email': 'Email: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'Status: AVAILABLE' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\n\nclass WhoisInfo(WhoisEntry):\n    \"\"\"Whois parser for .info domains\n    \"\"\"\n    regex = {\n        'domain_name':      'Domain Name: *(.+)',\n        
'registrar':        'Registrar: *(.+)',\n        'whois_server':     'Whois Server: *(.+)', # empty usually\n        'referral_url':     'Referral URL: *(.+)', # http url of whois_server: empty usually\n        'updated_date':     'Updated Date: *(.+)',\n        'creation_date':    'Creation Date: *(.+)',\n        'expiration_date':  'Registry Expiry Date: *(.+)',\n        'name_servers':     'Name Server: *(.+)', # list of name servers\n        'status':           'Status: *(.+)', # list of statuses\n        'emails':           EMAIL_REGEX, # list of email addresses\n        'name':             'Registrant Name: *(.+)',\n        'org':              'Registrant Organization: *(.+)',\n        'address':          'Registrant Street: *(.+)',\n        'city':             'Registrant City: *(.+)',\n        'state':            'Registrant State/Province: *(.+)',\n        'zipcode':          'Registrant Postal Code: *(.+)',\n        'country':          'Registrant Country: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if text.strip() == 'NOT FOUND':\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisRf(WhoisRu):\n    \"\"\"Whois parser for .su domains\n    \"\"\"\n    def __init__(self, domain, text):\n        WhoisRu.__init__(self, domain, text)\n\n\nclass WhoisSu(WhoisRu):\n    \"\"\"Whois parser for .su domains\n    \"\"\"\n    def __init__(self, domain, text):\n        WhoisRu.__init__(self, domain, text)\n\n\nclass WhoisClub(WhoisEntry):\n    \"\"\"Whois parser for .us domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'Domain Name: *(.+)',\n        'domain__id':                     'Domain ID: *(.+)',\n        'registrar':                      'Sponsoring Registrar: *(.+)',\n        'registrar_id':                   'Sponsoring Registrar IANA ID: *(.+)',\n        'registrar_url':                  'Registrar URL \\(registration services\\): 
*(.+)',\n        # list of statuses\n        'status':                         'Domain Status: *(.+)',\n        'registrant_id':                  'Registrant ID: *(.+)',\n        'registrant_name':                'Registrant Name: *(.+)',\n        'registrant_address1':            'Registrant Address1: *(.+)',\n        'registrant_address2':            'Registrant Address2: *(.+)',\n        'registrant_city':                'Registrant City: *(.+)',\n        'registrant_state_province':      'Registrant State/Province: *(.+)',\n        'registrant_postal_code':         'Registrant Postal Code: *(.+)',\n        'registrant_country':             'Registrant Country: *(.+)',\n        'registrant_country_code':        'Registrant Country Code: *(.+)',\n        'registrant_phone_number':        'Registrant Phone Number: *(.+)',\n        'registrant_email':               'Registrant Email: *(.+)',\n        'registrant_application_purpose': 'Registrant Application Purpose: *(.+)',\n        'registrant_nexus_category':      'Registrant Nexus Category: *(.+)',\n        'admin_id':                       'Administrative Contact ID: *(.+)',\n        'admin_name':                     'Administrative Contact Name: *(.+)',\n        'admin_address1':                 'Administrative Contact Address1: *(.+)',\n        'admin_address2':                 'Administrative Contact Address2: *(.+)',\n        'admin_city':                     'Administrative Contact City: *(.+)',\n        'admin_state_province':           'Administrative Contact State/Province: *(.+)',\n        'admin_postal_code':              'Administrative Contact Postal Code: *(.+)',\n        'admin_country':                  'Administrative Contact Country: *(.+)',\n        'admin_country_code':             'Administrative Contact Country Code: *(.+)',\n        'admin_phone_number':             'Administrative Contact Phone Number: *(.+)',\n        'admin_email':                    'Administrative Contact Email: 
*(.+)',\n        'admin_application_purpose':      'Administrative Application Purpose: *(.+)',\n        'admin_nexus_category':           'Administrative Nexus Category: *(.+)',\n        'billing_id':                     'Billing Contact ID: *(.+)',\n        'billing_name':                   'Billing Contact Name: *(.+)',\n        'billing_address1':               'Billing Contact Address1: *(.+)',\n        'billing_address2':               'Billing Contact Address2: *(.+)',\n        'billing_city':                   'Billing Contact City: *(.+)',\n        'billing_state_province':         'Billing Contact State/Province: *(.+)',\n        'billing_postal_code':            'Billing Contact Postal Code: *(.+)',\n        'billing_country':                'Billing Contact Country: *(.+)',\n        'billing_country_code':           'Billing Contact Country Code: *(.+)',\n        'billing_phone_number':           'Billing Contact Phone Number: *(.+)',\n        'billing_email':                  'Billing Contact Email: *(.+)',\n        'billing_application_purpose':    'Billing Application Purpose: *(.+)',\n        'billing_nexus_category':         'Billing Nexus Category: *(.+)',\n        'tech_id':                        'Technical Contact ID: *(.+)',\n        'tech_name':                      'Technical Contact Name: *(.+)',\n        'tech_address1':                  'Technical Contact Address1: *(.+)',\n        'tech_address2':                  'Technical Contact Address2: *(.+)',\n        'tech_city':                      'Technical Contact City: *(.+)',\n        'tech_state_province':            'Technical Contact State/Province: *(.+)',\n        'tech_postal_code':               'Technical Contact Postal Code: *(.+)',\n        'tech_country':                   'Technical Contact Country: *(.+)',\n        'tech_country_code':              'Technical Contact Country Code: *(.+)',\n        'tech_phone_number':              'Technical Contact Phone Number: *(.+)',\n    
    'tech_email':                     'Technical Contact Email: *(.+)',\n        'tech_application_purpose':       'Technical Application Purpose: *(.+)',\n        'tech_nexus_category':            'Technical Nexus Category: *(.+)',\n        # list of name servers\n        'name_servers':                   'Name Server: *(.+)',\n        'created_by_registrar':           'Created by Registrar: *(.+)',\n        'last_updated_by_registrar':      'Last Updated by Registrar: *(.+)',\n        'creation_date':                  'Domain Registration Date: *(.+)',\n        'expiration_date':                'Domain Expiration Date: *(.+)',\n        'updated_date':                   'Domain Last Updated Date: *(.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'Not found:' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisIo(WhoisEntry):\n    \"\"\"Whois parser for .io domains\n    \"\"\"\n    regex = {\n        'status':           'Status\\s*: *(.+)',\n        'name_servers':     'NS \\d?\\s*: *(.+)',\n        #'owner':            'Owner\\s*: *(.+)',\n        'owner':            'Owner OrgName\\s*: *(.+)',\n        'expiration_date':  'Expiry\\s*: *(.+)',\n        'domain_name':      'Domain\\s*: *(.+)',\n        'registrar':        r'Check for \\'[\\w\\.]*\\' --- (.+)',\n    }\n\n    def __init__(self, domain, text):\n        if 'is available for purchase' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisBiz(WhoisUs):\n    \"\"\"Whois parser for .biz domains\n    \"\"\"\n    def __init__(self, domain, text):\n        WhoisUs.__init__(self, domain, text)\n\n\nclass WhoisMobi(WhoisMe):\n    \"\"\"Whois parser for .mobi domains\n    \"\"\"\n    def __init__(self, domain, text):\n        WhoisMe.__init__(self, domain, text)\n\n\nclass WhoisKg(WhoisEntry):\n    \"\"\"Whois parser 
for .kg domains\n    \"\"\"\n    regex = {\n        'domain_name':                    'Domain\\s*([\\w]+\\.[\\w]{2,5})',\n        'registrar':                      'Domain support: \\s*(.+)',\n        'registrant_name':                'Name: *(.+)',\n        'registrant_address1':            'Address: *(.+)',\n        'registrant_phone_number':        'phone: *(.+)',\n        'registrant_email':               'Email: *(.+)',\n        # # list of name servers\n        'name_servers':                   'Name servers in the listed order: *([\\d\\w\\.\\s]+)',\n        # 'name_servers':      r'([\\w]+\\.[\\w]+\\.[\\w]{2,5}\\s*\\d{1,3}\\.\\d]{1,3}\\.[\\d]{1-3}\\.[\\d]{1-3})',\n        'creation_date':                  'Record created: *(.+)',\n        'expiration_date':                'Record expires on \\s*(.+)',\n        'updated_date':                   'Record last updated on\\s*(.+)',\n\n    }\n    def __init__(self, domain, text):\n        if 'Data not found. This domain is available for registration' in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\n\nclass WhoisChLi(WhoisEntry):\n    \"\"\"Whois Parser for .ch and .li domains\n    \"\"\"\n    regex = {\n        'domain_name':                      '\\nDomain name:\\n*(.+)',\n        'registrant':                       'Holder of domain name:\\n*([\\n\\s\\S]+)\\nContractual Language:',\n        'registrar':                        'Registrar:\\n*(.+)',\n        'creation_date':                    'First registration date:\\n*(.+)',\n        'dnssec':                           'DNSSEC:*([\\S]+)',\n        'tech-c':                           'Technical contact:\\n*([\\n\\s\\S]+)\\nRegistrar:',\n        'name_servers':                     'Name servers:\\n *([\\n\\S\\s]+)'\n    }\n    def __init__(self,domain,text):\n        if 'We do not have an entry in our database matching your query.' 
in text:\n            raise PywhoisError(text)\n        else:\n            WhoisEntry.__init__(self, domain, text, self.regex)\n\nclass WhoisID(WhoisEntry):\n        \"\"\"Whois parser for .id domains\n        \"\"\"\n        regex = {\n            'domain_id':                   'Domain ID:(.+)',\n            'domain_name':                 'Domain Name:(.+)',\n            'creation_date':               'Created On:(.+)',\n            'expiration_date':             'Expiration Date(.+)',\n            'registrar':                   'Sponsoring Registrar ID:(.+)',\n            'status':                      'Status:(.+)',  # list of statuses\n            'registrant_id':               'Registrant ID:(.+)',\n            'registrant_name':             'Registrant Name:(.+)',\n            'registrant_org':              'Registrant Organization:(.+)',\n            'registrant_address':          'Registrant Street1:(.+)',\n            'registrant_address2':         'Registrant Street2:(.+)',\n            'registrant_address3':         'Registrant Street3:(.+)',\n            'registrant_city':             'Registrant City:(.+)',\n            'registrant_country':          'Registrant Country:(.+)',\n            'registrant_postal_code':      'Registrant Postal Code:(.+)',\n            'registrant_phone':            'Registrant Phone:(.+)',\n            'registrant_fax':              'Registrant FAX:(.+)',\n            'registrant_email':            'Registrant Email:(.+)',\n            'name_servers':                'Name Server:(.+)',  # list of name servers\n        }\n\n        def __init__(self, domain, text):\n            if 'NOT FOUND' in text:\n                raise PywhoisError(text)\n            else:\n                WhoisEntry.__init__(self, domain, text, self.regex)\n"
  },
  {
    "path": "lib/pywhois/whois/time_zones.py",
    "content": "from __future__ import unicode_literals\nfrom __future__ import print_function\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\n_tz_string = '''-12 Y\n-11 X NUT SST\n-10 W CKT HAST HST TAHT TKT\n-9 V AKST GAMT GIT HADT HNY\n-8 U AKDT CIST HAY HNP PST PT\n-7 T HAP HNR MST PDT\n-6 S CST EAST GALT HAR HNC MDT\n-5 R CDT COT EASST ECT EST ET HAC HNE PET\n-4 Q AST BOT CLT COST EDT FKT GYT HAE HNA PYT\n-3 P ADT ART BRT CLST FKST GFT HAA PMST PYST SRT UYT WGT\n-2 O BRST FNT PMDT UYST WGST\n-1 N AZOT CVT EGT\n0 Z EGST GMT UTC WET WT\n1 A CET DFT WAT WEDT WEST\n2 B CAT CEDT CEST EET SAST WAST\n3 C EAT EEDT EEST IDT MSK\n4 D AMT AZT GET GST KUYT MSD MUT RET SAMT SCT\n5 E AMST AQTT AZST HMT MAWT MVT PKT TFT TJT TMT UZT YEKT\n6 F ALMT BIOT BTT IOT KGT NOVT OMST YEKST\n7 G CXT DAVT HOVT ICT KRAT NOVST OMSST THA WIB\n8 H ACT AWST BDT BNT CAST HKT IRKT KRAST MYT PHT SGT ULAT WITA WST\n9 I AWDT IRKST JST KST PWT TLT WDT WIT YAKT\n10 K AEST ChST PGT VLAT YAKST YAPT\n11 L AEDT LHDT MAGT NCT PONT SBT VLAST VUT\n12 M ANAST ANAT FJT GILT MAGST MHT NZST PETST PETT TVT WFT\n13 FJST NZDT\n11.5 NFT\n10.5 ACDT LHST\n9.5 ACST\n6.5 CCT MMT\n5.75 NPT\n5.5 SLT\n4.5 AFT IRDT\n3.5 IRST\n-2.5 HAT NDT\n-3.5 HNT NST NT\n-4.5 HLV VET\n-9.5 MART MIT'''\n\ntz_data = {}\n\nfor tz_descr in (tz_spec.split() for tz_spec in _tz_string.split('\\n')):\n    tz_offset = int(float(tz_descr[0]) * 3600)\n    for tz_code in tz_descr[1:]:\n        tz_data[tz_code] = tz_offset\n"
  },
  {
    "path": "lib/pywhois/whois/whois.py",
    "content": "\"\"\"\nWhois client for python\n\ntransliteration of:\nhttp://www.opensource.apple.com/source/adv_cmds/adv_cmds-138.1/whois/whois.c\n\nCopyright (c) 2010 Chris Wolf\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n\"\"\"\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import division\nfrom __future__ import absolute_import\nfrom future import standard_library\nstandard_library.install_aliases()\nfrom builtins import *\nfrom builtins import object\nimport re\nimport sys\nimport socket\nimport optparse\n\n\nclass NICClient(object):\n\n    ABUSEHOST = \"whois.abuse.net\"\n    NICHOST = \"whois.crsnic.net\"\n    INICHOST = \"whois.networksolutions.com\"\n    DNICHOST = \"whois.nic.mil\"\n    GNICHOST = \"whois.nic.gov\"\n    ANICHOST = \"whois.arin.net\"\n    LNICHOST = \"whois.lacnic.net\"\n    RNICHOST = \"whois.ripe.net\"\n    PNICHOST = \"whois.apnic.net\"\n    MNICHOST = \"whois.ra.net\"\n    QNICHOST_TAIL = 
\".whois-servers.net\"\n    SNICHOST = \"whois.6bone.net\"\n    BNICHOST = \"whois.registro.br\"\n    NORIDHOST = \"whois.norid.no\"\n    IANAHOST = \"whois.iana.org\"\n    PANDIHOST = \"whois.pandi.or.id\"\n    DENICHOST = \"de.whois-servers.net\"\n    DEFAULT_PORT = \"nicname\"\n\n    WHOIS_RECURSE = 0x01\n    WHOIS_QUICK = 0x02\n\n    ip_whois = [LNICHOST, RNICHOST, PNICHOST, BNICHOST, PANDIHOST ]\n\n    def __init__(self):\n        self.use_qnichost = False\n\n    def findwhois_server(self, buf, hostname, query):\n        \"\"\"Search the initial TLD lookup results for the regional-specifc\n        whois server for getting contact details.\n        \"\"\"\n        nhost = None\n        match = re.compile('Domain Name: ' + query + '\\s*.*?Whois Server: (.*?)\\s', flags=re.IGNORECASE|re.DOTALL).search(buf)\n        if match:\n            nhost = match.groups()[0]\n            # if the whois address is domain.tld/something then\n            # s.connect((hostname, 43)) does not work\n            if nhost.count('/') > 0:\n                nhost = None\n        elif hostname == NICClient.ANICHOST:\n            for nichost in NICClient.ip_whois:\n                if buf.find(nichost) != -1:\n                    nhost = nichost\n                    break\n        return nhost\n\n    def whois(self, query, hostname, flags, many_results=False):\n        \"\"\"Perform initial lookup with TLD whois server\n        then, if the quick flag is false, search that result\n        for the region-specifc whois server and do a lookup\n        there for contact details\n        \"\"\"\n        response = b''\n        try:\n            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n            s.settimeout(10)\n            s.connect((hostname, 43))\n\n            try:\n                query = query.decode('utf-8')\n            except UnicodeEncodeError:\n                pass  # Already Unicode (python2's error)\n            except AttributeError:\n                pass  # 
Already Unicode (python3's error)\n\n            if hostname == NICClient.DENICHOST:\n                query_bytes = \"-T dn,ace -C UTF-8 \" + query\n            elif hostname.endswith(NICClient.QNICHOST_TAIL) and many_results:\n                query_bytes = '=' + query\n            else:\n                query_bytes = query\n            s.send((query_bytes).encode('idna') + b\"\\r\\n\")\n            # recv returns bytes\n            while True:\n                d = s.recv(4096)\n                response += d\n                if not d:\n                    break\n            s.close()\n        except socket.error as socketerror:\n            print('Socket Error:', socketerror)\n\n        nhost = None\n        response = response.decode('utf-8', errors='replace')\n        if 'with \"=xxx\"' in response:\n            return self.whois(query, hostname, flags, True)\n        if flags & NICClient.WHOIS_RECURSE and nhost is None:\n            nhost = self.findwhois_server(response, hostname, query)\n        if nhost is not None:\n            response += self.whois(query, nhost, 0)\n        return response\n\n    def choose_server(self, domain):\n        \"\"\"Choose initial lookup NIC host\"\"\"\n        try:\n            domain = domain.encode('idna').decode('utf-8')\n        except TypeError:\n            domain = domain.decode('utf-8').encode('idna').decode('utf-8')\n        if domain.endswith(\"-NORID\"):\n            return NICClient.NORIDHOST\n        if domain.endswith(\"id\"):\n            return NICClient.PANDIHOST\n\n        domain = domain.split('.')\n        if len(domain) < 2:\n            return None\n        tld = domain[-1]\n        if tld[0].isdigit():\n            return NICClient.ANICHOST\n\n        return tld + NICClient.QNICHOST_TAIL\n\n    def whois_lookup(self, options, query_arg, flags):\n        \"\"\"Main entry point: Perform initial lookup on TLD whois server,\n        or other server to get region-specific whois server, then if quick\n        
flag is false, perform a second lookup on the region-specific\n        server for contact records\"\"\"\n        nichost = None\n        # whoud happen when this function is called by other than main\n        if options is None:\n            options = {}\n\n        if ('whoishost' not in options or options['whoishost'] is None) \\\n                and ('country' not in options or options['country'] is None):\n            self.use_qnichost = True\n            options['whoishost'] = NICClient.NICHOST\n            if not (flags & NICClient.WHOIS_QUICK):\n                flags |= NICClient.WHOIS_RECURSE\n\n        if 'country' in options and options['country'] is not None:\n            result = self.whois(\n                query_arg,\n                options['country'] + NICClient.QNICHOST_TAIL,\n                flags\n            )\n        elif self.use_qnichost:\n            nichost = self.choose_server(query_arg)\n            if nichost is not None:\n                result = self.whois(query_arg, nichost, flags)\n            else:\n                result = ''\n        else:\n            result = self.whois(query_arg, options['whoishost'], flags)\n        return result\n\n\ndef parse_command_line(argv):\n    \"\"\"Options handling mostly follows the UNIX whois(1) man page, except\n    long-form options can also be used.\n    \"\"\"\n    flags = 0\n\n    usage = \"usage: %prog [options] name\"\n\n    parser = optparse.OptionParser(add_help_option=False, usage=usage)\n    parser.add_option(\"-a\", \"--arin\", action=\"store_const\",\n                      const=NICClient.ANICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.ANICHOST)\n    parser.add_option(\"-A\", \"--apnic\", action=\"store_const\",\n                      const=NICClient.PNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.PNICHOST)\n    parser.add_option(\"-b\", \"--abuse\", action=\"store_const\",\n                      
const=NICClient.ABUSEHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.ABUSEHOST)\n    parser.add_option(\"-c\", \"--country\", action=\"store\",\n                      type=\"string\", dest=\"country\",\n                      help=\"Lookup using country-specific NIC\")\n    parser.add_option(\"-d\", \"--mil\", action=\"store_const\",\n                      const=NICClient.DNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.DNICHOST)\n    parser.add_option(\"-g\", \"--gov\", action=\"store_const\",\n                      const=NICClient.GNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.GNICHOST)\n    parser.add_option(\"-h\", \"--host\", action=\"store\",\n                      type=\"string\", dest=\"whoishost\",\n                      help=\"Lookup using specified whois host\")\n    parser.add_option(\"-i\", \"--nws\", action=\"store_const\",\n                      const=NICClient.INICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.INICHOST)\n    parser.add_option(\"-I\", \"--iana\", action=\"store_const\",\n                      const=NICClient.IANAHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.IANAHOST)\n    parser.add_option(\"-l\", \"--lcanic\", action=\"store_const\",\n                      const=NICClient.LNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.LNICHOST)\n    parser.add_option(\"-m\", \"--ra\", action=\"store_const\",\n                      const=NICClient.MNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.MNICHOST)\n    parser.add_option(\"-p\", \"--port\", action=\"store\",\n                      type=\"int\", dest=\"port\",\n                      help=\"Lookup using specified tcp port\")\n    parser.add_option(\"-Q\", \"--quick\", action=\"store_true\",\n     
                 dest=\"b_quicklookup\",\n                      help=\"Perform quick lookup\")\n    parser.add_option(\"-r\", \"--ripe\", action=\"store_const\",\n                      const=NICClient.RNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.RNICHOST)\n    parser.add_option(\"-R\", \"--ru\", action=\"store_const\",\n                      const=\"ru\", dest=\"country\",\n                      help=\"Lookup Russian NIC\")\n    parser.add_option(\"-6\", \"--6bone\", action=\"store_const\",\n                      const=NICClient.SNICHOST, dest=\"whoishost\",\n                      help=\"Lookup using host \" + NICClient.SNICHOST)\n    parser.add_option(\"-n\", \"--ina\", action=\"store_const\",\n                          const=NICClient.PANDIHOST, dest=\"whoishost\",\n                          help=\"Lookup using host \" + NICClient.PANDIHOST)\n    parser.add_option(\"-?\", \"--help\", action=\"help\")\n\n    return parser.parse_args(argv)\n\n\nif __name__ == \"__main__\":\n    flags = 0\n    nic_client = NICClient()\n    options, args = parse_command_line(sys.argv)\n    if options.b_quicklookup:\n        flags = flags | NICClient.WHOIS_QUICK\n    print(nic_client.whois_lookup(options.__dict__, args[1], flags))\n"
  },
  {
    "path": "plugins/__init__.py",
    "content": ""
  },
  {
    "path": "plugins/about_project.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nclass AboutProject(object):\n    def __init__(self):\n        self.__info__ = 'Collecting Public Data & Public Document for OSINT purpose'\n        self.__author__ = 'Petruknisme'\n        self.__version__ = 'v0.2.4'\n        self.__name__= \"Belati\"\n        self.__giturl__ = \"https://github.com/aancw/Belati\"\n        self.__authorurl__ = \"https://petruknisme.com\"\n\nif __name__ == '__main__':\n    AboutProjectApp = AboutProject()\n    AboutProjectApp\n"
  },
  {
    "path": "plugins/banner_grab.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys\nfrom url_request import URLRequest\n\nurl_req = URLRequest()\n\nclass BannerGrab(object):\n    def show_banner(self, domain_name, proxy_address):\n        try:\n            data = url_req.header_info(domain_name, proxy_address)\n            return data\n        except:\n            pass\n\nif __name__ == '__main__':\n    BannerGrabApp = BannerGrab()\n    BannerGrabApp\n"
  },
  {
    "path": "plugins/check_domain.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys\nfrom lib.pywhois import whois\nfrom url_request import URLRequest\n\nurl_req = URLRequest()\n\nclass CheckDomain(object):\n\tdef domain_checker(self, domain_name, proxy_address):\n\t\ttry:\n\t\t\tdata = url_req.just_url_open(domain_name, proxy_address)\n\t\t\tif data is not \"\" and data is not \"notexist\" and not \"ERROR\" in data:\n\t\t\t\treturn \"OK!\"\n\t\texcept:\n\t\t\treturn \"NOT OK!\"\n\n\tdef alive_check(self, domain_name, proxy_address):\n\t\ttry:\n\t\t\tdata = url_req.just_url_open(domain_name, proxy_address)\n\t\t\tif data is not \"\" and data is not \"notexist\" and not \"ERROR\" in data:\n\t\t\t\treturn \"OK!\"\n\t\texcept:\n\t\t\treturn \"NOT OK!\"\n\n\tdef whois_domain(self, domain_name):\n\t\tresponse = whois.whois(domain_name)\n\t\treturn response\n\nif __name__ == '__main__':\n    CheckDomainApp = CheckDomain()\n    CheckDomainApp\n"
  },
  {
    "path": "plugins/common_service_check.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, socket, errno\nfrom logger import Logger\nfrom url_request import URLRequest\n\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nurl_req = URLRequest()\nlog = Logger()\n\nclass CommonServiceCheck(object):\n    ## STILL NOT ACCURATE!\n    def check_available_service(self, host):\n        list_available_port = []\n        list_common_port = [21,22,23,25,53,80,110,111,135,139,143,443,445,993,995,1723,3306,3389,5900,8080]\n        for port in list_common_port:\n            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n            try:\n                s.connect((host, port))\n                if port == 80:\n                    data = url_req.header_info(\"http://\" + host, \"\")\n                    log.console_log(\"Found HTPP Service : ({} OPEN)\".format(str(port)) )\n                    log.console_log(\"\\n{}\".format(data))\n      
          elif port == 443:\n                    data = url_req.header_info(\"https://\" + host, \"\")\n                else:\n                    print(\"port :\" + str(port) + \" OPEN! \" + s.recv(4096))\n            except socket.error as e:\n                if e.errno == errno.ECONNREFUSED or e.errno == 113:\n                    pass\n                else:\n                    print(\"port :\" + str(port) + str(e) + \"closed\")\n            s.close()\n"
  },
  {
    "path": "plugins/config.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, os, errno\nimport ConfigParser\nfrom logger import Logger\nfrom util import Util\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nlog = Logger()\nconfig = ConfigParser.ConfigParser()\nutil = Util()\n\nclass Config(object):\n    def __init__(self):\n        self.config_file = \"belati.conf\"\n        if os.path.isfile(self.config_file):\n            db_string = self.get_config(\"Database\", \"db_location\")\n            if db_string == \"belati.db\":\n                log.console_log(\"{}[-] Hm... You are using old Belati configuration{}\".format(Y, W))\n                self.init_config_file()\n        else:\n            log.console_log(\"{}[-] No Configuration file found. 
Setting up...{}\".format(Y, W))\n            self.init_config_file()\n\n    def get_config(self, conf_section, conf_key):\n        config.read(self.config_file)\n        value = config.get(conf_section, conf_key)\n        return value\n\n    def set_config(self, conf_section, conf_key, conf_value):\n        config.read(self.config_file)\n        config.set(conf_section, conf_key, conf_value)\n        with open(self.config_file, \"wb\") as conf_file:\n            config.write(conf_file)\n\n    def init_config_file(self):\n        log.console_log(\"\\n{} -----> Initiating Configuration <-----\\n{}\".format(Y, W))\n\n        if config.has_section(\"Database\"):\n            pass\n        else:\n            config.add_section(\"Database\")\n\n        self.set_config(\"Database\", \"db_location\", \"web/db.sqlite3\")\n        log.console_log(\"\\n{} Setting database location to {}\\n{}\".format(Y,\"web/db.sqlite3\", W))\n\n        if config.has_section(\"Environment\"):\n            pass\n        else:\n            config.add_section(\"Environment\")\n        \n        log.console_log(\"{} Setting Current Directory to {} {}\".format(Y, util.get_current_work_dir(), W))   \n\n        python_binary = raw_input(\"\\nPlease enter Python v2.x Binary name [python]:\") or \"python\"\n\n        self.set_config(\"Environment\", \"py_bin\", python_binary)\n        self.set_config(\"Environment\", \"curr_dir\", util.get_current_work_dir())\n"
  },
  {
    "path": "plugins/database.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, os, errno\nimport sqlite3 as db\nfrom logger import Logger\nfrom config import Config\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nlog = Logger()\n\nclass Database(object):\n    def __init__(self):\n        conf = Config()\n        self.conn = None\n        db_location = conf.get_config(\"Database\", \"db_location\")\n        try:\n            self.conn = db.connect(db_location)\n            self.conn.text_factory = str\n        except db.Error, e:\n            print(\"Error: \" +  str(e.args[0]))\n            sys.exit()\n\n    def create_new_project(self, project_domain, project_org, time):\n        cur = self.conn.cursor()\n        cur.execute(\"INSERT INTO projects(`project_domain`, `project_org`, `started_time`) VALUES (?, ?, ?)\", (project_domain, project_org, time))\n        self.conn.commit()\n\n        return cur.lastrowid\n\n  
  def check_subdomain_exist(self, project_id, subdomain):\n        cur = self.conn.cursor()\n        cur.execute(\"SELECT id from subdomain_results WHERE project_id = ? AND subdomain = ?\",(project_id, subdomain))\n        data = cur.fetchone()\n        return data\n\n    def insert_banner(self, domain, project_id, banner_info):\n        cur = self.conn.cursor()\n        subdomain_exist = self.check_subdomain_exist(project_id, domain)\n\n        if subdomain_exist == None:\n            cur.execute(\"INSERT INTO subdomain_results(`project_id`, `subdomain`, `banner`) VALUES(?, ?, ?)\", (project_id, domain, banner_info))\n        else:\n            cur.execute(\"UPDATE subdomain_results SET `banner` = ? WHERE project_id = ? AND subdomain = ? \", (banner_info, project_id, domain))\n\n        self.conn.commit()\n\n    def insert_robots_txt(self, project_id, domain, robots_txt):\n        cur = self.conn.cursor()\n        subdomain_exist = self.check_subdomain_exist(project_id, domain)\n\n        if subdomain_exist == None:\n            cur.execute(\"INSERT INTO subdomain_results(`project_id`, `subdomain`, `robots_txt`) VALUES(?, ?, ?)\", (project_id, domain, robots_txt))\n        else:\n            cur.execute(\"UPDATE subdomain_results SET `robots_txt` = ? WHERE project_id = ? AND subdomain = ? \", (robots_txt, project_id, domain))\n\n        self.conn.commit()\n\n    def insert_wappalyzing(self, project_id, domain, wappalyzing_result):\n        cur = self.conn.cursor()\n        subdomain_exist = self.check_subdomain_exist(project_id, domain)\n\n        if subdomain_exist == None:\n            cur.execute(\"INSERT INTO subdomain_results(`project_id`, `subdomain`, `wappalyzer`) VALUES(?, ?, ?)\", (project_id, domain, wappalyzing_result))\n        else:\n            cur.execute(\"UPDATE subdomain_results SET `wappalyzer` = ? WHERE project_id = ? AND subdomain = ? 
\", (wappalyzing_result, project_id, domain))\n\n        self.conn.commit()\n\n    def update_subdomain_ip(self, project_id, subdomain, ipaddress):\n        cur = self.conn.cursor()\n        subdomain_exist = self.check_subdomain_exist(project_id, subdomain)\n\n        if subdomain_exist:\n            cur.execute(\"UPDATE subdomain_results SET ip_address = ? WHERE project_id = ? AND subdomain = ?\", (ipaddress, project_id, subdomain))\n\n        self.conn.commit()\n\n    def update_git_finder(self, project_id, subdomain, status):\n        cur = self.conn.cursor()\n        subdomain_exist = self.check_subdomain_exist(project_id, subdomain)\n        status_fix = \"Yes\" if status == \"Yes\" else \"No\"\n\n        if subdomain_exist:\n            cur.execute(\"UPDATE subdomain_results SET is_contain_git = ? WHERE project_id = ? AND subdomain = ?\", (status_fix, project_id, subdomain))\n\n        self.conn.commit()\n\n    def update_svn_finder(self, project_id, subdomain, status):\n        cur = self.conn.cursor()\n        subdomain_exist = self.check_subdomain_exist(project_id, subdomain)\n        status_fix = \"Yes\" if status == \"Yes\" else \"No\"\n\n        if subdomain_exist:\n            cur.execute(\"UPDATE subdomain_results SET is_contain_svn = ? WHERE project_id = ? AND subdomain = ?\", (status_fix, project_id, subdomain))\n\n        self.conn.commit()\n\n    def insert_domain_result(self, project_id, domain, domain_whois, email):\n        cur = self.conn.cursor()\n        cur.execute(\"INSERT INTO main_domain_results(`project_id`, `domain`, `domain_whois`, `email`) VALUES(?, ?, ?, ?)\", (project_id, domain, domain_whois, email))\n        self.conn.commit()\n\n    def update_dns_zone(self, project_id, domain, ns_record, mx_record):\n        cur = self.conn.cursor()\n        cur.execute(\"UPDATE main_domain_results SET NS_record = ?, MX_record = ? WHERE project_id = ? 
AND domain = ?\", (ns_record, mx_record, project_id, domain))\n        self.conn.commit()\n\n    def insert_email_result(self, project_id, mail_results):\n        cur = self.conn.cursor()\n        cur.execute(\"INSERT INTO mail_harvest_results(`project_id`, `mail_results`) VALUES(?, ?)\", (project_id, mail_results))\n        self.conn.commit()\n\n    def update_pgp_email(self, project_id, mail_pgp_results):\n        cur = self.conn.cursor()\n        cur.execute(\"UPDATE mail_harvest_results SET mail_pgp_results = ? WHERE project_id = ?\", (mail_pgp_results, project_id))\n        self.conn.commit()\n\n    def insert_public_doc(self, project_id, doc_ext, doc_url, doc_location, doc_full_location, doc_meta_exif):\n        cur = self.conn.cursor()\n        cur.execute(\"INSERT INTO doc_results(`project_id`, `doc_ext`, `doc_url`, `doc_location`, `doc_full_location`, `doc_meta_exif` ) VALUES(?, ?, ?, ?, ?, ?)\", (project_id, doc_ext, doc_url, doc_location, doc_full_location, doc_meta_exif))\n        self.conn.commit()\n\n    def insert_linkedin_company_info(self, project_id, company_name, company_linkedin_url, company_description):\n        cur = self.conn.cursor()\n        cur.execute(\"INSERT INTO linkedin_company_info(`project_id`, `company_name`, `company_linkedin_url`, `company_description`) VALUES  (?, ?, ?, ?)\",(project_id, company_name, company_linkedin_url, company_description))\n        self.conn.commit()\n\n        return cur.lastrowid\n\n    def insert_company_employees(self, project_id, name, job_title, linkedin_url ):\n        cur = self.conn.cursor()\n        cur.execute(\"INSERT INTO linkedin_company_employees(`project_id`, `name`, `job_title`, `linkedin_url`) VALUES(?, ?, ?, ?)\", (project_id, name, job_title, linkedin_url))\n        self.conn.commit()\n\n    # def read(table, **kwargs):\n    # \"\"\" Generates SQL for a SELECT statement matching the kwargs passed. 
\"\"\"\n    # sql = list()\n    # sql.append(\"SELECT * FROM %s \" % table)\n    # if kwargs:\n    #     sql.append(\"WHERE \" + \" AND \".join(\"%s = '%s'\" % (k, v) for k, v in kwargs.iteritems()))\n    # sql.append(\";\")\n    # return \"\".join(sql)\n    # cursor.execute(\"INSERT INTO table VALUES ?\", args)\n    # cursor.execute('INSERT INTO media_files (%s) VALUES (%%s, %%s, %%s, %%s,   ...)' % ','.join(fieldlist), valuelist)\n"
  },
  {
    "path": "plugins/dep_check.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, os, operator, pkg_resources\nfrom logger import Logger\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nlog = Logger()\n\ntry:\n    # Check if pip module is installed\n    import pip\nexcept ImportError:\n    log.console_log(\"{}[-] Sorry, please install pip before using Belati : https://pip.pypa.io/en/stable/installing/ {}\".format(R, W))\n    sys.exit(1)\n\ntry:\n    # Check for older pip version\n    from pip._vendor.packaging.version import Version\nexcept ImportError:\n    log.console_log(\"{}[-] Old pip version detected, please upgrade using: sudo pip install --upgrade pip {}\".format(Y, W))\n    sys.exit(1)\n\nclass DepCheck(object):\n\n    def check_dependency(self):\n        list_deps = []\n        missing_deps = []\n\n        with open('requirements.txt') as f:\n            list_deps = f.read().splitlines()\n\n\tdists = [d for d 
in pkg_resources.working_set]\n\tpip_list = sorted([(i.key) for i in dists])\n        #pip_list = sorted([(i.key) for i in pip.get_installed_distributions()])\n\n        for req_dep in list_deps:\n            compare_char = [\"==\", \">=\", \"<=\", \">\", \"<\", \"!=\"]\n            for c in compare_char:\n                if c in req_dep:\n                    pkg = req_dep.split(c)\n                    if pkg[0] not in pip_list:\n                        missing_deps.append(req_dep)\n                        break\n                    else:\n                        installed_ver = pkg_resources.get_distribution(pkg[0]).version\n                        if self.get_truth(installed_ver, c, pkg[1]):\n                            break\n                        else:\n                            missing_deps.append(req_dep)                            \n                else:\n                    if req_dep not in pip_list:\n                        # Why this package is not in get_installed_distributions ?\n                        if str(req_dep) == \"argparse\":\n                            pass\n                        else:\n                            missing_deps.append(req_dep)\n\n        missing_deps = set(missing_deps)\n        if missing_deps:\n            missing_deps_warning =\"\"\"\n            You are missing a module required for Belati. In order to continue using Belati, please install them with:\n\n            {}`pip install --upgrade --force-reinstall -r requirements.txt`{}\n\n            or manually install missing modules with:\n\n            {}`pip install --upgrade --force-reinstall {}`{}\n\n            \"\"\"\n\n            log.console_log(missing_deps_warning.format(Y, W, Y, ' '.join(missing_deps), W))\n            sys.exit()\n\n    def get_truth(self, inp, relate, cut):\n        ops = {'>': operator.gt,\n        '<': operator.lt,\n        '>=': operator.ge,\n        '<=': operator.le,\n        '==': operator.eq}\n        return ops[relate](inp, cut)\n"
  },
  {
    "path": "plugins/gather_company.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport re,sys\nfrom bs4 import BeautifulSoup\nfrom database import Database\nfrom logger import Logger\nfrom url_request import URLRequest\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nurl_req = URLRequest()\nlog = Logger()\n\nclass GatherCompany(object):\n    def crawl_company_employee(self, company_name, proxy_address, project_id):\n        self.db = Database()\n        self.project_id = project_id\n        self.company_id = 0\n        comp_strip = company_name.replace(\" \", \"+\")\n        url = 'https://www.google.com/search?q=\"Current+*+{}+*\"+site:linkedin.com&num=200'.format(comp_strip)\n\n        data = url_req.standart_request(url, proxy_address)\n\n        soup = BeautifulSoup( data, 'html.parser')\n        company_linkedin_url_list = []\n\n        #Getting all h3 tags with class 'r'\n        scrap_container = soup.find_all('div', 
class_='rc')\n        for rc in scrap_container:\n            soup2 = BeautifulSoup( str(rc), 'html.parser' )\n            url = soup2.find_all('h3', class_= 'r')\n            url_fix = re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', str(url))\n            linkedin_url = re.findall(r'(http[s]?://.*\\.linkedin\\.com/in/.*)', str(url_fix).strip(\"\\'[]\")) # filter only *.linked.com\n            company_linkedin_url = re.findall(r'(http[s]?://.*\\.linkedin\\.com/company/.*)', str(url_fix).strip(\"\\'[]\")) # filter only *.linked.com/company\n            job_title = soup2.find_all('div', class_='slp f')\n            url_tag = soup2.find_all(\"a\")[0].string\n\n            # Check if URL is match with one of the string from company name(?)\n            if company_linkedin_url:\n                is_contain_name = 0\n                for x in company_name.split():\n                    if x in url_tag:\n                        is_contain_name = 1\n                        break\n\n                if is_contain_name == 1:\n                    company_linkedin_url_list.append(company_linkedin_url)\n                    self.company_id = self.db.insert_linkedin_company_info(self.project_id, str(company_name), str(company_linkedin_url), \"Lorem ipsum\")\n\n            # Get data when linkedin url is like this : *.linkedin.com/in\n            if not linkedin_url:\n                pass\n            else:\n                name_result = re.sub('<[^<]+?>', '', str(rc.h3.a)) # strip all html tags like <em>\n                job_title_result = re.sub('<[^<]+?>', '', str(job_title)) # strip all html tags like <em>\n                name_fix = str(name_result.replace('| LinkedIn', ''))\n                job_title_fix   = str(job_title_result.replace('\\u200e', ' ')).strip(\"\\'[]\")\n                linkedin_url_fix = str(linkedin_url).strip(\"\\'[]\")\n                log.console_log(\"{}[+] --------------------------------------------------- 
[+]{}\".format(Y, W))\n                log.console_log(\"Name: {}\".format( name_fix ))\n                log.console_log(\"Job Title: {}\".format( job_title_fix ))\n                log.console_log(\"Url: {}\".format( linkedin_url_fix ))\n                log.console_log(\"{}[+] --------------------------------------------------- [+]{}\\n\".format(Y, W))\n\n                self.db.insert_company_employees(self.project_id, name_fix, job_title_fix, linkedin_url_fix)\n\n        log.console_log(\"\\n\\n{}[+] --------------------------------------------------- [+]{}\".format(Y, W))\n        log.console_log(\"{}[+] Found LinkedIn Company URL: {}\".format(Y, W))\n        for url in company_linkedin_url_list:\n            log.console_log(\"{} {} {}\".format(Y, str(url), W))\n"
  },
  {
    "path": "plugins/git_finder.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, re, time\nfrom url_request import URLRequest\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nurl_req = URLRequest()\n\nclass GitFinder(object):\n    def check_git(self, domain, proxy_address):\n        try:\n            data = url_req.just_url_open(url_req.ssl_checker(domain) + \"/.git/HEAD\", proxy_address)\n            if data is not None and data is not \"notexist\":\n                decode_data = data.read(200).decode()\n\n                if not 'refs/heads' in decode_data:\n                    return False\n                else:\n                    return True\n        except:\n            pass\n"
  },
  {
    "path": "plugins/harvest_email.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\n# This is part of MailHarvester and EMINGOO regex\n# Thanks to pantuts and maldevel\n\nimport sys, re, time\nfrom url_request import URLRequest\n\nurl_req = URLRequest()\n\nclass HarvestEmail(object):\n    def crawl_search(self, domain, proxy_address):\n        url = 'https://www.google.com/search?num=200&start=0&filter=0&hl=en&q=@' + domain\n        try:\n            data = url_req.standart_request(url, proxy_address)\n            dataStrip = re.sub('<[^<]+?>', '', data) # strip all html tags like <em>\n            dataStrip1 =  re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+' + domain, dataStrip)\n            dataStrip2 = re.findall(r'[a-zA-Z0-9._+-]+@' + domain, dataStrip)\n            dataEmail = set(dataStrip1 + dataStrip2)\n            dataFix = [x for x in dataEmail if not x.startswith('x22') and not x.startswith('3D') and not x.startswith('x3d') and not x.startswith('Cached') and not x.startswith('page')] # ignore email because bad parsing\n   
         return list(dataFix)\n        except:\n            pass\n\n    def crawl_pgp_mit_edu(self, domain, proxy_address):\n        url = 'http://pgp.mit.edu:11371/pks/lookup?op=index&search=' + domain\n        try:\n            data = url_req.standart_request(url, proxy_address, 'Googlebot/3.1 (+http://www.googlebot.com/bot.html)')\n            dataStrip = re.sub('<[^<]+?>', '', data) # strip all html tags like <em>\n            dataStrip1 =  re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+' + domain, dataStrip)\n            dataStrip2 = re.findall(r'[a-zA-Z0-9._+-]+@' + domain, dataStrip)\n            dataEmail = set(dataStrip1 + dataStrip2)\n            return list(dataEmail)\n        except:\n            pass\n\nif __name__ == '__main__':\n    HarvestEmailApp = HarvestEmail()\n    HarvestEmailApp\n"
  },
  {
    "path": "plugins/harvest_public_document.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport re, os, errno\nimport urllib\nfrom database import Database\nfrom logger import Logger\nfrom tqdm import tqdm\nimport requests\nfrom url_request import URLRequest\nfrom meta_exif_extractor import MetaExifExtractor\nfrom util import Util\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nurl_req = URLRequest()\nlog = Logger()\nutil = Util()\n\nclass HarvestPublicDocument(object):\n    def __init__(self):\n        self.db = Database()\n        self.project_id = 0\n\n    def init_crawl(self, domain, proxy_address, project_id):\n        self.project_id = project_id\n        log.console_log(\"{}[*] Gather Link from Google Search for domain {}{}\".format(G, domain, W))\n        self.harvest_public_doc(domain, \"pdf\", proxy_address)\n        self.harvest_public_doc(domain, \"doc\", proxy_address)\n        self.harvest_public_doc(domain, \"xls\", 
proxy_address)\n        self.harvest_public_doc(domain, \"odt\", proxy_address)\n        self.harvest_public_doc(domain, \"ppt\", proxy_address)\n        self.harvest_public_doc(domain, \"rtf\", proxy_address)\n        self.harvest_public_doc(domain, \"txt\", proxy_address)\n        #https://www.google.com/search?q=site:domain.com%20ext:pdf&filter=0&num=100#q=site:domain.com+ext:txt&start=100&filter=0\n\n    def harvest_public_doc(self, domain, extension, proxy_address):\n        log.console_log(\"{}[*] Searching {} Document... {}\".format(G, extension.upper(), W))\n        total_files = 0\n        url = 'https://www.google.com/search?q=site:' + domain + '%20ext:' + extension + '&filter=0&num=200'\n        try:\n            data = url_req.standart_request(url, proxy_address)\n            # Re<url>https?:\\/\\/[A-Za-z0-9\\-\\?&#_~@=\\.\\/%\\[\\]\\+]+.pdf\n            # (?P<url>https?://[A-Za-z0-9\\-\\?&#_~@=\\.\\/%\\[\\]\\+]+\\.pdf)\n            #  \"(?P<url>https?://[^:]+\\.%s)\" % extension\n            regex = \"(?P<url>https?://[A-Za-z0-9\\-\\?&#_~@=\\.\\/%\\[\\]\\+]+\\.{})\".format(extension)\n            if type(data)==str:\n                data = re.findall(regex, data)\n                list_files_download = list(set(data))\n                total_files = str(len(list_files_download))\n                if total_files != \"0\":\n                    log.console_log(\"{}[*] Found {} {} files!\".format(G, total_files, extension.upper(), W) )\n                    log.console_log(\"{}[*] Please wait, lemme download it for you ;) {}[NO PROXY] {}\".format(G, Y, W))\n                    for files_download in list_files_download:\n                        log.no_console_log(files_download.split('/')[-1])\n                        self.download_files(files_download, domain)\n            else:\n                log.console_log(\"{}[!] 
Error: Google probably now is blocking our requests{}\".format(R,W))\n        except:\n            pass\n\n    def download_files(self, url, folder_domain):\n        filename = url.split('/')[-1]\n        full_filename = 'belatiFiles/{}/{}'.format(folder_domain, filename)\n        full_filename_location = '{}/belatiFiles/{}/{}'.format(util.get_current_work_dir(), folder_domain, filename)\n        meta = MetaExifExtractor()\n\n        if not os.path.exists(os.path.dirname(full_filename)):\n            try:\n                os.makedirs(os.path.dirname(full_filename))\n            except OSError as exc: # Guard against race condition\n                if exc.errno != errno.EEXIST:\n                    raise\n\n        with tqdm(unit='B', unit_scale=True, miniters=1,desc=filename) as t:\n            try:\n                urllib.urlretrieve(url, filename=full_filename,reporthook=self.my_hook(t), data=None)\n            except:\n                pass\n\n        meta_exif_json = meta.extract_json(full_filename_location)\n        self.db.insert_public_doc(self.project_id, str(os.path.splitext(filename)[1]), str(url), str(full_filename), str(full_filename_location), str(meta_exif_json))\n\n    def my_hook(self,t):\n      \"\"\"\n      Wraps tqdm instance. Don't forget to close() or __exit__()\n      the tqdm instance once you're done with it (easiest using `with` syntax).\n\n      Example\n      -------\n\n      >>> with tqdm(...) as t:\n      ...     reporthook = my_hook(t)\n      ...     urllib.urlretrieve(..., reporthook=reporthook)\n\n      \"\"\"\n      last_b = [0]\n\n      def inner(b=1, bsize=1, tsize=None):\n        \"\"\"\n        b  : int, optional\n            Number of blocks just transferred [default: 1].\n        bsize  : int, optional\n            Size of each block (in tqdm units) [default: 1].\n        tsize  : int, optional\n            Total size (in tqdm units). 
If [default: None] remains unchanged.\n        \"\"\"\n        if tsize is not None:\n            t.total = tsize\n        t.update((b - last_b[0]) * bsize)\n        last_b[0] = b\n      return inner\n\nif __name__ == '__main__':\n    HarvestPublicDocumentApp = HarvestPublicDocument()\n    HarvestPublicDocumentApp\n"
  },
  {
    "path": "plugins/json_beautifier.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport json\n\nclass JsonBeautifier(object):\n\tdef beautifier(self, json_data):\n\t\tparsed = json.loads(json_data)\n\t\treturn json.dumps(parsed, indent=4, sort_keys=True)"
  },
  {
    "path": "plugins/logger.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, os, errno\nimport logging\nimport time\n\nclass Logger(object):\n    def __init__(self):\n        timestamp = int(time.time())\n        datetime = time.strftime(\"%d%m%Y\")\n        log_dir = \"logs/\"\n        log_filename = log_dir + \"Belati-\" + datetime + \"-\" + str(timestamp) + \".log\"\n\n        if not os.path.exists(os.path.dirname(log_filename)):\n            try:\n                os.makedirs(os.path.dirname(log_filename))\n            except OSError as exc: # Guard against race condition\n                if exc.errno != errno.EEXIST:\n                    raise\n\n        logging.basicConfig(filename=log_filename, format='%(message)s')\n\n    def console_log(self, log_word, newline=1):\n        logging.warning(log_word)\n        if newline == 1:\n            print(log_word)\n        else:\n            sys.stdout.write(log_word)\n\n    def no_console_log(self, log_word, newline=1):\n        logging.warning(log_word)\n\nif 
__name__ == '__main__':\n    LoggerApp = Logger()\n    LoggerApp\n"
  },
  {
    "path": "plugins/meta_exif_extractor.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport pyexifinfo as p\nimport json\n\nclass MetaExifExtractor(object):\n\tdef extract_json(self, filename):\n\t\tdata = p.get_json(filename)\n\t\tjson_data = json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))\n\t\treturn json_data\n\n\tdef extract_xml(self, filename):\n\t\tdata = p.get_xml(filename)\n\t\treturn data\t\n\n"
  },
  {
    "path": "plugins/robots_scraper.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nfrom url_request import URLRequest\n\nurl_req = URLRequest()\n\nclass RobotsScraper(object):\n    def check_robots(self, domain_name, proxy_address):\n        try:\n            url_request = \"{}/robots.txt\".format(domain_name)\n            data = url_req.just_url_open(url_request, proxy_address)\n            if data is not \"\" and data is not \"notexist\":\n            \t# We need to check if file is valid, no redirect, no reload, or something\n                if data.getcode() == 200 and data.getcode() != 302 and url_request in data.geturl() :\n                    return data\n        except:\n            pass\n"
  },
  {
    "path": "plugins/scan_nmap.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nfrom logger import Logger\nimport shlex, subprocess\n\nlog = Logger()\n\nclass ScanNmap(object):\n    def run_scanning(self, ipaddress):\n        command = \"nmap -sS -A -Pn \" + ipaddress\n        process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE)\n        while True:\n            output = process.stdout.readline()\n            if output == '' and process.poll() is not None:\n                break\n            if output:\n                log.console_log(output.strip())\n        rc = process.poll()\n        return rc\n\nif __name__ == '__main__':\n    ScanNmapApp = ScanNmap()\n    ScanNmapApp\n"
  },
  {
    "path": "plugins/subdomain_enum.py",
    "content": "#!/usr/bin/env python\r\n# -*- coding: utf-8 -*-\r\n#\r\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\r\n#   This tools is inspired by Foca and Datasploit for OSINT\r\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\r\n\r\n#    This program is free software: you can redistribute it and/or modify\r\n#    it under the terms of the GNU General Public License as published by\r\n#    the Free Software Foundation, either version 2 of the License, or\r\n#    (at your option) any later version.\r\n\r\n#    This program is distributed in the hope that it will be useful,\r\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\r\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\r\n#    GNU General Public License for more details.\r\n\r\n#    You should have received a copy of the GNU General Public License\r\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\r\n\r\n# This file is part of Belati project\r\n\r\nimport sys\r\n\r\nfrom bs4 import BeautifulSoup\r\nfrom dnsdumpster.DNSDumpsterAPI import DNSDumpsterAPI\r\nfrom url_request import URLRequest\r\n\r\nurl_req = URLRequest()\r\n\r\nclass SubdomainEnum(object):\r\n\tdef scan_dnsdumpster(self, domain_name):\r\n\t\tresults = DNSDumpsterAPI().search(domain_name)\r\n\t\treturn results\r\n\r\n\tdef scan_crtsh(self, domain_name, proxy_address):\r\n\t\ttry:\r\n\t\t\turl = \"https://crt.sh/?q=%25.\" + domain_name\r\n\t\t\tdata = url_req.standart_request(url, proxy_address)\r\n\t\t\tsoup = BeautifulSoup( data, 'lxml')\r\n\t\t\tsubdomain_list = []\r\n\t\t\ttry:\r\n\t\t\t\ttable = soup.findAll('table')[2]\r\n\t\t\t\trows = table.find_all(['tr'])\r\n\t\t\t\tfor row in rows:\r\n\t\t\t\t\tcells = row.find_all('td', limit=5)\r\n\t\t\t\t\tif cells:\r\n\t\t\t\t\t\tname = cells[4].text\r\n\t\t\t\t\t\t# we don't need wildcard domain\r\n\t\t\t\t\t\tif \"*.\" not in 
name:\r\n\t\t\t\t\t\t\tsubdomain_list.append(name)\r\n\r\n\t\t\t\treturn list(set(subdomain_list))\r\n\t\t\texcept:\r\n\t\t\t\tpass\r\n\t\texcept:\r\n\t\t\tpass\r\n\r\n\tdef scan_findsubdomainsCom(self,domain_name,proxy_address):\r\n\t\ttry:\r\n\t\t\turl = \"https://findsubdomains.com/subdomains-of/{}\".format(domain_name)\r\n\t\t\tdata = url_req.standart_request(url, proxy_address)\r\n\t\t\tsoup = BeautifulSoup( data, 'lxml')\r\n\t\t\tsubdomain_list = []\r\n\t\t\ttry:\r\n\t\t\t\ttmp = soup.findAll(\"a\",attrs={\"href\" : \"javascript:void(0);\",\"class\" : \"desktop-hidden\"})\r\n\t\t\t\tfor raw in tmp:\r\n\t\t\t\t\tsubdomain_list.append(raw.text)\r\n\t\t\t\treturn list(set(subdomain_list))\r\n\t\t\texcept:\r\n\t\t\t\tpass\r\n\t\texcept:\r\n\t\t\tpass"
  },
  {
    "path": "plugins/svn_finder.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, re, time\nfrom url_request import URLRequest\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nurl_req = URLRequest()\n\nclass SVNFinder(object):\n    def check_svn(self, domain, proxy_address):\n        try:\n            data = url_req.just_url_open(url_req.ssl_checker(domain) + \"/.svn/\", proxy_address)\n\n            if data is not None and data is not \"notexist\":\n                if data == 403:\n                    return data\n                if data.getcode() == 200 and data.getcode() != 302 and url_request in data.geturl():\n                    return data.getcode()\n        except:\n            pass\n"
  },
  {
    "path": "plugins/updater.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\n# All Utilities Function will be here ;)\n\nimport sys, os\nimport shlex, subprocess\nfrom logger import Logger\nfrom config import Config\nfrom distutils.version import LooseVersion, StrictVersion\nfrom urlparse import urlparse\nfrom url_request import URLRequest\nfrom util import Util\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nlog = Logger()\nurl_req = URLRequest()\nutil = Util()\nconf = Config()\n\nclass Updater(object):\n    def check_update(self, version):\n        # If git repo available it will result 0 and 32768 when no repo available\n        if not (os.path.isdir(\".git\") and os.system('git rev-parse 2> /dev/null > /dev/null')) == 0:\n            log.console_log(\"{}[-] No Git Control. Skip update check... 
{}\".format(Y, W))\n        else:\n            connection_status = url_req.connection_test()\n            remote_version_url = \"https://raw.githubusercontent.com/aancw/Belati/master/version\"\n            log.console_log(\"{}[+] Checking Network Connection... {} {}\".format(G, \"OK\" if connection_status else \"FAILED\" ,W))\n\n            if not connection_status:\n    \t    \tlog.console_log(\"{}[-] Belati can't be used in Offline Mode. Please check your network connection {}\".format(R, W)) \n    \t    \tsys.exit()\n            else:\n                log.console_log(\"{}[+] Checking Version Update for Belati... {}\".format(G, W))\n                \n                remote_version = str(url_req.just_url_open(remote_version_url, \"\").read())\n                \n                if self.update_version(version, remote_version):\n                    log.console_log(\"{}[+] Update is available for version {}{}\".format(G, remote_version, W))\n                    log.console_log(\"{}[*] Updating from master repo\")\n                    self.do_update()\n                    self.migrate_db()\n            \telse:\n            \t\tlog.console_log(\"{}[+] Belati version is uptodate \\m/{}\".format(Y, W))\n\n    def update_version(self, local_version, remote_version):\n    \treturn LooseVersion(util.clean_version_string(local_version)) < LooseVersion(util.clean_version_string(remote_version))\n\n    def do_update(self):\n    \tutil.do_command(\"git\", \"pull\")\n\n    def migrate_db(self):\n        py_bin = conf.get_config(\"Environment\", \"py_bin\")\n        command = \"{} web/manage.py\".format(py_bin)\n        util.do_command(command,\"makemigrations web\")\n        util.do_command(command,\"migrate web\")\n"
  },
  {
    "path": "plugins/url_request.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nimport sys, socket\nimport ssl\nimport urllib2, httplib\nfrom user_agents import UserAgents\nfrom urlparse import urlparse\nfrom logger import Logger\nimport random\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nlog = Logger()\nua = UserAgents()\n\nclass URLRequest(object):\n    def standart_request(self, url_request, proxy_address, user_agents=None):\n        try:\n            if type(proxy_address) is list:\n                # Get random proxy from list\n                proxy_address_fix = random.choice(proxy_address)\n            else:\n                proxy_address_fix = proxy_address\n\n            if proxy_address is not \"\":\n                log.console_log(\"{}[*] Using Proxy Address : {}{}\".format(Y, proxy_address_fix, W))\n\n            if user_agents is not None:\n                user_agent_fix = user_agents\n            else:\n          
      user_agent_fix = ua.get_user_agent()\n\n            parse = urlparse(proxy_address_fix)\n            proxy_scheme = parse.scheme\n            proxy = str(parse.hostname) + ':' + str(parse.port)\n            proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy})\n            opener = urllib2.build_opener(proxy_handler)\n            opener.addheaders = [('User-agent', user_agent_fix )]\n            urllib2.install_opener(opener)\n            req = urllib2.Request(url_request)\n            data = urllib2.urlopen(req).read()\n            return data\n        except urllib2.HTTPError, e:\n            log.console_log('Error code: {}'.format( str(e.code)))\n            return e.code\n        except Exception, detail:\n            log.console_log('ERROR {}'.format( str(detail)))\n            return 1\n\n    def header_info(self, url_request, proxy_address):\n        try:\n            if type(proxy_address) is list:\n                # Get random proxy from list\n                proxy_address_fix = random.choice(proxy_address)\n            else:\n                proxy_address_fix = proxy_address\n\n            if proxy_address is not \"\":\n                log.console_log(\"{}[*] Using Proxy Address : {}{}\".format(Y, proxy_address_fix, W))\n\n            parse = urlparse(proxy_address_fix)\n            proxy_scheme = parse.scheme\n            proxy = str(parse.hostname) + ':' + str(parse.port)\n            proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy})\n            opener = urllib2.build_opener(proxy_handler)\n            opener.addheaders = [('User-agent', ua.get_user_agent() )]\n            urllib2.install_opener(opener)\n            req = urllib2.Request(url_request)\n            data = urllib2.urlopen(req).info()\n            return data\n        except urllib2.HTTPError, e:\n            log.console_log('Error code: {}'.format( str(e.code)))\n            return e.code\n        except Exception, detail:\n            log.console_log('ERROR 
{}'.format( str(detail)))\n            return 1\n        except httplib.BadStatusLine:\n            pass\n\n    def just_url_open(self, url_request, proxy_address):\n        try:\n            if type(proxy_address) is list:\n                # Get random proxy from list\n                proxy_address_fix = random.choice(proxy_address)\n            else:\n                proxy_address_fix = proxy_address\n\n            if proxy_address is not \"\":\n                log.console_log(\"{}[*] Using Proxy Address : {}{}\".format(Y, proxy_address_fix, W))\n\n            parse = urlparse(proxy_address_fix)\n            proxy_scheme = parse.scheme\n            proxy = str(parse.hostname) + ':' + str(parse.port)\n            proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy})\n            opener = urllib2.build_opener(proxy_handler)\n            opener.addheaders = [('User-agent', ua.get_user_agent() )]\n            urllib2.install_opener(opener)\n            req = urllib2.Request(url_request)\n            data = urllib2.urlopen(req, timeout=25)\n            return data\n        except urllib2.HTTPError, e:\n                return e.code\n    \texcept urllib2.URLError, e:\n\t\t\tif str(e.reason) == \"[Errno -2] Name or service not known\":\n\t\t\t\tlog.console_log(\"Not EXIST!\")\n\t\t\t\tlog.console_log(\"Check your internet connection or check your target domain\")\n\t\t\t\treturn \"notexist\"\n\n    def ssl_checker(self, domain):\n        domain_fix = \"https://{}\".format(domain)\n\n        try:\n            # Skip SSL Verification Check!\n            # https://stackoverflow.com/questions/27835619/ssl-certificate-verify-failed-error\n            gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)  # Only for gangstars\n            data = urllib2.urlopen(\"https://{}\".format(domain), timeout=25, context=gcontext)\n            if \"ERROR\" in data or \"Errno\" in data:\n                domain_fix = \"http://{}\".format(domain)\n        except urllib2.HTTPError, e:\n     
       pass\n        except urllib2.URLError, e:\n            domain_fix = \"http://{}\".format(domain)\n        except ssl.SSLError as e:\n            domain_fix = \"http://{}\".format(domain)\n        except httplib.BadStatusLine:\n            domain_fix = \"http://{}\".format(domain)\n\n        return domain_fix\n\n    def connection_test(self):\n        server_test = \"github.com\"\n        try:\n            host = socket.gethostbyname(server_test)\n            s = socket.create_connection((host, 80), 2)\n            return True\n        except:\n            pass\n        return False\n"
  },
  {
    "path": "plugins/user_agents.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nfrom fake_useragent import UserAgent\nfrom fake_useragent import FakeUserAgentError\nua = UserAgent()\n\nclass UserAgents(object):\n    def get_user_agent(self):\n        try:\n            return ua.random\n        except FakeUserAgentError:\n            return \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:19.0) Gecko/20100101 Firefox/49.0\"\n    def update_user_agent(self):\n        ua.update()\n\nif __name__ == '__main__':\n    UserAgentsApp = UserAgents()\n    UserAgentsApp\n"
  },
  {
    "path": "plugins/util.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\n# All Utilities Function will be here ;)\n\nimport sys, os\nimport shlex, subprocess\nfrom logger import Logger\nfrom distutils.version import LooseVersion, StrictVersion\nfrom urlparse import urlparse\nfrom url_request import URLRequest\n\n# Console color\nG = '\\033[92m'  # green\nY = '\\033[93m'  # yellow\nB = '\\033[94m'  # blue\nR = '\\033[91m'  # red\nW = '\\033[0m'   # white\n\nlog = Logger()\nurl_req = URLRequest()\n\nclass Util(object):\n    def check_python_version(self):\n\t    if sys.version[:3] == \"2.7\" or \"2\" in sys.version[:3]:\n\t        log.console_log(\"{}[*] Python version OK! {}{}\".format(G, sys.version[:6], W))\n\t    elif \"3\" in sys.version[:3]:\n\t        log.console_log(\"{}[-] Nope. This system not yet compatible for Python 3!{}\".format(Y, W))\n\t        sys.exit()\n\t    else:\n\t        log.console_log(\"{}[-] Duh. 
Your python version too old for running this :({}\".format(Y, W))\n\t        sys.exit()\n\n    def do_command(self, command, parameter):\n    \tfull_command = \"{} {}\".format(command, parameter)\n        process = subprocess.Popen(shlex.split(full_command), stdout=subprocess.PIPE)\n        while True:\n            output = process.stdout.readline()\n            if output == '' and process.poll() is not None:\n                break\n            if output:\n                log.console_log(output.strip())\n        rc = process.poll()\n        return rc\n\t\n    def clean_version_string(self, text):\n    \t# strip v0.2.2-dev\n    \tstrip_dev = text.strip(\"-dev\\n\")\n    \treturn strip_dev\n\n    def get_current_work_dir(self):\n\t\treturn os.getcwd()\n\n    def clean_list_string(self, text):\n    \treturn str(\", \".join(text))\n   \n    def strip_scheme(self, url):\n\t    parsed = urlparse(url)\n\t    scheme = \"%s://\" % parsed.scheme\n\t    return parsed.geturl().replace(scheme, '', 1)\n"
  },
  {
    "path": "plugins/wappalyzer.py",
    "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n#   Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n#   This tools is inspired by Foca and Datasploit for OSINT\n#   Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n#    This program is free software: you can redistribute it and/or modify\n#    it under the terms of the GNU General Public License as published by\n#    the Free Software Foundation, either version 2 of the License, or\n#    (at your option) any later version.\n\n#    This program is distributed in the hope that it will be useful,\n#    but WITHOUT ANY WARRANTY; without even the implied warranty of\n#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n#    GNU General Public License for more details.\n\n#    You should have received a copy of the GNU General Public License\n#    along with this program.  If not, see <http://www.gnu.org/licenses/>.\n\n# This file is part of Belati project\n\nfrom Wappalyzer import Wappalyzer, WebPage\nfrom user_agents import UserAgents\nfrom logger import Logger\n\nlog = Logger()\nua = UserAgents()\nanalyzer = Wappalyzer.latest()\n\nclass Wappalyzer(object):\n    def run_wappalyze(self, domain):\n        webpage = WebPage.new_from_url(domain)\n        analyze_result = analyzer.analyze(webpage)\n        if analyze_result:\n            for result in analyze_result:\n                log.console_log(result)\n        else:\n            log.console_log(\"Result Not Found\")\n\n        return str(analyze_result)\n\nif __name__ == '__main__':\n    wappalyzerApp = wappalyzer()\n    wappalyzerApp\n"
  },
  {
    "path": "requirements.txt",
    "content": "dnspython\nrequests\nargparse\ntexttable\npython-geoip-geolite2\npython-geoip\ndnsknife\ntermcolor\ncolorama\nvalidators\ntqdm\ntldextract\nfake-useragent\npython-wappalyzer\nfuture\nbeautifulsoup4\npython-whois\nfutures\ndjango==1.11.6\npyexifinfo\ncmd2==0.8.0\ntabulate\ndnsdumpster\n"
  },
  {
    "path": "version",
    "content": "v0.2.4.2\n"
  },
  {
    "path": "web/manage.py",
    "content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif __name__ == \"__main__\":\n    os.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"web.settings\")\n    try:\n        from django.core.management import execute_from_command_line\n    except ImportError:\n        # The above import may fail for some other reason. Ensure that the\n        # issue is really that Django is missing to avoid masking other\n        # exceptions on Python 2.\n        try:\n            import django\n        except ImportError:\n            raise ImportError(\n                \"Couldn't import Django. Are you sure it's installed and \"\n                \"available on your PYTHONPATH environment variable? Did you \"\n                \"forget to activate a virtual environment?\"\n            )\n        raise\n    execute_from_command_line(sys.argv)\n"
  },
  {
    "path": "web/web/__init__.py",
    "content": ""
  },
  {
    "path": "web/web/migrations/0001_initial.py",
    "content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.8 on 2017-07-27 17:37\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n    initial = True\n\n    dependencies = [\n    ]\n\n    operations = [\n        migrations.CreateModel(\n            name='AuthGroup',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n                ('name', models.CharField(max_length=80, unique=True)),\n            ],\n            options={\n                'db_table': 'auth_group',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='AuthGroupPermissions',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n            ],\n            options={\n                'db_table': 'auth_group_permissions',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='AuthPermission',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n                ('codename', models.CharField(max_length=100)),\n                ('name', models.CharField(max_length=255)),\n            ],\n            options={\n                'db_table': 'auth_permission',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='AuthUser',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n                ('password', models.CharField(max_length=128)),\n                ('last_login', models.DateTimeField(blank=True, null=True)),\n                ('is_superuser', models.BooleanField()),\n                ('first_name', models.CharField(max_length=30)),\n                ('last_name', models.CharField(max_length=30)),\n                ('email', 
models.CharField(max_length=254)),\n                ('is_staff', models.BooleanField()),\n                ('is_active', models.BooleanField()),\n                ('date_joined', models.DateTimeField()),\n                ('username', models.CharField(max_length=150, unique=True)),\n            ],\n            options={\n                'db_table': 'auth_user',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='AuthUserGroups',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n            ],\n            options={\n                'db_table': 'auth_user_groups',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='AuthUserUserPermissions',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n            ],\n            options={\n                'db_table': 'auth_user_user_permissions',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='DjangoAdminLog',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n                ('object_id', models.TextField(blank=True, null=True)),\n                ('object_repr', models.CharField(max_length=200)),\n                ('action_flag', models.PositiveSmallIntegerField()),\n                ('change_message', models.TextField()),\n                ('action_time', models.DateTimeField()),\n            ],\n            options={\n                'db_table': 'django_admin_log',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='DjangoContentType',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n                ('app_label', models.CharField(max_length=100)),\n                
('model', models.CharField(max_length=100)),\n            ],\n            options={\n                'db_table': 'django_content_type',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='DjangoMigrations',\n            fields=[\n                ('id', models.IntegerField(primary_key=True, serialize=False)),\n                ('app', models.CharField(max_length=255)),\n                ('name', models.CharField(max_length=255)),\n                ('applied', models.DateTimeField()),\n            ],\n            options={\n                'db_table': 'django_migrations',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='DjangoSession',\n            fields=[\n                ('session_key', models.CharField(max_length=40, primary_key=True, serialize=False)),\n                ('session_data', models.TextField()),\n                ('expire_date', models.DateTimeField()),\n            ],\n            options={\n                'db_table': 'django_session',\n                'managed': False,\n            },\n        ),\n        migrations.CreateModel(\n            name='DocResults',\n            fields=[\n                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_id', models.IntegerField(blank=True, null=True)),\n                ('doc_ext', models.TextField(blank=True, null=True)),\n                ('doc_url', models.TextField(blank=True, null=True)),\n                ('doc_location', models.TextField(blank=True, null=True)),\n            ],\n            options={\n                'db_table': 'doc_results',\n                'managed': True,\n            },\n        ),\n        migrations.CreateModel(\n            name='LinkedinCompanyEmployees',\n            fields=[\n                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_id', 
models.IntegerField(blank=True, null=True)),\n                ('name', models.TextField(blank=True, null=True)),\n                ('job_title', models.TextField(blank=True, null=True)),\n                ('linkedin_url', models.IntegerField(blank=True, null=True)),\n            ],\n            options={\n                'db_table': 'linkedin_company_employees',\n                'managed': True,\n            },\n        ),\n        migrations.CreateModel(\n            name='LinkedinCompanyInfo',\n            fields=[\n                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_id', models.IntegerField(blank=True, null=True)),\n                ('company_name', models.IntegerField(blank=True, null=True)),\n                ('company_linkedin_url', models.IntegerField(blank=True, null=True)),\n                ('company_description', models.IntegerField(blank=True, null=True)),\n            ],\n            options={\n                'db_table': 'linkedin_company_info',\n                'managed': True,\n            },\n        ),\n        migrations.CreateModel(\n            name='MailHarvestResults',\n            fields=[\n                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_id', models.IntegerField(blank=True, null=True)),\n                ('mail_results', models.TextField(blank=True, null=True)),\n                ('mail_pgp_results', models.TextField(blank=True, null=True)),\n            ],\n            options={\n                'db_table': 'mail_harvest_results',\n                'managed': True,\n            },\n        ),\n        migrations.CreateModel(\n            name='MainDomainResults',\n            fields=[\n                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_id', models.IntegerField(blank=True, null=True)),\n                ('domain', models.TextField(blank=True, 
null=True)),\n                ('domain_whois', models.TextField(blank=True, null=True)),\n                ('email', models.TextField(blank=True, null=True)),\n                ('domain_reputation', models.TextField(blank=True, null=True)),\n                ('domain_blacklist', models.TextField(blank=True, null=True)),\n                ('ns_record', models.TextField(blank=True, db_column='NS_record', null=True)),\n                ('mx_record', models.TextField(blank=True, db_column='MX_record', null=True)),\n            ],\n            options={\n                'db_table': 'main_domain_results',\n                'managed': True,\n            },\n        ),\n        migrations.CreateModel(\n            name='Projects',\n            fields=[\n                ('project_id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_domain', models.TextField(blank=True, null=True)),\n                ('project_org', models.TextField(blank=True, null=True)),\n                ('started_time', models.TextField(blank=True, null=True)),\n            ],\n            options={\n                'db_table': 'projects',\n                'managed': True,\n            },\n        ),\n        migrations.CreateModel(\n            name='SubdomainResults',\n            fields=[\n                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),\n                ('project_id', models.IntegerField(blank=True, null=True)),\n                ('subdomain', models.TextField(blank=True, null=True)),\n                ('ip_address', models.TextField(blank=True, null=True)),\n                ('banner', models.TextField(blank=True, null=True)),\n                ('wappalyzer', models.TextField(blank=True, null=True)),\n                ('robots_txt', models.TextField(blank=True, null=True)),\n                ('is_contain_git', models.IntegerField(blank=True, null=True)),\n                ('is_contain_svn', models.IntegerField(blank=True, 
null=True)),\n            ],\n            options={\n                'db_table': 'subdomain_results',\n                'managed': True,\n            },\n        ),\n    ]\n"
  },
  {
    "path": "web/web/migrations/0002_auto_20170727_1741.py",
    "content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.8 on 2017-07-27 17:41\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n    dependencies = [\n        ('web', '0001_initial'),\n    ]\n\n    operations = [\n        migrations.AddField(\n            model_name='docresults',\n            name='doc_full_location',\n            field=models.TextField(blank=True, null=True),\n        ),\n        migrations.AddField(\n            model_name='docresults',\n            name='doc_meta_exif',\n            field=models.TextField(blank=True, null=True),\n        ),\n    ]\n"
  },
  {
    "path": "web/web/migrations/0003_docresults_doc_author.py",
    "content": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.8 on 2017-08-01 07:50\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n    dependencies = [\n        ('web', '0002_auto_20170727_1741'),\n    ]\n\n    operations = [\n        migrations.AddField(\n            model_name='docresults',\n            name='doc_author',\n            field=models.TextField(blank=True, null=True),\n        ),\n    ]\n"
  },
  {
    "path": "web/web/migrations/__init__.py",
    "content": ""
  },
  {
    "path": "web/web/models.py",
    "content": "# This is an auto-generated Django model module.\n# You'll have to do the following manually to clean this up:\n#   * Rearrange models' order\n#   * Make sure each model has one field with primary_key=True\n#   * Make sure each ForeignKey has `on_delete` set to the desired behavior.\n#   * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table\n# Feel free to rename the models, but don't rename db_table values or field names.\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n\nclass AuthGroup(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    name = models.CharField(unique=True, max_length=80)\n\n    class Meta:\n        managed = False\n        db_table = 'auth_group'\n\n\nclass AuthGroupPermissions(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)\n    permission = models.ForeignKey('AuthPermission', models.DO_NOTHING)\n\n    class Meta:\n        managed = False\n        db_table = 'auth_group_permissions'\n        unique_together = (('group', 'permission'),)\n\n\nclass AuthPermission(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING)\n    codename = models.CharField(max_length=100)\n    name = models.CharField(max_length=255)\n\n    class Meta:\n        managed = False\n        db_table = 'auth_permission'\n        unique_together = (('content_type', 'codename'),)\n\n\nclass AuthUser(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    password = models.CharField(max_length=128)\n    last_login = models.DateTimeField(blank=True, null=True)\n    is_superuser = models.BooleanField()\n    first_name = models.CharField(max_length=30)\n    last_name = models.CharField(max_length=30)\n    email = models.CharField(max_length=254)\n    
is_staff = models.BooleanField()\n    is_active = models.BooleanField()\n    date_joined = models.DateTimeField()\n    username = models.CharField(unique=True, max_length=150)\n\n    class Meta:\n        managed = False\n        db_table = 'auth_user'\n\n\nclass AuthUserGroups(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    user = models.ForeignKey(AuthUser, models.DO_NOTHING)\n    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)\n\n    class Meta:\n        managed = False\n        db_table = 'auth_user_groups'\n        unique_together = (('user', 'group'),)\n\n\nclass AuthUserUserPermissions(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    user = models.ForeignKey(AuthUser, models.DO_NOTHING)\n    permission = models.ForeignKey(AuthPermission, models.DO_NOTHING)\n\n    class Meta:\n        managed = False\n        db_table = 'auth_user_user_permissions'\n        unique_together = (('user', 'permission'),)\n\n\nclass DjangoAdminLog(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    object_id = models.TextField(blank=True, null=True)\n    object_repr = models.CharField(max_length=200)\n    action_flag = models.PositiveSmallIntegerField()\n    change_message = models.TextField()\n    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING, blank=True, null=True)\n    user = models.ForeignKey(AuthUser, models.DO_NOTHING)\n    action_time = models.DateTimeField()\n\n    class Meta:\n        managed = False\n        db_table = 'django_admin_log'\n\n\nclass DjangoContentType(models.Model):\n    id = models.IntegerField(primary_key=True)  # AutoField?\n    app_label = models.CharField(max_length=100)\n    model = models.CharField(max_length=100)\n\n    class Meta:\n        managed = False\n        db_table = 'django_content_type'\n        unique_together = (('app_label', 'model'),)\n\n\nclass DjangoMigrations(models.Model):\n    id = 
models.IntegerField(primary_key=True)  # AutoField?\n    app = models.CharField(max_length=255)\n    name = models.CharField(max_length=255)\n    applied = models.DateTimeField()\n\n    class Meta:\n        managed = False\n        db_table = 'django_migrations'\n\n\nclass DjangoSession(models.Model):\n    session_key = models.CharField(primary_key=True, max_length=40)\n    session_data = models.TextField()\n    expire_date = models.DateTimeField()\n\n    class Meta:\n        managed = False\n        db_table = 'django_session'\n\n\nclass DocResults(models.Model):\n    id = models.IntegerField(blank=True, primary_key=True)\n    project_id = models.IntegerField(blank=True, null=True)\n    doc_ext = models.TextField(blank=True, null=True)\n    doc_url = models.TextField(blank=True, null=True)\n    doc_location = models.TextField(blank=True, null=True)\n    doc_full_location = models.TextField(blank=True, null=True)\n    doc_meta_exif = models.TextField(blank=True, null=True)\n    doc_author = models.TextField(blank=True, null=True)\n    \n    class Meta:\n        managed = True\n        db_table = 'doc_results'\n\n\nclass LinkedinCompanyEmployees(models.Model):\n    id = models.IntegerField(blank=True, primary_key=True)\n    project_id = models.IntegerField(blank=True, null=True)\n    name = models.TextField(blank=True, null=True)\n    job_title = models.TextField(blank=True, null=True)\n    linkedin_url = models.IntegerField(blank=True, null=True)\n\n    class Meta:\n        managed = True\n        db_table = 'linkedin_company_employees'\n\n\nclass LinkedinCompanyInfo(models.Model):\n    id = models.IntegerField(blank=True, primary_key=True)\n    project_id = models.IntegerField(blank=True, null=True)\n    company_name = models.IntegerField(blank=True, null=True)\n    company_linkedin_url = models.IntegerField(blank=True, null=True)\n    company_description = models.IntegerField(blank=True, null=True)\n\n    class Meta:\n        managed = True\n        db_table = 
'linkedin_company_info'\n\n\nclass MailHarvestResults(models.Model):\n    id = models.IntegerField(blank=True, primary_key=True)\n    project_id = models.IntegerField(blank=True, null=True)\n    mail_results = models.TextField(blank=True, null=True)\n    mail_pgp_results = models.TextField(blank=True, null=True)\n\n    class Meta:\n        managed = True\n        db_table = 'mail_harvest_results'\n\n\nclass MainDomainResults(models.Model):\n    id = models.IntegerField(blank=True, primary_key=True)\n    project_id = models.IntegerField(blank=True, null=True)\n    domain = models.TextField(blank=True, null=True)\n    domain_whois = models.TextField(blank=True, null=True)\n    email = models.TextField(blank=True, null=True)\n    domain_reputation = models.TextField(blank=True, null=True)\n    domain_blacklist = models.TextField(blank=True, null=True)\n    ns_record = models.TextField(db_column='NS_record', blank=True, null=True)  # Field name made lowercase.\n    mx_record = models.TextField(db_column='MX_record', blank=True, null=True)  # Field name made lowercase.\n\n    class Meta:\n        managed = True\n        db_table = 'main_domain_results'\n\n\nclass Projects(models.Model):\n    project_id = models.IntegerField(blank=True, primary_key=True)\n    project_domain = models.TextField(blank=True, null=True)\n    project_org = models.TextField(blank=True, null=True)\n    started_time = models.TextField(blank=True, null=True)\n\n    class Meta:\n        managed = True\n        db_table = 'projects'\n\n    def __unicode__(self):\n        return self.project_domain\n\nclass SubdomainResults(models.Model):\n    id = models.IntegerField(blank=True, primary_key=True)\n    project_id = models.IntegerField(blank=True, null=True)\n    subdomain = models.TextField(blank=True, null=True)\n    ip_address = models.TextField(blank=True, null=True)\n    banner = models.TextField(blank=True, null=True)\n    wappalyzer = models.TextField(blank=True, null=True)\n    robots_txt = 
models.TextField(blank=True, null=True)\n    is_contain_git = models.IntegerField(blank=True, null=True)\n    is_contain_svn = models.IntegerField(blank=True, null=True)\n\n    class Meta:\n        managed = True\n        db_table = 'subdomain_results'\n"
  },
  {
    "path": "web/web/settings.py",
    "content": "\"\"\"\nDjango settings for web project.\n\nGenerated by 'django-admin startproject' using Django 1.11.2.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.11/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.11/ref/settings/\n\"\"\"\n\nimport os\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = 'i+2-zy6rl=!w+z8zai5un*ta$!+^lmlogkid*a6j0*ssfq&f_@'\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = True\n\nALLOWED_HOSTS = []\n\n\n# Application definition\n\nINSTALLED_APPS = [\n    'django.contrib.admin',\n    'django.contrib.auth',\n    'django.contrib.contenttypes',\n    'django.contrib.sessions',\n    'django.contrib.messages',\n    'django.contrib.staticfiles',\n    'web',\n]\n\nMIDDLEWARE = [\n    'django.middleware.security.SecurityMiddleware',\n    'django.contrib.sessions.middleware.SessionMiddleware',\n    'django.middleware.common.CommonMiddleware',\n    'django.middleware.csrf.CsrfViewMiddleware',\n    'django.contrib.auth.middleware.AuthenticationMiddleware',\n    'django.contrib.messages.middleware.MessageMiddleware',\n    'django.middleware.clickjacking.XFrameOptionsMiddleware',\n]\n\nROOT_URLCONF = 'web.urls'\n\nTEMPLATES = [\n    {\n        'BACKEND': 'django.template.backends.django.DjangoTemplates',\n        'DIRS': [],\n        'APP_DIRS': True,\n        'OPTIONS': {\n            'context_processors': [\n                'django.template.context_processors.debug',\n                'django.template.context_processors.request',\n                'django.contrib.auth.context_processors.auth',\n               
 'django.contrib.messages.context_processors.messages',\n            ],\n        },\n    },\n]\n\nWSGI_APPLICATION = 'web.wsgi.application'\n\n\n# Database\n# https://docs.djangoproject.com/en/1.11/ref/settings/#databases\n\nDATABASES = {\n    'default': {\n        'ENGINE': 'django.db.backends.sqlite3',\n        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),\n    }\n}\n\n\n# Password validation\n# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n    {\n        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n    },\n    {\n        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n    },\n    {\n        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n    },\n    {\n        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n    },\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.11/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.11/howto/static-files/\n\nSTATIC_URL = '/static/'\n"
  },
  {
    "path": "web/web/templates/about.html",
    "content": "{% extends \"header.html\" %}\n\n{% block content %}\n\n\n<h4> Belati - The Traditional Swiss Army Knife For OSINT </h4>\n<hr/>\n<p>\nBelati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. This tools is inspired by Foca and Datasploit for OSINT :)\n</p>\n\n<h4>Why I Made this?</h4>\n<hr/>\nJust for learning stuff and OSINT purpose.\n\n<h4>What Belati can do?</h4>\n<hr/>\n<ul>\n  <li>Whois(Indonesian TLD Support)</li>\n  <li>Banner Grabbing</li>\n  <li>Subdomain Enumeration</li>\n  <li>Service Scanning for all Subdomain Machine</li>\n  <li>Web Appalyzer Support</li>\n  <li>DNS mapping / Zone Scanning</li>\n  <li>Mail Harvester from Website & Search Engine</li>\n  <li>Mail Harvester from MIT PGP Public Key Server</li>\n  <li>Scrapping Public Document for Domain from Search Engine</li>\n  <li>Fake and Random User Agent ( Prevent from blocking )</li>\n  <li>Proxy Support for Harvesting Emails and Documents</li>\n  <li>Public Git Finder in domain/subdomain</li>\n  <li>Public SVN Finder in domain/subdomain</li>\n  <li>Robot.txt Scraper in domain/subdomain</li>\n  <li>Gather Public Company Info & Employee</li>\n  <li>SQLite3 Database Support for storing Belati Results</li>\n  <li>Setup Wizard/Configuration for Belati</li>\n</ul>\n\n<h4>Author</h4>\n<hr/>\nAan Wahyu a.k.a Petruknisme(<a href=\"https://petruknisme.com\">https://petruknisme.com</a>)\n\n<h4>Thanks To</h4>\n<hr/>\nThanks to PyWhois Library, Sublist3r, MailHarvester, Emingoo for being part of my code. 
Also thanks to Hispagatos, Infosec-ninjas, eCHo, RNDC( Research and development center ) and all other people who are inspiring this project :)\n<br/><br/>\nThanks to Echo-Zine Staff for approving my Ezine : <a href=\"http://ezine.echo.or.id/issue31/005.txt\">http://ezine.echo.or.id/issue31/005.txt</a> - Belati : Collecting Public Data & Public Document for OSINT Purpose - Petruknisme\n\n<h4>Feedback/Suggestion</h4>\n<hr/>\nFeel free to create Issue in this repository or email me at <b>cacaddv [at] gmail.com</b> . Your feedback and suggestion is useful for Belati development progress :)\n\n<h4>License</h4>\n<hr/>\nBelati is licensed under GPL V2. You can use, modify, or redistribute this tool under the terms of GNU General Public License (GPLv2).\n<br/>\n<blockquote>\n  Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.\n  This tools is inspired by Foca and Datasploit for OSINT\n  Copyright (C) 2017  cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)\n\n  This program is free software: you can redistribute it and/or modify\n  it under the terms of the GNU General Public License as published by\n  the Free Software Foundation, either version 2 of the License, or\n  (at your option) any later version.\n\n  This program is distributed in the hope that it will be useful,\n  but WITHOUT ANY WARRANTY; without even the implied warranty of\n  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n  GNU General Public License for more details.\n\n  You should have received a copy of the GNU General Public License\n  along with this program.  If not, see <http://www.gnu.org/licenses/>.\n</blockquote>\n{% endblock %}\n"
  },
  {
    "path": "web/web/templates/base.html",
    "content": "{% block header %}{% endblock %}\n{% block navbar %}{% endblock %}\n{% block content %}{% endblock %}\n{% block footer %}{% endblock %}\n"
  },
  {
    "path": "web/web/templates/footer.html",
    "content": "<footer class=\"footer\">\n   <div class=\"container\">\n     <span class=\"text-muted\">Belati - Copyright 2018</span>\n   </div>\n</footer>\n"
  },
  {
    "path": "web/web/templates/header.html",
    "content": "<head>\n<!-- Latest compiled and minified CSS -->\n<link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\">\n\n<!-- Optional theme -->\n<link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap-theme.min.css\">\n<script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js\"></script>\n<!-- Latest compiled and minified JavaScript -->\n<script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js\"></script>\n\n<style>\n/* Sticky footer styles\n-------------------------------------------------- */\nhtml {\n  position: relative;\n  min-height: 100%;\n}\nbody {\n  /* Margin bottom by footer height */\n  margin-bottom: 60px;\n}\n.footer {\n  position: absolute;\n  bottom: 0;\n  width: 100%;\n  /* Set the fixed height of the footer here */\n  height: 60px;\n  line-height: 60px; /* Vertically center the text there */\n  background-color: #f5f5f5;\n  text-align: center;\n}\n\n\n/* Custom page CSS\n-------------------------------------------------- */\n/* Not required for template or sticky footer method. 
*/\n\n.container {\n  width: auto;\n  max-width: 90%;\n  padding: 0 15px;\n}\n\n\n#exTab1 .tab-content {\n  color : white;\n  background-color: #428bca;\n  padding : 5px 15px;\n}\n\n#exTab2 h3 {\n  color : white;\n  background-color: #428bca;\n  padding : 5px 15px;\n}\n\n/* remove border radius for the tab */\n\n#exTab1 .nav-pills > li > a {\n  border-radius: 0;\n}\n\n/* change border radius for the tab , apply corners on top*/\n\n#exTab3 .nav-pills > li > a {\n  border-radius: 4px 4px 0 0 ;\n}\n\n#exTab3 .tab-content {\n  color : white;\n  background-color: #428bca;\n  padding : 5px 15px;\n}\n\n</style>\n</head>\n\n<body>\n<nav class=\"navbar navbar-default\">\n  <div class=\"container-fluid\">\n    <!-- Brand and toggle get grouped for better mobile display -->\n    <div class=\"navbar-header\">\n      <button type=\"button\" class=\"navbar-toggle collapsed\" data-toggle=\"collapse\" data-target=\"#bs-example-navbar-collapse-1\" aria-expanded=\"false\">\n        <span class=\"sr-only\">Toggle navigation</span>\n        <span class=\"icon-bar\"></span>\n        <span class=\"icon-bar\"></span>\n        <span class=\"icon-bar\"></span>\n      </button>\n      <a class=\"navbar-brand\" href=\"http://127.0.0.1:8000\">Belati</a>\n    </div>\n\n    <!-- Collect the nav links, forms, and other content for toggling -->\n    <div class=\"collapse navbar-collapse\" id=\"bs-example-navbar-collapse-1\">\n      <ul class=\"nav navbar-nav\">\n        <li><a href=\"http://127.0.0.1:8000\">Home</a></li>\n        <li><a href=\"{{ url }}/about\">About</a></li>\n        <li><a href=\"#\">Reports</a></li>\n        <!-- <li class=\"dropdown\">\n          <a href=\"#\" class=\"dropdown-toggle\" data-toggle=\"dropdown\" role=\"button\" aria-haspopup=\"true\" aria-expanded=\"false\">Dropdown <span class=\"caret\"></span></a>\n          <ul class=\"dropdown-menu\">\n            <li><a href=\"#\">Action</a></li>\n            <li><a href=\"#\">Another action</a></li>\n            <li><a 
href=\"#\">Something else here</a></li>\n            <li role=\"separator\" class=\"divider\"></li>\n            <li><a href=\"#\">Separated link</a></li>\n            <li role=\"separator\" class=\"divider\"></li>\n            <li><a href=\"#\">One more separated link</a></li>\n          </ul>\n        </li> -->\n      </ul>\n      <ul class=\"nav navbar-nav navbar-right\">\n        <form class=\"navbar-form navbar-left\">\n          <div class=\"form-group\">\n            <input type=\"text\" class=\"form-control\" placeholder=\"Search\">\n          </div>\n          <button type=\"submit\" class=\"btn btn-default\">Search</button>\n        </form>\n        <li><a href=\"#\">Link</a></li>\n        <li class=\"dropdown\">\n          <a href=\"#\" class=\"dropdown-toggle\" data-toggle=\"dropdown\" role=\"button\" aria-haspopup=\"true\" aria-expanded=\"false\">Dropdown <span class=\"caret\"></span></a>\n          <ul class=\"dropdown-menu\">\n            <li><a href=\"#\">Action</a></li>\n            <li><a href=\"#\">Another action</a></li>\n            <li><a href=\"#\">Something else here</a></li>\n            <li role=\"separator\" class=\"divider\"></li>\n            <li><a href=\"#\">Separated link</a></li>\n          </ul>\n        </li>\n      </ul>\n    </div><!-- /.navbar-collapse -->\n  </div><!-- /.container-fluid -->\n</nav>\n\n<div class=\"content container\">\n        {% block content %}{% endblock %}\n</div>\n\n{% include \"footer.html\" %}\n"
  },
  {
    "path": "web/web/templates/index.html",
    "content": "{% extends \"header.html\" %}\n\n{% block content %}\n<table class=\"table table-hover\">\n  <thead>\n    <tr>\n      <th>#</th>\n      <th>Domain</th>\n      <th>Organization/Company</th>\n      <th>Time</th>\n      <th>Action</th>\n    </tr>\n  </thead>\n  <tbody>\n    {% for pro in project_data %}\n    <tr>\n      <th scope=\"row\">{{ pro.project_id }}</th>\n      <td>{{ pro.project_domain }}</td>\n      <td>{{ pro.project_org }}</td>\n      <td>{{ pro.started_time }}</td>\n      <td><button type=\"button\" class=\"btn btn-info\" onclick=\"location.href = 'projects/{{ pro.project_id }}/view';\">View</button></td>\n    </tr>\n    {% endfor %}\n  </tbody>\n</table>\n{% endblock %}\n</body>\n"
  },
  {
    "path": "web/web/templates/projects.html",
    "content": "{% extends \"header.html\" %}\n\n{% block content %}\n<script>\n$('a[data-toggle=\"tab\"]').on('shown.bs.tab', function (e) {\n  var target = $(e.target).attr(\"href\") // activated tab\n});\n\n</script>\n<ul id=\"myTab\" class=\"nav nav-tabs\">\n  <li class=\"active\"><a href=\"#project\" data-toggle=\"tab\">Projects</a></li>\n  <li class=\"\"><a href=\"#domain\" data-toggle=\"tab\">Domain Result</a></li>\n  <li class=\"\"><a href=\"#subdomain\" data-toggle=\"tab\">Subdomain</a></li>\n  <li class=\"\"><a href=\"#mail_harvest\" data-toggle=\"tab\">Mail Harvest</a></li>\n  <li class=\"\"><a href=\"#document\" data-toggle=\"tab\">Documents</a></li>\n  <li class=\"\"><a href=\"#linkedin\" data-toggle=\"tab\">LinkedIn</a></li>\n</ul>\n<div id=\"myTabContent\" class=\"tab-content\">\n  <div class=\"tab-pane fade active in\" id=\"project\">\n    <table class=\"table table-hover\">\n      <thead>\n        <tr>\n          <th>#</th>\n          <th>Domain</th>\n          <th>Organization/Company</th>\n          <th>Time</th>\n        </tr>\n      </thead>\n      <tbody>\n        {% for pro in project_data %}\n        <tr>\n          <th scope=\"row\">{{ pro.project_id }}</th>\n          <td>{{ pro.project_domain }}</td>\n          <td>{{ pro.project_org }}</td>\n          <td>{{ pro.started_time }}</td>\n        </tr>\n        {% endfor %}\n      </tbody>\n    </table>\n  </div>\n  <div class=\"tab-pane fade\" id=\"domain\">\n    <table class=\"table table-hover\">\n    <tbody>\n      {% for domain in main_domain %}\n      <tr>\n        <th scope=\"row\">Domain:</th>\n        <td>{{ domain.domain }}</td>\n      </tr>\n      <tr>\n        <td>Domain Whois:</td>\n        <td>\n          {{ domain.domain_whois }}\n        </td>\n      </tr>\n      <tr>\n        <td>Emails:</td>\n        <td>{{ domain.email }}</td>\n      </tr>\n      <tr>\n        <td>NS Record:</td>\n        <td>{{ domain.ns_record }}</td>\n      </tr>\n      <tr>\n        <td>MX 
Record:</td>\n        <td>{{ domain.mx_record }}</td>\n      </tr>\n      {% endfor %}\n    </tbody>\n  </table>\n  </div>\n  <div class=\"tab-pane fade\" id=\"subdomain\">\n    <table class=\"table table-hover\">\n      <thead>\n        <tr>\n          <th>#</th>\n          <th>Subdomain</th>\n          <th>IP Address</th>\n          <th>Banner</th>\n          <th>Wappalyzer</th>\n          <th>Robots</th>\n          <th>Git</th>\n          <th>SVN</th>\n        </tr>\n      </thead>\n      <tbody>\n        {% for subdomain in subdomain_results %}\n        <tr>\n          <th scope=\"row\">{{ subdomain.id }}</th>\n          <td>{{ subdomain.subdomain }}</td>\n          <td>{{ subdomain.ip_address }}</td>\n          <td>{{ subdomain.banner }}</td>\n          <td>{{ subdomain.wappalyzer }}</td>\n          <td>{{ subdomain.robots_txt }}</td>\n          <td>{{ subdomain.is_contain_git }}</td>\n          <td>{{ subdomain.is_contain_svn }}</td>\n        </tr>\n        {% endfor %}\n      </tbody>\n    </table>\n  </div>\n  <div class=\"tab-pane fade\" id=\"mail_harvest\">\n    <table class=\"table table-hover\">\n      <thead>\n        <tr>\n          <th>#</th>\n          <th>Mail Results</th>\n          <th>PGP Mail Results</th>\n        </tr>\n      </thead>\n      <tbody>\n        {% for mail_data in mail_harvest %}\n        <tr>\n          <th scope=\"row\">{{ mail_data.id }}</th>\n          <td>{{ mail_data.mail_results }}</td>\n          <td>{{ mail_data.mail_pgp_results }}</td>\n        </tr>\n        {% endfor %}\n      </tbody>\n    </table>\n  </div>\n  <div class=\"tab-pane fade\" id=\"document\">\n    <table class=\"table table-hover\">\n      <thead>\n        <tr>\n          <th>#</th>\n          <th>URL</th>\n          <th>File Location</th>\n          <th>Action</th>\n        </tr>\n      </thead>\n      <tbody>\n        {% for doc in doc_result %}\n        <tr>\n          <th scope=\"row\">{{ doc.id }}</th>\n          <td>{{ doc.doc_url }}</td>\n        
  <td>{{ doc.doc_location }}</td>\n          <td><button type=\"button\" class=\"btn btn-info\">View</button></td>\n        </tr>\n        {% endfor %}\n      </tbody>\n    </table>\n  </div>\n  <div class=\"tab-pane fade\" id=\"linkedin\">\n    <h3> Company Info </h3>\n    <hr/>\n    {% for info in company_info %}\n    Company Name: {{ info.company_name }} <br/>\n    LinkedIn URL: {{ info.company_linkedin_url }}\n    {% endfor %}\n    <h3> Company Employees </h3>\n    <table class=\"table table-hover\">\n      <thead>\n        <tr>\n          <th>#</th>\n          <th>Name</th>\n          <th>Job Title</th>\n          <th>LinkedIn URL</th>\n        </tr>\n      </thead>\n      <tbody>\n        {% for employee in company_employee %}\n        <tr>\n          <th scope=\"row\">{{ employee.id}}</th>\n          <td>{{ employee.name }}</td>\n          <td>{{ employee.job_title }}</td>\n          <td>{{ employee.linkedin_url }}</td>\n        </tr>\n        {% endfor %}\n      </tbody>\n    </table>\n  </div>\n</div>\n\n{% endblock %}\n"
  },
  {
    "path": "web/web/urls.py",
    "content": "\"\"\"web URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n    https://docs.djangoproject.com/en/1.11/topics/http/urls/\nExamples:\nFunction views\n    1. Add an import:  from my_app import views\n    2. Add a URL to urlpatterns:  url(r'^$', views.home, name='home')\nClass-based views\n    1. Add an import:  from other_app.views import Home\n    2. Add a URL to urlpatterns:  url(r'^$', Home.as_view(), name='home')\nIncluding another URLconf\n    1. Import the include() function: from django.conf.urls import url, include\n    2. Add a URL to urlpatterns:  url(r'^blog/', include('blog.urls'))\n\"\"\"\nfrom django.conf.urls import url\nfrom django.contrib import admin\n\nfrom views import index_page, view_projects, about_page\n\nurlpatterns = [\n    url(r'^admin/', admin.site.urls),\n    url(r'^$', index_page),\n    url(r'^projects/(?P<id>\\d+)/view/$', view_projects),\n    url(r'^about', about_page),\n]\n"
  },
  {
    "path": "web/web/views.py",
    "content": "from django.http import HttpResponse, HttpResponseRedirect, Http404\nfrom django.shortcuts import render_to_response, get_object_or_404\n\nfrom models import Projects, MainDomainResults, SubdomainResults, MailHarvestResults, DocResults, LinkedinCompanyInfo, LinkedinCompanyEmployees\n\ndef index_page(request):\n    project_list = Projects.objects.all().order_by('project_id')\n    return render_to_response('index.html',\n                          {'project_data': project_list})\n\n\ndef view_projects(request, id):\n    project_list = Projects.objects.filter(project_id=id)\n    main_domain = MainDomainResults.objects.filter(project_id=id)\n    subdomain = SubdomainResults.objects.filter(project_id=id)\n    mail_harvest = MailHarvestResults.objects.filter(project_id=id)\n    doc_result = DocResults.objects.filter(project_id=id)\n    company_info = LinkedinCompanyInfo.objects.filter(project_id=id)\n    company_employee = LinkedinCompanyEmployees.objects.filter(project_id=id)\n\n    return render_to_response('projects.html', {'project_data' : project_list, 'main_domain' : main_domain, 'subdomain_results' : subdomain, 'mail_harvest' : mail_harvest, 'doc_result' : doc_result, 'company_info' : company_info, 'company_employee' : company_employee})\n\ndef about_page(request):\n    return render_to_response('about.html')\n"
  },
  {
    "path": "web/web/wsgi.py",
    "content": "\"\"\"\nWSGI config for web project.\n\nIt exposes the WSGI callable as a module-level variable named ``application``.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/\n\"\"\"\n\nimport os\n\nfrom django.core.wsgi import get_wsgi_application\n\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"web.settings\")\n\napplication = get_wsgi_application()\n"
  }
]