Repository: aancw/Belati Branch: master Commit: 49577a161189 Files: 84 Total size: 284.6 KB Directory structure: gitextract_rrye733h/ ├── .github/ │ └── issue_template.md ├── .gitignore ├── .gitmodules ├── Belati.py ├── CHANGELOG.md ├── CONTRIBUTING.md ├── CONTRIBUTORS.md ├── Dockerfile ├── LICENSE ├── README.md ├── lib/ │ ├── __init__.py │ └── pywhois/ │ ├── .hg_archival.txt │ ├── .hgignore │ ├── MANIFEST.in │ ├── README.rst │ ├── __init__.py │ ├── setup.py │ ├── test/ │ │ ├── samples/ │ │ │ ├── expected/ │ │ │ │ ├── digg.com │ │ │ │ ├── google.com │ │ │ │ ├── imdb.com │ │ │ │ ├── microsoft.com │ │ │ │ ├── reddit.com │ │ │ │ └── urlowl.com │ │ │ └── whois/ │ │ │ ├── digg.com │ │ │ ├── google.com │ │ │ ├── imdb.com │ │ │ ├── microsoft.com │ │ │ ├── reddit.com │ │ │ ├── shazow.net │ │ │ ├── slashdot.org │ │ │ ├── squatter.net │ │ │ └── urlowl.com │ │ ├── test_main.py │ │ ├── test_nicclient.py │ │ ├── test_parser.py │ │ └── test_query.py │ └── whois/ │ ├── __init__.py │ ├── data/ │ │ └── tlds.txt │ ├── parser.py │ ├── time_zones.py │ └── whois.py ├── plugins/ │ ├── __init__.py │ ├── about_project.py │ ├── banner_grab.py │ ├── check_domain.py │ ├── common_service_check.py │ ├── config.py │ ├── database.py │ ├── dep_check.py │ ├── gather_company.py │ ├── git_finder.py │ ├── harvest_email.py │ ├── harvest_public_document.py │ ├── json_beautifier.py │ ├── logger.py │ ├── meta_exif_extractor.py │ ├── robots_scraper.py │ ├── scan_nmap.py │ ├── subdomain_enum.py │ ├── svn_finder.py │ ├── updater.py │ ├── url_request.py │ ├── user_agents.py │ ├── util.py │ └── wappalyzer.py ├── requirements.txt ├── version └── web/ ├── manage.py └── web/ ├── __init__.py ├── migrations/ │ ├── 0001_initial.py │ ├── 0002_auto_20170727_1741.py │ ├── 0003_docresults_doc_author.py │ └── __init__.py ├── models.py ├── settings.py ├── templates/ │ ├── about.html │ ├── base.html │ ├── footer.html │ ├── header.html │ ├── index.html │ └── projects.html ├── urls.py ├── views.py └── wsgi.py 
================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/issue_template.md ================================================ Please provide the following details. ### Host System - OS : - Python version (`python --version`) : - Pip version (`pip --version`) : - Output of `pip freeze` : [Upload the output to GitHub gists and provide link] ### Error Description Please provide the details of the error. Try to provide the **output** and also **steps to reproduce** if required. ================================================ FILE: .gitignore ================================================ __pycache__/ plugins/__pycache__/ *.pyc lib/*.pyc plugins/*.pyc belatiFiles/* logs/* .directory lib/.directory plugins/.directory proxy.txt belati.conf beta/* #remove comment after 0.2.3-dev web/db.sqlite3 ================================================ FILE: .gitmodules ================================================ [submodule "CheckMyUsername"] path = lib/CheckMyUsername url = https://github.com/aancw/CheckMyUsername ================================================ FILE: Belati.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # We need to check Dependency first from plugins.dep_check import DepCheck dep_check = DepCheck() dep_check.check_dependency() import argparse import datetime import urllib2 import sys, signal, socket, re import time import dns.resolver import tldextract import shlex, subprocess from plugins.about_project import AboutProject from plugins.banner_grab import BannerGrab from plugins.check_domain import CheckDomain from plugins.config import Config from plugins.common_service_check import CommonServiceCheck from plugins.database import Database from plugins.gather_company import GatherCompany from plugins.git_finder import GitFinder from plugins.harvest_email import HarvestEmail from plugins.harvest_public_document import HarvestPublicDocument from plugins.json_beautifier import JsonBeautifier from plugins.logger import Logger from plugins.meta_exif_extractor import MetaExifExtractor from plugins.robots_scraper import RobotsScraper from plugins.scan_nmap import ScanNmap from plugins.subdomain_enum import SubdomainEnum from plugins.svn_finder import SVNFinder from plugins.updater import Updater from plugins.url_request import URLRequest from plugins.util import Util from plugins.wappalyzer import Wappalyzer from lib.CheckMyUsername.check_my_username import CheckMyUsername from dnsknife.scanner import Scanner from urlparse import urlparse from cmd2 import Cmd from tabulate import tabulate from texttable import Texttable # Console color G = '\033[92m' # green Y = '\033[93m' # yellow B = '\033[94m' # blue R = '\033[91m' # red W = '\033[0m' # white UNDERLINE = '\033[4m' ENDC = '\033[0m' log = Logger() util = Util() class Belati(Cmd): def __init__(self): self.about = AboutProject() self.url_req = URLRequest() Cmd.doc_header = "Core Commands" Cmd.prompt = "{}belati{} > ".format(UNDERLINE, ENDC) Cmd.path_complete 
Cmd.__init__(self) self.list_parameter = ['domain', 'username', 'email', 'orgcomp', 'proxy', 'proxy_file'] self.parameters = {} self.multiple_proxy_list = [] self.current_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.show_banner() self.conf = Config() self.db = Database() def show_banner(self): banner = """ {} /$$$$$$$ /$$$$$$$$ /$$ /$$$$$$ /$$$$$$$$ . | $$__ $$| $$_____/| $$ /$$__ $$|__ $$__/ J:L | $$ \ $$| $$ | $$ | $$ \ $$ | $$ |:| | $$$$$$$ | $$$$$ | $$ | $$$$$$$$ | $$ |:| | $$__ $$| $$__/ | $$ | $$__ $$ | $$ |:| | $$ \ $$| $$ | $$ | $$ | $$ | $$ |:| | $$$$$$$/| $$$$$$$$| $$$$$$$$| $$ | $$ | $$ /] |:| [\ |_______/ |________/|________/|__/ |__/ |__/ \:-'\"""'-:/ ""III"" III III III (___) The Traditional Swiss Army Knife for OSINT =[ {} {} by {}]= + -- --=[ {} ]=-- -- + + -- --=[ {} ]=-- -- + {} """ warning_message = """ {} This tool is for educational purposes only. Any damage you make will not affect the author. Do It With Your Own Risk! For Better Privacy, Please Use proxychains or other proxy service! 
{} """ log.console_log(banner.format(G, self.about.__name__, self.about.__version__, self.about.__author__, self.about.__info__, self.about.__authorurl__, W)) log.console_log(warning_message.format(R, W)) def do_help(self, line): 'print help message' print("\nCore commands") print("==============\n") print tabulate([["Name","Description"], ["?", "Help menu"], ["!", "Run OS Command"], ["history", "Show command history"], ["set", "Set parameters option value"], ["show", "Display list available parameter option"], ["start", "Start Automatic Scanning Belati"], ["startws", "Start Web Server Only Mode"], ["version", "Show application version number"], ["quit", "Exit the application"]], headers="firstrow") def do_set(self, arg, opts=None): '''Set Variable for Belati parameters.\nUsage: set [option] [value]\n\nAvailable options:\ndomain, username, email, orgcomp, proxy, proxy_file''' if not arg: log.console_log('{} Set Variable for Belati parameters.\nUsage: set [option] [value]\n\nAvailable options:\ndomain, username, email, orgcomp, proxy, proxy_file {}'.format(W, W)) else: param = shlex.split(arg) key = param[0] value = param[1] if key in self.list_parameter: self.parameters[key] = value log.console_log('{} => {}'.format(self.parameters[key], value)) else: log.console_log("Available parameters: domain, username, email, orgcomp, proxy, proxy_file") def do_show(self, arg, opts=None): 'Show available parameter options' domain_val = self.parameters['domain'] if 'domain' in self.parameters else None orgcomp = self.parameters['orgcomp'] if 'orgcomp' in self.parameters else None email = self.parameters['email'] if 'email' in self.parameters else None username = self.parameters['username'] if 'username' in self.parameters else None proxy = self.parameters['proxy'] if 'proxy' in self.parameters else None proxy_file = self.parameters['proxy_file'] if 'proxy_file' in self.parameters else None org_val = "" arg = shlex.split(arg) if not arg: print("Please use command 'show options' 
to see list of option parameters") elif arg[0] == "options": print tabulate([["Name","Value", "Required", "Description"], ["domain", domain_val, "Yes", "Domain name for OSINT"], ["orgcomp", orgcomp, "Yes", "Organization/Company name for OSINT"], ["email", email, "Optional", "Email address for OSINT"], ["username", username, "Optional", "Username for OSINT"], ["proxy", proxy, "Optional", "Proxy server(e.g http://127.0.0.1:8080)"], ["proxy_file", proxy_file, "Optional", "Proxy file list location"]], headers="firstrow") def do_startws(self, line): 'Start Belati in Web Server Only Mode' log.console_log("{}[*] Entering Web Server Only Mode...{}".format(Y,W)) self.start_web_server() sys.exit() def do_version(self, line): 'Check current Belati version' log.console_log('{} {} by {}\n'.format(self.about.__name__, self.about.__version__, self.about.__author__)) log.console_log('Project URL: {}'.format(self.about.__giturl__)) def do_start(self, line): 'Start automatic scanning' domain = self.parameters['domain'] if 'domain' in self.parameters else None orgcomp = self.parameters['orgcomp'] if 'orgcomp' in self.parameters else None email = self.parameters['email'] if 'email' in self.parameters else None username = self.parameters['username'] if 'username' in self.parameters else None proxy = self.parameters['proxy'] if 'proxy' in self.parameters else '' proxy_file = self.parameters['proxy_file'] if 'proxy_file' in self.parameters else '' if domain is None and orgcomp is None: log.console_log("{}[-] Please specify domain/organization {}".format(R,W)) sys.exit() log.console_log("{}[*] Starting at: {} {}".format(Y, self.current_time , W)) self.updater = Updater() self.updater.check_update(self.about.__version__) # Setup project self.project_id = self.db.create_new_project(domain, orgcomp, self.current_time) log.console_log("{}[+] Creating New Belati Project... 
{}".format(G, W)) log.console_log("---------------------------------------------------------") log.console_log("Project ID: {}".format(str(self.project_id))) log.console_log("Project Domain: {}".format(domain)) log.console_log("Project Organization/Company: {}".format(orgcomp)) log.console_log("---------------------------------------------------------") if domain is not None: if proxy is not '': log.console_log("{}[*] Checking Proxy Status... {}".format(G, W)) if self.check_single_proxy_status(proxy, "http://" + str(domain)) == 'ok': pass else: log.console_log('{}[-] Please use another proxy or disable proxy! {}'.format(R, W)) sys.exit() if proxy_file is not '': log.console_log("{}[*] Checking Proxy Status from file {}{}".format(G, proxy_file, W)) self.check_multiple_proxy_status(proxy_file, "http://" + str(domain)) proxy = self.multiple_proxy_list extract_domain = tldextract.extract(domain) self.check_domain(self.url_req.ssl_checker(domain), proxy) self.banner_grab(self.url_req.ssl_checker(domain), proxy) if extract_domain.subdomain == "": self.robots_scraper(self.url_req.ssl_checker(domain), proxy) self.enumerate_subdomains(domain, proxy) self.scan_DNS_zone(domain) self.harvest_email_search(domain, proxy) self.harvest_email_pgp(domain, proxy) else: domain = extract_domain.domain + '.' + extract_domain.suffix self.robots_scraper(self.url_req.ssl_checker(domain), proxy) self.enumerate_subdomains(domain, proxy) self.scan_DNS_zone(domain) self.harvest_email_search(domain, proxy) self.harvest_email_pgp(domain, proxy) self.harvest_document(domain, proxy) if username is not None: self.username_checker(username) if orgcomp is not None: self.gather_company(orgcomp, proxy) if email is not None: log.console_log("This feature will be coming soon. Be patient :)") log.console_log("{}All done sir! 
All logs saved in {}logs{} directory and dowloaded file saved in {}belatiFiles{} {}".format(Y, B, Y, B, Y, W)) self.start_web_server() def check_domain(self, domain_name, proxy_address): check = CheckDomain() log.console_log(G + "{}[*] Checking Domain Availability... {}".format(G, W) , 0) log.console_log(check.domain_checker(domain_name, proxy_address)) log.console_log("{}[*] Checking URL Alive... {}".format(G, W), 0) log.console_log(check.alive_check(domain_name, proxy_address)) log.console_log("{}[*] Perfoming Whois... {}".format(G, W)) whois_result = check.whois_domain(domain_name) log.console_log(whois_result) email = re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+\s*', str(whois_result)) # JSON Beautifier json_bf = JsonBeautifier() json_whois = json_bf.beautifier(str(whois_result)) self.db.insert_domain_result(self.project_id, util.strip_scheme(domain_name), str(json_whois), util.clean_list_string(email)) def banner_grab(self, domain_name, proxy_address): banner = BannerGrab() log.console_log("{}[*] Perfoming HTTP Banner Grabbing... 
{}".format(G, W)) banner_info = banner.show_banner(domain_name, proxy_address) log.console_log(banner_info) self.db.insert_banner(domain_name, self.project_id, str(banner_info)) def enumerate_subdomains(self, domain_name, proxy): log.console_log("{}[*] Perfoming Subdomains Enumeration...{}".format(G, W)) sub_enum = SubdomainEnum() log.console_log("{}[+] Grabbing data from dnsdumpster...{}\n".format(B, W)) dnsdumpster = sub_enum.scan_dnsdumpster(domain_name) subdomain_list = [] data_table = [["Domain", "IP", "Provider", "Country"]] for entry in dnsdumpster['dns_records']['host']: data_table.extend([[entry['domain'], entry['ip'], entry['provider'], entry['country']]]) subdomain_list.append(entry['domain']) log.console_log( tabulate(data_table, headers='firstrow') ) log.console_log("{}[+] Grabbing data from crt.sh...{}\n".format(B, W)) crt_list = sub_enum.scan_crtsh(domain_name, proxy) if crt_list is not None: log.console_log("\n".join(crt_list)) subdomain_list = list(set(subdomain_list + crt_list)) log.console_log("{}[+] Grabbing data from findsubdomains.com...{}\n".format(B, W)) findsubdomains_list = sub_enum.scan_findsubdomainsCom(domain_name,proxy) if findsubdomains_list is not None: log.console_log("\n".join(findsubdomains_list)) subdomain_list = list(set(subdomain_list + findsubdomains_list)) subdomain_ip_list = [] for subdomain in subdomain_list: self.banner_grab(self.url_req.ssl_checker(subdomain), proxy) self.robots_scraper(self.url_req.ssl_checker(subdomain), proxy) self.wappalyzing_webpage(subdomain) self.public_git_finder(subdomain, proxy) self.public_svn_finder(subdomain, proxy) try: subdomain_ip_list.append(socket.gethostbyname(subdomain)) self.db.update_subdomain_ip(self.project_id, subdomain, str(socket.gethostbyname(subdomain))) except socket.gaierror: pass subdomain_ip_listFix = list(set(subdomain_ip_list)) # check common service port TODO #for ipaddress in subdomain_ip_listFix: #self.common_service_check(ipaddress) for ipaddress in 
subdomain_ip_listFix: self.service_scanning(ipaddress) def wappalyzing_webpage(self, domain): log.console_log("{}[*] Wapplyzing on domain {}{}".format(G, domain, W)) wappalyzing = Wappalyzer() targeturl = self.url_req.ssl_checker(domain) try: data = wappalyzing.run_wappalyze(targeturl) self.db.insert_wappalyzing(self.project_id, domain, data) except urllib2.URLError as exc: log.console_log('URL Error: {0}'.format(str(exc))) except urllib2.HTTPError as exc: log.console_log('HTTP Error: {0}'.format(str(exc))) except Exception as exc: log.console_log('Unknown error: {0}'.format(str(exc))) def service_scanning(self, ipaddress): scan_nm = ScanNmap() log.console_log("{}[*] Perfoming Nmap Full Scan on IP {}{}".format(G, ipaddress, W)) log.console_log("{}[*] nmap -sS -A -Pn {}{}".format(G, ipaddress, W)) scan_nm.run_scanning(ipaddress) def scan_DNS_zone(self, domain_name): log.console_log("{}[*] Perfoming DNS Zone Scanning... {}".format(G, W)) log.console_log("{}[*] Please wait, maximum timeout for checking is 1 minutes {}".format(G, W)) signal.signal(signal.SIGALRM, self.timeLimitHandler) signal.alarm(60) try: scan_list = str(list(Scanner(domain_name).scan())) ns_record_list = [] mx_record_list = [] log.console_log("{}{}{}".format(G, scan_list.replace(",","\n"), W)) log.console_log("{}DNS Server:{}".format(G, W)) for ns in dns.resolver.query(domain_name, 'NS'): log.console_log(G + ns.to_text() + W) ns_record_list.append(ns.to_text()) log.console_log("{}MX Record:{}".format(G, W)) for ns in dns.resolver.query(domain_name, 'MX'): log.console_log("{}{}{}".format(G, ns.to_text(), W)) mx_record_list.append(ns.to_text()) self.db.update_dns_zone(self.project_id, domain_name, util.clean_list_string(ns_record_list), util.clean_list_string(mx_record_list)) except Exception, exc: print("{}[*] No response from server... 
SKIP!{}".format(R, W)) def harvest_email_search(self, domain_name, proxy_address): log.console_log("{}[*] Perfoming Email Harvest from Google Search...{}".format(G, W) ) harvest = HarvestEmail() harvest_result = harvest.crawl_search(domain_name, proxy_address) try: log.console_log("{}[*] Found {} emails on domain {}{}".format(Y, str(len(harvest_result)), domain_name, W)) log.console_log("{}{}{}".format(R, '\n'.join(harvest_result), W)) self.db.insert_email_result(self.project_id, util.clean_list_string(harvest_result)) except Exception, exc: log.console_log("{}[-] Not found or Unavailable. {}{}".format(R, str(harvest_result), W )) def harvest_email_pgp(self, domain_name, proxy_address): log.console_log("{}[*] Perfoming Email Harvest from PGP Server...{}".format(G, W) ) harvest = HarvestEmail() harvest_result = harvest.crawl_pgp_mit_edu(domain_name, proxy_address) try: log.console_log("{}[*] Found {} emails on domain {}{}".format(Y, str(len(harvest_result)), domain_name, W)) log.console_log("{}{}{}".format(R, '\n'.join(harvest_result), W)) self.db.update_pgp_email(self.project_id, util.clean_list_string(harvest_result)) except Exception, exc: log.console_log("{}[-] Not found or Unavailable. {}{}".format(R, str(harvest_result), W )) def harvest_document(self, domain_name, proxy_address): log.console_log("{}[*] Perfoming Public Document Harvest from Google... {}".format(G, W)) public_doc = HarvestPublicDocument() public_doc.init_crawl(domain_name, proxy_address, self.project_id) def username_checker(self, username): log.console_log("{}[*] Perfoming Username Availability Checker... 
{}".format(G, W)) user_check = CheckMyUsername() username_status_result = user_check.check_username_availability(username) for result in username_status_result: log.console_log(G + "[+] " + result[0] + " => " + result[1] + ": " + result[2]) def check_single_proxy_status(self, proxy_address, domain_check): try: parse = urlparse(proxy_address) proxy_scheme = parse.scheme proxy = str(parse.hostname) + ':' + str(parse.port) proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy}) opener = urllib2.build_opener(proxy_handler) opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36')] urllib2.install_opener(opener) req = urllib2.Request(domain_check) start_time = time.time() sock = urllib2.urlopen(req) end_time = time.time() diff_time = round(end_time - start_time, 3) log.console_log(Y + "{}[+] {} OK! Response Time : {}s".format(Y, proxy_address, str(diff_time), W )) return 'ok' except urllib2.HTTPError, e: print('Error code: ' + str(e.code)) return e.code except Exception, detail: print('ERROR ' + str(detail)) return 1 def check_multiple_proxy_status(self, file_location, domain_check): with open(file_location) as data: text = [line.rstrip('\n') for line in data] for proxy in text: if self.check_single_proxy_status(str(proxy), str(domain_check)) == 'ok': self.multiple_proxy_list.append(proxy) def public_git_finder(self, domain, proxy_address): log.console_log("{}[*] Checking Public GIT Directory on domain {}{}".format(G, domain, W)) git_finder = GitFinder() if git_finder.check_git(domain, proxy_address) == True: log.console_log("{}[+] Gotcha! 
You are in luck, boy![{}/.git/]{}".format(Y, domain, W)) self.db.update_git_finder(self.project_id, domain, "Yes") def public_svn_finder(self, domain, proxy_address): log.console_log("{}[*] Checking Public SVN Directory on domain {}{}".format(G, domain, W)) svn_finder = SVNFinder() if svn_finder.check_svn(domain, proxy_address) == 403: log.console_log("{}[+] Um... Forbidden :( {}".format(Y, W)) if svn_finder.check_svn(domain, proxy_address) == 200: log.console_log("{}[+] Gotcha! You are in luck, boy![{}/.svn/]{}".format(Y, domain, W)) self.db.update_svn_finder(self.project_id, domain, "Yes") def robots_scraper(self, domain, proxy_address): scraper = RobotsScraper() data = scraper.check_robots(domain, proxy_address) if data is not None and isinstance(data, int) == False and data.code == 200: log.console_log("{}[+] Found interesting robots.txt[ {} ] =>{}".format(Y, domain, W)) log.console_log(data.read()) self.db.insert_robots_txt(self.project_id, domain, str(data.read())) def gather_company(self, company_name, proxy_address): log.console_log("{}[+] Gathering Company Employee {} -> {}".format(G, W, company_name)) gather_company = GatherCompany() gather_company.crawl_company_employee(company_name, proxy_address, self.project_id) def start_web_server(self): log.console_log("{}Starting Django Web Server at http://127.0.0.1:8000/{}".format(Y, W)) py_bin = self.conf.get_config("Environment", "py_bin") command = "{} web/manage.py runserver 0.0.0.0:8000".format(py_bin) process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE) while True: output = process.stdout.readline() if output == '' and process.poll() is not None: break if output: log.console_log(output.strip()) rc = process.poll() return rc def complete_set(self, text, line, start_index, end_index): if text: return [ param for param in self.list_parameter if param.startswith(text) ] else: return self.list_parameter def common_service_check(self, host): log.console_log("{}[*] Checking Common Service 
Check on host {}{}".format(G, host, W)) service_check = CommonServiceCheck() service_check.check_available_service(host) def timeLimitHandler(self, signum, frame): print("No Response...") if __name__ == '__main__': BelatiApp = Belati() BelatiApp.cmdloop() ================================================ FILE: CHANGELOG.md ================================================ Changelog: v0.2.0-dev: - Add Gather Public Company Employee - Add SVN Finder - Update URL Request - Rework Code - Fix small bug - Update Harvest Public Document Regex - Add version for updater v0.2.1-dev: - Add Belati Configuration Wizard - [Core] - Add Database Support( On Progress ) - Rework Code - Update Database System - Update Creating Project Info - Update Sublist3r - Update Gather Company Plugin - Update README v0.2.2-dev: - Add Django Web Management - Update Auto Configuration - Add auto start Django - Update output result - Update Gather Company Info v0.2.3-dev - Add Metadata/exif info for document - Update Database Schema - Auto Update system - Update URL File validation checker - Clean Output Result - Update Banner - Check git control status for update checker - Add Web Server Only Mode - Rework Code v0.2.4 - Migrating argument parse to interactive command line shell - Implementing command line shell mode - Remove git branch version from Dockerfile - Update Dependencies Checker, more accurate with module version comparison - Migrating sublist3r to manual checking dnsdumpster, crtsh and other service will be coming soon - Fixing bug for stability and improvement ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing to Belati Belati welcomes contribution from everyone. # How to contribute I ❤️ pull requests. If you'd like to fix a bug, contribute a feature or just correct a typo, please feel free to do so.
Belati has so many [TODO](https://github.com/aancw/Belati/issues/12) :) ## Getting Started To start working on the Belati, first fork the repo, then clone it: ``` git clone git@github.com:your-username/Belati.git ``` *Note: replace "your-username" with your GitHub handle* ### On Progress.... ================================================ FILE: CONTRIBUTORS.md ================================================ # Belati Contributors * **[Chandrapal](https://github.com/Chan9390)** * Changed webserver's ip from loopback * Making docker version of Belati ================================================ FILE: Dockerfile ================================================ FROM debian:stretch-slim LABEL MAINTAINER "Chandrapal " RUN cd /home \ && apt-get update \ && apt-get install -y git python python-pip nmap exiftool \ && git clone https://github.com/aancw/Belati.git \ && cd Belati \ && git submodule update --init --recursive --remote \ && pip install --upgrade --force-reinstall -r requirements.txt \ && echo 'alias belati="python /home/Belati/Belati.py"' >> ~/.bashrc WORKDIR /home/Belati EXPOSE 8000 ================================================ FILE: LICENSE ================================================ GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it.
(Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. 
The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. 
You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. 
Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. 
However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. 
If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. 
If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. {description} Copyright (C) {year} {fullname} This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. {signature of Ty Coon}, 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. 
================================================ FILE: README.md ================================================ # Belati ![Belati](/images/Belati-logo.png?raw=true "Belati Logo") [![Awesome OSINT](https://img.shields.io/badge/awesome-osint-brightgreen.svg)](https://github.com/jivoi/awesome-osint) [![OSINT Framework](https://img.shields.io/badge/osint-framework-brightgreen.svg)](http://osintframework.com) [![n0where](https://img.shields.io/badge/n0where-top%20100-lightgrey.svg)](https://n0where.net/best-cybersecurity-tools/) [![ToolsWatch](https://img.shields.io/badge/Tools-Watch-brightgreen.svg)](http://www.toolswatch.org/2017/07/belati-v-0-2-2-dev-swiss-army-knife-for-osint/) [![BlackArch Scanner](https://img.shields.io/badge/BlackArch-Scanner-red.svg)](https://blackarch.org/scanner.html) [![Echo Ezine 31](https://img.shields.io/badge/Echo-Ezine%2031-yellow.svg)](http://ezine.echo.or.id/issue31/005.txt) ### Belati - The Traditional Swiss Army Knife For OSINT Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. This tools is inspired by Foca and Datasploit for OSINT :) ## Current Version v0.2.4 ## Belati In Action [![Belati In Action 0.24-stable Preview](https://img.youtube.com/vi/yRSln6BSo-c/0.jpg)](https://www.youtube.com/watch?v=yRSln6BSo-c) ## Why I Made this? Just for learning stuff and OSINT purpose. Correct me if i'm wrong ## What Belati can do? 
- Interactive command line shell - Whois(Indonesian TLD Support) - Banner Grabbing - Subdomain Enumeration - Service Scanning for all Subdomain Machine - Web Appalyzer Support - DNS mapping / Zone Scanning - Mail Harvester from Website & Search Engine - Mail Harvester from MIT PGP Public Key Server - Scrapping Public Document for Domain from Search Engine - Fake and Random User Agent ( Prevent from blocking ) - Proxy Support for Harvesting Emails and Documents - Public Git Finder in domain/subdomain - Public SVN Finder in domain/subdomain - Robot.txt Scraper in domain/subdomain - Gather Public Company Info & Employee - SQLite3 Database Support for storing Belati Results - Setup Wizard/Configuration for Belati - Django Web Management - Webserver only mode - Auto Dependency Checker - Auto Update system - Document Metadata/Exif Extractor - Document Author Metadata - Graph Visualization( On Progress ) ## TODO Please see Belati TODO list here -> https://github.com/aancw/Belati/issues/12 ## Library - python-whois - Sublist3r - Subbrute ## Requirements - nmap - git - sqlite3 - exiftool ## Install/Usage ``` git clone https://github.com/aancw/Belati.git cd Belati git submodule update --init --recursive --remote pip install --upgrade pip pip install -r requirements.txt #please use pip with python v2 sudo su python Belati.py --help ``` ## Docker Installation - Download Dockerfile: ```bash wget https://raw.githubusercontent.com/aancw/Belati/master/Dockerfile ``` - Execute the following command to create a Docker image locally: ```bash docker build -t belati . #dot ``` - To create a container from the image, execute: ```bash docker run -p 8000:8000 -it belati /bin/bash ``` - Running Belati ```bash belati -h ``` For more info, please refer to this guide: https://github.com/espi0n/Dockerfiles/blob/master/Belati/README.md ## Tested On - Ubuntu 16.04 x86_64 - Arch Linux x86_64 - CentOS 7 - Debian Jessie - MacOS ## Python Requirements This tool not compatible with Python 3. 
I need to migrate this later. So use python v2.7 instead! ## Why Need Root Privilege? I've try to avoid using Root Privilege, but nmap need Root Privilege. You can add sudo or other way to run nmap without root privilege. It's your choice ;) Reference -> https://secwiki.org/w/Running_nmap_as_an_unprivileged_user Don't worry. Belati still running well when you are run with normal user ;) ## Dependencies - urllib2 - dnspython - requests - argparse - texttable - python-geoip-geolite2 - python-geoip - dnsknife - termcolor - colorama - validators - tqdm - tldextract - fake-useragent - python-wappalyzer - future - beautifulsoup4 - python-whois - futures - django - pyexifinfo - cmd2 - tabulate ## Missing Dependencies? If you are seeing this ``` $ python Belati.py You are missing a module required for Belati. In order to continue using Belati, please install them with: `pip install --upgrade --force-reinstall -r requirements.txt` or manually install missing modules with: `pip install --upgrade --force-reinstall dnspython requests termcolor colorama future beautifulsoup4 futures` ``` and this ``` You are using pip version 8.1.2, however version 9.0.1 is available. You should consider upgrading via the 'pip install --upgrade pip' command. ``` Please upgrade pip version and follow the instructions: ``` pip install --upgrade pip ``` ## System Dependencies For CentOS/Fedora user, please install this: ``` yum install gcc gmp gmp-devel python-devel ``` For Debian/Ubuntu user, please install this: ``` sudo apt-get install nmap git sqlite3 exiftool ``` ## Notice This tool is for educational purposes only. Any damage you make will not affect the author. Do It With Your Own Risk! ## Feedback/Suggestion Feel free to create Issue in this repository or email me at `cacaddv [at] gmail.com` . Your feedback and suggestion is useful for Belati development progress :) ## Contribution Belati welcomes contribution from everyone. 
Please see [CONTRIBUTING.md](https://github.com/aancw/Belati/blob/master/CONTRIBUTING.md) ## Contributors Please see [CONTRIBUTORS.md](https://github.com/aancw/Belati/blob/master/CONTRIBUTORS.md) and please add your name for credit in that file :) ## Thanks To Thanks to PyWhois Library, Sublist3r, MailHarvester, Emingoo for being part of my code. Also thanks to Hispagatos, Infosec-ninjas, eCHo, RNDC( Research and development center ) and all other people who are inspiring this project :) ## Publications Echo Ezine 31 : http://ezine.echo.or.id/issue31/005.txt - Belati : Collecting Public Data & Public Document for OSINT Purpose - Petruknisme IDSECCONF 2017 : https://www.slideshare.net/idsecconf/belati-the-traditional-swiss-army-knife-for-osint - Belati: The Traditional Swiss Army Knife for OSINT ## License **Author:** Aan Wahyu( https://petruknisme.com ) Belati is licensed under GPL V2. You can use, modify, or redistribute this tool under the terms of GNU General Public License (GPLv2). Please see [LICENSE](https://github.com/aancw/Belati/blob/master/LICENSE) for the full license text. 
================================================ FILE: lib/__init__.py ================================================ ================================================ FILE: lib/pywhois/.hg_archival.txt ================================================ repo: ea0e45971cea31656dfa687dd701a201929ad830 node: ccad96890edda4b701762d22129f4436f111566d branch: default latesttag: null latesttagdistance: 95 changessincelatesttag: 111 ================================================ FILE: lib/pywhois/.hgignore ================================================ ^.eggs$ ^python_whois.egg-info$ \.pyc$ \.swp$ ================================================ FILE: lib/pywhois/MANIFEST.in ================================================ include whois/data/tlds.txt include README.rst ================================================ FILE: lib/pywhois/README.rst ================================================ Goal ==== - Create a simple importable Python module which will produce parsed WHOIS data for a given domain. - Able to extract data for all the popular TLDs (com, org, net, ...) - Query a WHOIS server directly instead of going through an intermediate web service like many others do. - Works with Python 2 & 3 Example ======= .. sourcecode:: python >>> import whois >>> w = whois.whois('webscraping.com') >>> w.expiration_date # dates converted to datetime object datetime.datetime(2013, 6, 26, 0, 0) >>> w.text # the content downloaded from whois server u'\nWhois Server Version 2.0\n\nDomain names in the .com and .net ...' >>> print w # print values of all found attributes creation_date: 2004-06-26 00:00:00 domain_name: [u'WEBSCRAPING.COM', u'WEBSCRAPING.COM'] emails: [u'WEBSCRAPING.COM@domainsbyproxy.com', u'WEBSCRAPING.COM@domainsbyproxy.com'] expiration_date: 2013-06-26 00:00:00 ... Install ======= Install from pypi: .. sourcecode:: bash pip install python-whois Or checkout latest version from repository: .. 
sourcecode:: bash hg clone https://bitbucket.org/richardpenman/pywhois Note that then you will need to manually install the futures module, which allows supporting both Python 2 & 3: .. sourcecode:: bash pip install futures Changelog ========= 0.6 - 2016-03-02: * support added for python 3 * updated TLD list 0.5 - 2015-09-05: * added native client, which now handles whois requests by default * added pretty formatting to string representation * return None instead of raising KeyError when an attribute does not exist * new TLD's: .mobi, .io, .kg, .su, .biz 0.4 - 2015-08-13: * new TLD's: .de, .nl, .ca, .be * migrated to bitbucket * added socket timeout 0.3 - 2015-03-31: * improved datetime parsing with python-dateutil when available * base WhoisEntry class inherits from dict * fixed TLD's: .org, .info Contact ======= You can post ideas or patches here: https://bitbucket.org/richardpenman/pywhois/issues Thanks to the many who have sent patches for additional domains! ================================================ FILE: lib/pywhois/__init__.py ================================================ ================================================ FILE: lib/pywhois/setup.py ================================================ import sys, os import setuptools version = '0.6.3' setuptools.setup( name='python-whois', version=version, description="Whois querying and parsing of domain registration information.", long_description='', install_requires=[ 'future', ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP' ], keywords='whois, python', author='Richard Penman', author_email='richard@webscraping.com', url='https://bitbucket.org/richardpenman/pywhois', license='MIT', packages=['whois'], package_dir={'whois':'whois'}, extras_require={ 'better date conversion': ["python-dateutil"] }, 
test_suite='nose.collector', tests_require=['nose', 'simplejson'], include_package_data=True, zip_safe=False ) ================================================ FILE: lib/pywhois/test/samples/expected/digg.com ================================================ {"domain_name": "DIGG.COM", "expiration_date": "2010-02-20 00:00:00", "updated_date": "2007-03-13 00:00:00", "status": ["clientDeleteProhibited", "clientRenewProhibited", "clientTransferProhibited", "clientUpdateProhibited"], "creation_date": "2000-02-20 00:00:00"} ================================================ FILE: lib/pywhois/test/samples/expected/google.com ================================================ {"domain_name": ["GOOGLE.COM", "google.com"], "expiration_date": "2011-09-14 00:00:00", "updated_date": "2006-04-10 00:00:00", "status": ["clientDeleteProhibited", "clientTransferProhibited", "clientUpdateProhibited"], "creation_date": "1997-09-15 00:00:00"} ================================================ FILE: lib/pywhois/test/samples/expected/imdb.com ================================================ {"domain_name": "IMDB.COM", "expiration_date": "2016-01-04 00:00:00", "updated_date": "2008-03-28 00:00:00", "status": "clientTransferProhibited", "creation_date": "1996-01-05 00:00:00"} ================================================ FILE: lib/pywhois/test/samples/expected/microsoft.com ================================================ {"domain_name": "MICROSOFT.COM", "expiration_date": "2014-05-03 00:00:00", "updated_date": "2006-10-10 00:00:00", "status": ["clientDeleteProhibited", "clientTransferProhibited", "clientUpdateProhibited"], "creation_date": "1991-05-02 00:00:00"} ================================================ FILE: lib/pywhois/test/samples/expected/reddit.com ================================================ {"domain_name": "REDDIT.COM", "expiration_date": "2009-04-29 00:00:00", "updated_date": "2008-06-04 00:00:00", "status": ["clientDeleteProhibited", "clientTransferProhibited", 
"clientUpdateProhibited"], "creation_date": "2005-04-29 00:00:00"} ================================================ FILE: lib/pywhois/test/samples/expected/urlowl.com ================================================ {"domain_name": ["URLOWL.COM", "urlowl.com"], "expiration_date": "2009-04-14 00:00:00", "updated_date": "2008-04-14 00:00:00", "status": "ok", "creation_date": "2008-04-14 00:00:00"} ================================================ FILE: lib/pywhois/test/samples/whois/digg.com ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Domain Name: DIGG.COM Registrar: GODADDY.COM, INC. Whois Server: whois.godaddy.com Referral URL: http://registrar.godaddy.com Name Server: UDNS1.ULTRADNS.NET Name Server: UDNS2.ULTRADNS.NET Status: clientDeleteProhibited Status: clientRenewProhibited Status: clientTransferProhibited Status: clientUpdateProhibited Updated Date: 13-mar-2007 Creation Date: 20-feb-2000 Expiration Date: 20-feb-2010 >>> Last update of whois database: Thu, 26 Jun 2008 21:39:08 EDT <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. 
TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars.The data contained in GoDaddy.com, Inc.'s WhoIs database, while believed by the company to be reliable, is provided "as is" with no guarantee or warranties regarding its accuracy. 
This information is provided for the sole purpose of assisting you in obtaining information about domain name registration records. Any use of this data for any other purpose is expressly forbidden without the prior written permission of GoDaddy.com, Inc. By submitting an inquiry, you agree to these terms of usage and limitations of warranty. In particular, you agree not to use this data to allow, enable, or otherwise make possible, dissemination or collection of this data, in part or in its entirety, for any purpose, such as the transmission of unsolicited advertising and and solicitations of any kind, including spam. You further agree not to use this data to enable high volume, automated or robotic electronic processes designed to collect or compile this data for any purpose, including mining this data for your own personal or commercial purposes. Please note: the registrant of the domain name is specified in the "registrant" field. In most cases, GoDaddy.com, Inc. is not the registrant of domain names listed in this database. Registrant: Domains by Proxy, Inc. Registered through: GoDaddy.com, Inc. (http://www.godaddy.com) Domain Name: DIGG.COM Domain servers in listed order: UDNS1.ULTRADNS.NET UDNS2.ULTRADNS.NET For complete domain details go to: http://who.godaddy.com/whoischeck.aspx?Domain=DIGG.COM ================================================ FILE: lib/pywhois/test/samples/whois/google.com ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Server Name: GOOGLE.COM.ZZZZZ.GET.LAID.AT.WWW.SWINGINGCOMMUNITY.COM IP Address: 69.41.185.195 Registrar: INNERWISE, INC. 
D/B/A ITSYOURDOMAIN.COM Whois Server: whois.itsyourdomain.com Referral URL: http://www.itsyourdomain.com Server Name: GOOGLE.COM.ZOMBIED.AND.HACKED.BY.WWW.WEB-HACK.COM IP Address: 217.107.217.167 Registrar: ONLINENIC, INC. Whois Server: whois.35.com Referral URL: http://www.OnlineNIC.com Server Name: GOOGLE.COM.YAHOO.COM.MYSPACE.COM.YOUTUBE.COM.FACEBOOK.COM.THEYSUCK.DNSABOUT.COM IP Address: 72.52.190.30 Registrar: GODADDY.COM, INC. Whois Server: whois.godaddy.com Referral URL: http://registrar.godaddy.com Server Name: GOOGLE.COM.WORDT.DOOR.VEEL.WHTERS.GEBRUIKT.SERVERTJE.NET IP Address: 62.41.27.144 Registrar: KEY-SYSTEMS GMBH Whois Server: whois.rrpproxy.net Referral URL: http://www.key-systems.net Server Name: GOOGLE.COM.VN Registrar: ONLINENIC, INC. Whois Server: whois.35.com Referral URL: http://www.OnlineNIC.com Server Name: GOOGLE.COM.UY Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. D/B/A PUBLICDOMAINREGISTRY.COM Whois Server: whois.PublicDomainRegistry.com Referral URL: http://www.PublicDomainRegistry.com Server Name: GOOGLE.COM.UA Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. D/B/A PUBLICDOMAINREGISTRY.COM Whois Server: whois.PublicDomainRegistry.com Referral URL: http://www.PublicDomainRegistry.com Server Name: GOOGLE.COM.TW Registrar: WEB COMMERCE COMMUNICATIONS LIMITED DBA WEBNIC.CC Whois Server: whois.webnic.cc Referral URL: http://www.webnic.cc Server Name: GOOGLE.COM.TR Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. D/B/A PUBLICDOMAINREGISTRY.COM Whois Server: whois.PublicDomainRegistry.com Referral URL: http://www.PublicDomainRegistry.com Server Name: GOOGLE.COM.SUCKS.FIND.CRACKZ.WITH.SEARCH.GULLI.COM IP Address: 80.190.192.24 Registrar: EPAG DOMAINSERVICES GMBH Whois Server: whois.enterprice.net Referral URL: http://www.enterprice.net Server Name: GOOGLE.COM.SPROSIUYANDEKSA.RU Registrar: MELBOURNE IT, LTD. 
D/B/A INTERNET NAMES WORLDWIDE Whois Server: whois.melbourneit.com Referral URL: http://www.melbourneit.com Server Name: GOOGLE.COM.SERVES.PR0N.FOR.ALLIYAH.NET IP Address: 84.255.209.69 Registrar: GODADDY.COM, INC. Whois Server: whois.godaddy.com Referral URL: http://registrar.godaddy.com Server Name: GOOGLE.COM.SA Registrar: OMNIS NETWORK, LLC Whois Server: whois.omnis.com Referral URL: http://domains.omnis.com Server Name: GOOGLE.COM.PLZ.GIVE.A.PR8.TO.AUDIOTRACKER.NET IP Address: 213.251.184.30 Registrar: OVH Whois Server: whois.ovh.com Referral URL: http://www.ovh.com Server Name: GOOGLE.COM.MX Registrar: DIRECTI INTERNET SOLUTIONS PVT. LTD. D/B/A PUBLICDOMAINREGISTRY.COM Whois Server: whois.PublicDomainRegistry.com Referral URL: http://www.PublicDomainRegistry.com Server Name: GOOGLE.COM.IS.NOT.HOSTED.BY.ACTIVEDOMAINDNS.NET IP Address: 217.148.161.5 Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Server Name: GOOGLE.COM.IS.HOSTED.ON.PROFITHOSTING.NET IP Address: 66.49.213.213 Registrar: NAME.COM LLC Whois Server: whois.name.com Referral URL: http://www.name.com Server Name: GOOGLE.COM.IS.APPROVED.BY.NUMEA.COM IP Address: 213.228.0.43 Registrar: GANDI SAS Whois Server: whois.gandi.net Referral URL: http://www.gandi.net Server Name: GOOGLE.COM.HAS.LESS.FREE.PORN.IN.ITS.SEARCH.ENGINE.THAN.SECZY.COM IP Address: 209.187.114.130 Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM Whois Server: whois.itsyourdomain.com Referral URL: http://www.itsyourdomain.com Server Name: GOOGLE.COM.DO Registrar: GODADDY.COM, INC. Whois Server: whois.godaddy.com Referral URL: http://registrar.godaddy.com Server Name: GOOGLE.COM.COLLEGELEARNER.COM IP Address: 72.14.207.99 IP Address: 64.233.187.99 IP Address: 64.233.167.99 Registrar: GODADDY.COM, INC. 
Whois Server: whois.godaddy.com Referral URL: http://registrar.godaddy.com Server Name: GOOGLE.COM.CO Registrar: NAMESECURE.COM Whois Server: whois.namesecure.com Referral URL: http://www.namesecure.com Server Name: GOOGLE.COM.BR Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Server Name: GOOGLE.COM.BEYONDWHOIS.COM IP Address: 203.36.226.2 Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Server Name: GOOGLE.COM.AU Registrar: PLANETDOMAIN PTY LTD. Whois Server: whois.planetdomain.com Referral URL: http://www.planetdomain.com Server Name: GOOGLE.COM.ACQUIRED.BY.CALITEC.NET IP Address: 85.190.27.2 Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Domain Name: GOOGLE.COM Registrar: MARKMONITOR INC. Whois Server: whois.markmonitor.com Referral URL: http://www.markmonitor.com Name Server: NS1.GOOGLE.COM Name Server: NS2.GOOGLE.COM Name Server: NS3.GOOGLE.COM Name Server: NS4.GOOGLE.COM Status: clientDeleteProhibited Status: clientTransferProhibited Status: clientUpdateProhibited Updated Date: 10-apr-2006 Creation Date: 15-sep-1997 Expiration Date: 14-sep-2011 >>> Last update of whois database: Thu, 26 Jun 2008 21:39:39 EDT <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. 
TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars. 
MarkMonitor.com - The Leader in Corporate Domain Management ---------------------------------------------------------- For Global Domain Consolidation, Research & Intelligence, and Enterprise DNS, go to: www.markmonitor.com ---------------------------------------------------------- The Data in MarkMonitor.com's WHOIS database is provided by MarkMonitor.com for information purposes, and to assist persons in obtaining information about or related to a domain name registration record. MarkMonitor.com does not guarantee its accuracy. By submitting a WHOIS query, you agree that you will use this Data only for lawful purposes and that, under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail (spam); or (2) enable high volume, automated, electronic processes that apply to MarkMonitor.com (or its systems). MarkMonitor.com reserves the right to modify these terms at any time. By submitting this query, you agree to abide by this policy. Registrant: Dns Admin Google Inc. Please contact contact-admin@google.com 1600 Amphitheatre Parkway Mountain View CA 94043 US dns-admin@google.com +1.6502530000 Fax: +1.6506188571 Domain Name: google.com Registrar Name: Markmonitor.com Registrar Whois: whois.markmonitor.com Registrar Homepage: http://www.markmonitor.com Administrative Contact: DNS Admin Google Inc. 1600 Amphitheatre Parkway Mountain View CA 94043 US dns-admin@google.com +1.6506234000 Fax: +1.6506188571 Technical Contact, Zone Contact: DNS Admin Google Inc. 2400 E. Bayshore Pkwy Mountain View CA 94043 US dns-admin@google.com +1.6503300100 Fax: +1.6506181499 Created on..............: 1997-09-15. Expires on..............: 2011-09-13. Record last updated on..: 2008-06-08. 
Domain servers in listed order: ns4.google.com ns3.google.com ns2.google.com ns1.google.com MarkMonitor.com - The Leader in Corporate Domain Management ---------------------------------------------------------- For Global Domain Consolidation, Research & Intelligence, and Enterprise DNS, go to: www.markmonitor.com ---------------------------------------------------------- -- ================================================ FILE: lib/pywhois/test/samples/whois/imdb.com ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Server Name: IMDB.COM.MORE.INFO.AT.WWW.BEYONDWHOIS.COM IP Address: 203.36.226.2 Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Domain Name: IMDB.COM Registrar: NETWORK SOLUTIONS, LLC. Whois Server: whois.networksolutions.com Referral URL: http://www.networksolutions.com Name Server: UDNS1.ULTRADNS.NET Name Server: UDNS2.ULTRADNS.NET Status: clientTransferProhibited Updated Date: 28-mar-2008 Creation Date: 05-jan-1996 Expiration Date: 04-jan-2016 >>> Last update of whois database: Thu, 26 Jun 2008 21:40:25 EDT <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. 
TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars.NOTICE AND TERMS OF USE: You are not authorized to access or query our WHOIS database through the use of high-volume, automated, electronic processes. 
The Data in Network Solutions' WHOIS database is provided by Network Solutions for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. Network Solutions does not guarantee its accuracy. By submitting a WHOIS query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to Network Solutions (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of Network Solutions. You agree not to use high-volume, automated, electronic processes to access or query the WHOIS database. Network Solutions reserves the right to terminate your access to the WHOIS database in its sole discretion, including without limitation, for excessive querying of the WHOIS database or for failure to otherwise abide by this policy. Network Solutions reserves the right to modify these terms at any time. Get a FREE domain name registration, transfer, or renewal with any annual hosting package. http://www.networksolutions.com Visit AboutUs.org for more information about IMDB.COM AboutUs: IMDB.COM Registrant: IMDb.com, Inc. Legal Dept, PO Box 81226 Seattle, WA 98108 US Domain Name: IMDB.COM ------------------------------------------------------------------------ Promote your business to millions of viewers for only $1 a month Learn how you can get an Enhanced Business Listing here for your domain name. 
Learn more at http://www.NetworkSolutions.com/ ------------------------------------------------------------------------ Administrative Contact, Technical Contact: Hostmaster, IMDb hostmaster@imdb.com IMDb.com, Inc. Legal Dept, PO Box 81226 Seattle, WA 98108 US +1.2062664064 fax: +1.2062667010 Record expires on 04-Jan-2016. Record created on 05-Jan-1996. Database last updated on 26-Jun-2008 21:38:42 EDT. Domain servers in listed order: UDNS1.ULTRADNS.NET UDNS2.ULTRADNS.NET ================================================ FILE: lib/pywhois/test/samples/whois/microsoft.com ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Server Name: MICROSOFT.COM.ZZZZZZ.MORE.DETAILS.AT.WWW.BEYONDWHOIS.COM IP Address: 203.36.226.2 Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Server Name: MICROSOFT.COM.ZZZZZ.GET.LAID.AT.WWW.SWINGINGCOMMUNITY.COM IP Address: 69.41.185.194 Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM Whois Server: whois.itsyourdomain.com Referral URL: http://www.itsyourdomain.com Server Name: MICROSOFT.COM.ZZZOMBIED.AND.HACKED.BY.WWW.WEB-HACK.COM IP Address: 217.107.217.167 Registrar: ONLINENIC, INC. Whois Server: whois.35.com Referral URL: http://www.OnlineNIC.com Server Name: MICROSOFT.COM.ZZZ.IS.0WNED.AND.HAX0RED.BY.SUB7.NET IP Address: 207.44.240.96 Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM Whois Server: whois.itsyourdomain.com Referral URL: http://www.itsyourdomain.com Server Name: MICROSOFT.COM.WILL.LIVE.FOREVER.BECOUSE.UNIXSUCKS.COM IP Address: 185.3.4.7 Registrar: MELBOURNE IT, LTD. 
D/B/A INTERNET NAMES WORLDWIDE Whois Server: whois.melbourneit.com Referral URL: http://www.melbourneit.com Server Name: MICROSOFT.COM.WILL.BE.SLAPPED.IN.THE.FACE.BY.MY.BLUE.VEINED.SPANNER.NET IP Address: 216.127.80.46 Registrar: COMPUTER SERVICES LANGENBACH GMBH DBA JOKER.COM Whois Server: whois.joker.com Referral URL: http://www.joker.com Server Name: MICROSOFT.COM.WILL.BE.BEATEN.WITH.MY.SPANNER.NET IP Address: 216.127.80.46 Registrar: COMPUTER SERVICES LANGENBACH GMBH DBA JOKER.COM Whois Server: whois.joker.com Referral URL: http://www.joker.com Server Name: MICROSOFT.COM.WAREZ.AT.TOPLIST.GULLI.COM IP Address: 80.190.192.33 Registrar: EPAG DOMAINSERVICES GMBH Whois Server: whois.enterprice.net Referral URL: http://www.enterprice.net Server Name: MICROSOFT.COM.USERS.SHOULD.HOST.WITH.UNIX.AT.ITSHOSTED.COM IP Address: 74.52.88.132 Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Server Name: MICROSOFT.COM.TOTALLY.SUCKS.S3U.NET IP Address: 207.208.13.22 Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Server Name: MICROSOFT.COM.SOFTWARE.IS.NOT.USED.AT.REG.RU Registrar: MELBOURNE IT, LTD. D/B/A INTERNET NAMES WORLDWIDE Whois Server: whois.melbourneit.com Referral URL: http://www.melbourneit.com Server Name: MICROSOFT.COM.SHOULD.GIVE.UP.BECAUSE.LINUXISGOD.COM IP Address: 65.160.248.13 Registrar: GKG.NET, INC. Whois Server: whois.gkg.net Referral URL: http://www.gkg.net Server Name: MICROSOFT.COM.RAWKZ.MUH.WERLD.MENTALFLOSS.CA Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Server Name: MICROSOFT.COM.OHMYGODITBURNS.COM IP Address: 216.158.63.6 Registrar: DOTSTER, INC. Whois Server: whois.dotster.com Referral URL: http://www.dotster.com Server Name: MICROSOFT.COM.MORE.INFO.AT.WWW.BEYONDWHOIS.COM IP Address: 203.36.226.2 Registrar: TUCOWS INC. 
Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Server Name: MICROSOFT.COM.LOVES.ME.KOSMAL.NET IP Address: 65.75.198.123 Registrar: GODADDY.COM, INC. Whois Server: whois.godaddy.com Referral URL: http://registrar.godaddy.com Server Name: MICROSOFT.COM.LIVES.AT.SHAUNEWING.COM IP Address: 216.40.250.172 Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Server Name: MICROSOFT.COM.IS.NOT.YEPPA.ORG Registrar: OVH Whois Server: whois.ovh.com Referral URL: http://www.ovh.com Server Name: MICROSOFT.COM.IS.NOT.HOSTED.BY.ACTIVEDOMAINDNS.NET IP Address: 217.148.161.5 Registrar: ENOM, INC. Whois Server: whois.enom.com Referral URL: http://www.enom.com Server Name: MICROSOFT.COM.IS.IN.BED.WITH.CURTYV.COM IP Address: 216.55.187.193 Registrar: ABACUS AMERICA, INC. DBA NAMES4EVER Whois Server: whois.names4ever.com Referral URL: http://www.names4ever.com Server Name: MICROSOFT.COM.IS.HOSTED.ON.PROFITHOSTING.NET IP Address: 66.49.213.213 Registrar: NAME.COM LLC Whois Server: whois.name.com Referral URL: http://www.name.com Server Name: MICROSOFT.COM.IS.GOD.BECOUSE.UNIXSUCKS.COM IP Address: 161.16.56.24 Registrar: MELBOURNE IT, LTD. D/B/A INTERNET NAMES WORLDWIDE Whois Server: whois.melbourneit.com Referral URL: http://www.melbourneit.com Server Name: MICROSOFT.COM.IS.A.STEAMING.HEAP.OF.FUCKING-BULLSHIT.NET IP Address: 63.99.165.11 Registrar: THE NAME IT CORPORATION DBA NAMESERVICES.NET Whois Server: whois.aitdomains.com Referral URL: http://www.aitdomains.com Server Name: MICROSOFT.COM.IS.A.MESS.TIMPORTER.CO.UK Registrar: MELBOURNE IT, LTD. D/B/A INTERNET NAMES WORLDWIDE Whois Server: whois.melbourneit.com Referral URL: http://www.melbourneit.com Server Name: MICROSOFT.COM.HAS.ITS.OWN.CRACKLAB.COM IP Address: 209.26.95.44 Registrar: DOTSTER, INC. 
Whois Server: whois.dotster.com Referral URL: http://www.dotster.com Server Name: MICROSOFT.COM.HAS.A.PRESENT.COMING.FROM.HUGHESMISSILES.COM IP Address: 66.154.11.27 Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Server Name: MICROSOFT.COM.FILLS.ME.WITH.BELLIGERENCE.NET IP Address: 130.58.82.232 Registrar: CRONON AG BERLIN, NIEDERLASSUNG REGENSBURG Whois Server: whois.tmagnic.net Referral URL: http://nsi-robo.tmag.de Server Name: MICROSOFT.COM.CAN.GO.FUCK.ITSELF.AT.SECZY.COM IP Address: 209.187.114.147 Registrar: INNERWISE, INC. D/B/A ITSYOURDOMAIN.COM Whois Server: whois.itsyourdomain.com Referral URL: http://www.itsyourdomain.com Server Name: MICROSOFT.COM.ARE.GODDAMN.PIGFUCKERS.NET.NS-NOT-IN-SERVICE.COM IP Address: 216.127.80.46 Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Server Name: MICROSOFT.COM.AND.MINDSUCK.BOTH.SUCK.HUGE.ONES.AT.EXEGETE.NET IP Address: 63.241.136.53 Registrar: DOTSTER, INC. Whois Server: whois.dotster.com Referral URL: http://www.dotster.com Domain Name: MICROSOFT.COM Registrar: TUCOWS INC. Whois Server: whois.tucows.com Referral URL: http://domainhelp.opensrs.net Name Server: NS1.MSFT.NET Name Server: NS2.MSFT.NET Name Server: NS3.MSFT.NET Name Server: NS4.MSFT.NET Name Server: NS5.MSFT.NET Status: clientDeleteProhibited Status: clientTransferProhibited Status: clientUpdateProhibited Updated Date: 10-oct-2006 Creation Date: 02-may-1991 Expiration Date: 03-may-2014 >>> Last update of whois database: Thu, 26 Jun 2008 21:39:39 EDT <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. 
Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. 
The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars.Registrant: Microsoft Corporation One Microsoft Way Redmond, WA 98052 US Domain name: MICROSOFT.COM Administrative Contact: Administrator, Domain domains@microsoft.com One Microsoft Way Redmond, WA 98052 US +1.4258828080 Technical Contact: Hostmaster, MSN msnhst@microsoft.com One Microsoft Way Redmond, WA 98052 US +1.4258828080 Registration Service Provider: DBMS VeriSign, dbms-support@verisign.com 800-579-2848 x4 Please contact DBMS VeriSign for domain updates, DNS/Nameserver changes, and general domain support questions. Registrar of Record: TUCOWS, INC. Record last updated on 15-Nov-2007. Record expires on 03-May-2014. Record created on 02-May-1991. Registrar Domain Name Help Center: http://domainhelp.tucows.com Domain servers in listed order: NS2.MSFT.NET NS4.MSFT.NET NS1.MSFT.NET NS5.MSFT.NET NS3.MSFT.NET Domain status: clientDeleteProhibited clientTransferProhibited clientUpdateProhibited The Data in the Tucows Registrar WHOIS database is provided to you by Tucows for information purposes only, and may be used to assist you in obtaining information about or related to a domain name's registration record. Tucows makes this information available "as is," and does not guarantee its accuracy. By submitting a WHOIS query, you agree that you will use this data only for lawful purposes and that, under no circumstances will you use this data to: a) allow, enable, or otherwise support the transmission by e-mail, telephone, or facsimile of mass, unsolicited, commercial advertising or solicitations to entities other than the data recipient's own existing customers; or (b) enable high volume, automated, electronic processes that send queries or data to the systems of any Registry Operator or ICANN-Accredited registrar, except as reasonably necessary to register domain names or modify existing registrations. 
The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of Tucows. Tucows reserves the right to terminate your access to the Tucows WHOIS database in its sole discretion, including without limitation, for excessive querying of the WHOIS database or for failure to otherwise abide by this policy. Tucows reserves the right to modify these terms at any time. By submitting this query, you agree to abide by these terms. NOTE: THE WHOIS DATABASE IS A CONTACT DATABASE ONLY. LACK OF A DOMAIN RECORD DOES NOT SIGNIFY DOMAIN AVAILABILITY. ================================================ FILE: lib/pywhois/test/samples/whois/reddit.com ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Domain Name: REDDIT.COM Registrar: DSTR ACQUISITION PA I, LLC DBA DOMAINBANK.COM Whois Server: rs.domainbank.net Referral URL: http://www.domainbank.net Name Server: ASIA1.AKAM.NET Name Server: ASIA9.AKAM.NET Name Server: AUS2.AKAM.NET Name Server: NS1-1.AKAM.NET Name Server: NS1-195.AKAM.NET Name Server: USE4.AKAM.NET Name Server: USW3.AKAM.NET Name Server: USW5.AKAM.NET Status: clientDeleteProhibited Status: clientTransferProhibited Status: clientUpdateProhibited Updated Date: 04-jun-2008 Creation Date: 29-apr-2005 Expiration Date: 29-apr-2009 >>> Last update of whois database: Fri, 27 Jun 2008 01:39:54 UTC <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. 
TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars. The information in this whois database is provided for the sole purpose of assisting you in obtaining information about domain name registration records. This information is available "as is," and we do not guarantee its accuracy. 
By submitting a whois query, you agree that you will use this data only for lawful purposes and that, under no circumstances will you use this data to: (1) enable high volume, automated, electronic processes that stress or load this whois database system providing you this information; or (2) allow,enable, or otherwise support the transmission of mass, unsolicited, commercial advertising or solicitations via facsimile, electronic mail, or by telephone to entitites other than your own existing customers. The compilation, repackaging, dissemination or other use of this data is expressly prohibited without prior written consent from this company. We reserve the right to modify these terms at any time. By submitting an inquiry, you agree to these terms of usage and limitations of warranty. Please limit your queries to 10 per minute and one connection. Domain Services Provided By: Domain Bank, support@domainbank.com http:///www.domainbank.com Registrant: CONDENET INC Four Times Square New York, NY 10036 US Registrar: DOMAINBANK Domain Name: REDDIT.COM Created on: 29-APR-05 Expires on: 29-APR-09 Last Updated on: 04-JUN-08 Administrative Contact: , domain_admin@advancemags.com Advance Magazine Group 4 Times Square 23rd Floor New York, New York 10036 US 2122862860 Technical Contact: , domains@advancemags.com Advance Magazine Group 1201 N. Market St Wilmington, DE 19801 US 3028304630 Domain servers in listed order: ASIA1.AKAM.NET ASIA9.AKAM.NET AUS2.AKAM.NET NS1-1.AKAM.NET NS1-195.AKAM.NET USE4.AKAM.NET USW3.AKAM.NET USW5.AKAM.NET End of Whois Information ================================================ FILE: lib/pywhois/test/samples/whois/shazow.net ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. 
Domain Name: SHAZOW.NET Registrar: NEW DREAM NETWORK, LLC Whois Server: whois.dreamhost.com Referral URL: http://www.dreamhost.com Name Server: NS1.DREAMHOST.COM Name Server: NS2.DREAMHOST.COM Name Server: NS3.DREAMHOST.COM Status: ok Updated Date: 08-aug-2007 Creation Date: 13-sep-2003 Expiration Date: 13-sep-2009 >>> Last update of whois database: Thu, 26 Jun 2008 21:39:08 EDT <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. 
You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars. Legal Stuff: The information in DreamHost's whois database is to be used for informational purposes only, and to obtain information on a domain name registration. DreamHost does not guarantee its accuracy. You are not authorized to query or access DreamHost's whois database using high-volume, automated means without written permission from DreamHost. You are not authorized to query or access DreamHost's whois database in order to facilitate illegal activities, or to facilitate the use of unsolicited bulk email, telephone, or facsimile communications. You are not authorized to collect, repackage, or redistribute the information in DreamHost's whois database. DreamHost may, at its sole discretion, restrict your access to the whois database at any time, with or without notice. DreamHost may modify these Terms of Service at any time, with or without notice. 
+++++++++++++++++++++++++++++++++++++++++++ Domain Name: shazow.net Registrant Contact: shazow.net Private Registrant shazow.net@proxy.dreamhost.com DreamHost Web Hosting 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Administrative Contact: shazow.net Private Registrant shazow.net@proxy.dreamhost.com DreamHost Web Hosting 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Technical Contact: shazow.net Private Registrant shazow.net@proxy.dreamhost.com DreamHost Web Hosting 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Billing Contact: shazow.net Private Registrant shazow.net@proxy.dreamhost.com DreamHost Web Hosting 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Record created on 2003-09-12 21:43:11. Record expires on 2009-09-12 21:43:11. Domain servers in listed order: ns1.dreamhost.com ns2.dreamhost.com ns3.dreamhost.com DreamHost whois server terms of service: http://whois.dreamhost.com/terms.html ================================================ FILE: lib/pywhois/test/samples/whois/slashdot.org ================================================ NOTICE: Access to .ORG WHOIS information is provided to assist persons in determining the contents of a domain name registration record in the Public Interest Registry registry database. The data in this record is provided by Public Interest Registry for informational purposes only, and Public Interest Registry does not guarantee its accuracy. This service is intended only for query-based access. 
You agree that you will use this data only for lawful purposes and that, under no circumstances will you use this data to: (a) allow, enable, or otherwise support the transmission by e-mail, telephone, or facsimile of mass unsolicited, commercial advertising or solicitations to entities other than the data recipient's own existing customers; or (b) enable high volume, automated, electronic processes that send queries or data to the systems of Registry Operator or any ICANN-Accredited Registrar, except as reasonably necessary to register domain names or modify existing registrations. All rights reserved. Public Interest Registry reserves the right to modify these terms at any time. By submitting this query, you agree to abide by this policy. Domain ID:D2289308-LROR Domain Name:SLASHDOT.ORG Created On:05-Oct-1997 04:00:00 UTC Last Updated On:23-Jun-2008 20:00:11 UTC Expiration Date:04-Oct-2008 04:00:00 UTC Sponsoring Registrar:Tucows Inc. (R11-LROR) Status:OK Registrant ID:tuIIldggGKu3HogX Registrant Name:DNS Administration Registrant Organization:SourceForge, Inc. Registrant Street1:650 Castro St. Registrant Street2:Suite 450 Registrant Street3: Registrant City:Mountain View Registrant State/Province:CA Registrant Postal Code:94041 Registrant Country:US Registrant Phone:+1.6506942100 Registrant Phone Ext.: Registrant FAX: Registrant FAX Ext.: Registrant Email:dns-admin@corp.sourceforge.com Admin ID:tupyrGGXKEFJLdE5 Admin Name:DNS Administration Admin Organization:SourceForge, Inc. Admin Street1:650 Castro St. Admin Street2:Suite 450 Admin Street3: Admin City:Mountain View Admin State/Province:CA Admin Postal Code:94041 Admin Country:US Admin Phone:+1.6506942100 Admin Phone Ext.: Admin FAX: Admin FAX Ext.: Admin Email:dns-admin@corp.sourceforge.com Tech ID:tuLQk02WUyJi47SS Tech Name:DNS Technical Tech Organization:SourceForge, Inc. Tech Street1:650 Castro St. 
Tech Street2:Suite 450 Tech Street3: Tech City:Mountain View Tech State/Province:CA Tech Postal Code:94041 Tech Country:US Tech Phone:+1.6506942100 Tech Phone Ext.: Tech FAX: Tech FAX Ext.: Tech Email:dns-tech@corp.sourceforge.com Name Server:NS-1.CH3.SOURCEFORGE.COM Name Server:NS-2.CH3.SOURCEFORGE.COM Name Server:NS-3.CORP.SOURCEFORGE.COM Name Server: Name Server: Name Server: Name Server: Name Server: Name Server: Name Server: Name Server: Name Server: Name Server: ================================================ FILE: lib/pywhois/test/samples/whois/squatter.net ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Domain Name: SQUATTER.NET Registrar: DOMAINDISCOVER Whois Server: whois.domaindiscover.com Referral URL: http://www.domaindiscover.com Name Server: NS1.SBRACK.COM Name Server: NS2.SBRACK.COM Status: clientTransferProhibited Updated Date: 07-nov-2007 Creation Date: 06-nov-1999 Expiration Date: 06-nov-2008 >>> Last update of whois database: Thu, 26 Jun 2008 21:40:25 EDT <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. 
TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars. This WHOIS database is provided for information purposes only. We do not guarantee the accuracy of this data. 
The following uses of this system are expressly prohibited: (1) use of this system for unlawful purposes; (2) use of this system to collect information used in the mass transmission of unsolicited commercial messages in any medium; (3) use of high volume, automated, electronic processes against this database. By submitting this query, you agree to abide by this policy. Registrant: CustomPC 4047 N Bayberry St Wichita, KS 67226-2418 US Domain Name: SQUATTER.NET Administrative Contact: CustomPC Derryl Brack 4047 N Bayberry St Wichita, KS 67226-2418 US 3166402868 dbrack@cpcsales.com Technical Contact, Zone Contact: CustomPC Brack, Derryl 4047 N Bayberry St Wichita, KS 67226-2418 US 316-683-5010 316-683-5010 [fax] brack@cpcsales.com Domain created on 06-Nov-1999 Domain expires on 06-Nov-2008 Last updated on 05-Nov-2007 Domain servers in listed order: NS1.SBRACK.COM NS2.SBRACK.COM Domain registration and hosting powered by DomainDiscover As low as $9/year, including FREE: responsive toll-free support, URL/frame/email forwarding, easy management system, and full featured DNS. ================================================ FILE: lib/pywhois/test/samples/whois/urlowl.com ================================================ Whois Server Version 2.0 Domain names in the .com and .net domains can now be registered with many different competing registrars. Go to http://www.internic.net for detailed information. Domain Name: URLOWL.COM Registrar: NEW DREAM NETWORK, LLC Whois Server: whois.dreamhost.com Referral URL: http://www.dreamhost.com Name Server: NS1.LINODE.COM Name Server: NS2.LINODE.COM Status: ok Updated Date: 14-apr-2008 Creation Date: 14-apr-2008 Expiration Date: 14-apr-2009 >>> Last update of whois database: Sun, 31 Aug 2008 00:18:23 UTC <<< NOTICE: The expiration date displayed in this record is the date the registrar's sponsorship of the domain name registration in the registry is currently set to expire. 
This date does not necessarily reflect the expiration date of the domain name registrant's agreement with the sponsoring registrar. Users may consult the sponsoring registrar's Whois database to view the registrar's reported date of expiration for this registration. TERMS OF USE: You are not authorized to access or query our Whois database through the use of electronic processes that are high-volume and automated except as reasonably necessary to register domain names or modify existing registrations; the Data in VeriSign Global Registry Services' ("VeriSign") Whois database is provided by VeriSign for information purposes only, and to assist persons in obtaining information about or related to a domain name registration record. VeriSign does not guarantee its accuracy. By submitting a Whois query, you agree to abide by the following terms of use: You agree that you may use this Data only for lawful purposes and that under no circumstances will you use this Data to: (1) allow, enable, or otherwise support the transmission of mass unsolicited, commercial advertising or solicitations via e-mail, telephone, or facsimile; or (2) enable high volume, automated, electronic processes that apply to VeriSign (or its computer systems). The compilation, repackaging, dissemination or other use of this Data is expressly prohibited without the prior written consent of VeriSign. You agree not to use electronic processes that are automated and high-volume to access or query the Whois database except as reasonably necessary to register domain names or modify existing registrations. VeriSign reserves the right to restrict your access to the Whois database in its sole discretion to ensure operational stability. VeriSign may restrict or terminate your access to the Whois database for failure to abide by these terms of use. VeriSign reserves the right to modify these terms at any time. The Registry database contains ONLY .COM, .NET, .EDU domains and Registrars. 
Legal Stuff: The information in DreamHost's whois database is to be used for informational purposes only, and to obtain information on a domain name registration. DreamHost does not guarantee its accuracy. You are not authorized to query or access DreamHost's whois database using high-volume, automated means without written permission from DreamHost. You are not authorized to query or access DreamHost's whois database in order to facilitate illegal activities, or to facilitate the use of unsolicited bulk email, telephone, or facsimile communications. You are not authorized to collect, repackage, or redistribute the information in DreamHost's whois database. DreamHost may, at its sole discretion, restrict your access to the whois database at any time, with or without notice. DreamHost may modify these Terms of Service at any time, with or without notice. +++++++++++++++++++++++++++++++++++++++++++ Domain Name: urlowl.com Registrant Contact: urlowl.com Private Registrant urlowl.com@proxy.dreamhost.com A Happy DreamHost Customer 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Administrative Contact: urlowl.com Private Registrant urlowl.com@proxy.dreamhost.com A Happy DreamHost Customer 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Technical Contact: urlowl.com Private Registrant urlowl.com@proxy.dreamhost.com A Happy DreamHost Customer 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Billing Contact: urlowl.com Private Registrant urlowl.com@proxy.dreamhost.com A Happy DreamHost Customer 417 Associated Rd #324 Brea, CA 92821 US +1.2139471032 Record created on 2008-04-14 14:34:20. Record expires on 2009-04-14 14:34:20. 
class TestExtractDomain(unittest.TestCase):
    """Verify whois.extract_domain on plain, schemed, and IDN URLs."""

    def test_simple_ascii_domain(self):
        # A bare registrable domain must come back unchanged.
        url = 'google.com'
        self.assertEqual(url, extract_domain(url))

    def test_ascii_with_schema_path_and_query(self):
        # Scheme, path and query string are all stripped down to the netloc.
        url = 'https://www.google.com/search?q=why+is+domain+whois+such+a+mess'
        self.assertEqual('www.google.com', extract_domain(url))

    def test_simple_unicode_domain(self):
        # Unicode labels survive the IDNA round-trip.
        url = 'http://нарояци.com/'
        self.assertEqual('нарояци.com', extract_domain(url))

    def test_unicode_domain_and_tld(self):
        # Both the label and the TLD may be non-ASCII.
        url = 'http://россия.рф/'
        self.assertEqual('россия.рф', extract_domain(url))


class TestNICClient(unittest.TestCase):
    """Exercise NICClient's whois-server selection for an IDN TLD."""

    def setUp(self):
        self.client = NICClient()

    def test_choose_server(self):
        domain = 'рнидс.срб'
        # Expected server is the IDNA-encoded TLD under whois-servers.net.
        tld = domain.rsplit('.', 1)[-1].encode('idna').decode('utf-8')
        expected = '{}.whois-servers.net'.format(tld)
        self.assertEqual(self.client.choose_server(domain), expected)
class TestParser(unittest.TestCase):
    """Tests for whois.parser: date casting and WhoisEntry parsing against
    canned responses and the sample corpus under test/samples/."""

    def test_com_expiration(self):
        """A minimal .com response yields the expected expiration date."""
        data = """
Status: ok
Updated Date: 14-apr-2008
Creation Date: 14-apr-2008
Expiration Date: 14-apr-2009

>>> Last update of whois database: Sun, 31 Aug 2008 00:18:23 UTC <<<
"""
        w = WhoisEntry.load('urlowl.com', data)
        expires = w.expiration_date.strftime('%Y-%m-%d')
        self.assertEqual(expires, '2009-04-14')

    def test_cast_date(self):
        """cast_date normalizes both dd-mon-yyyy and ISO layouts."""
        dates = ['14-apr-2008', '2008-04-14']
        for d in dates:
            r = cast_date(d).strftime('%Y-%m-%d')
            self.assertEqual(r, '2008-04-14')

    def test_com_allsamples(self):
        """
        Iterate over all of the sample/whois/*.com files, read the data,
        parse it, and compare to the expected values in sample/expected/.
        Only keys defined in keys_to_test will be tested.

        To generate fresh expected value dumps, see NOTE below.
        """
        keys_to_test = ['domain_name', 'expiration_date', 'updated_date',
                        'creation_date', 'status']
        fail = 0
        total = 0
        for path in glob('test/samples/whois/*.com'):
            # Parse whois data
            domain = os.path.basename(path)
            with open(path) as whois_fp:
                data = whois_fp.read()

            w = WhoisEntry.load(domain, data)
            results = {key: w.get(key) for key in keys_to_test}

            # NOTE: Toggle condition below to write expected results from the
            # parse results. This will overwrite the existing expected results.
            # Only do this if you've manually confirmed that the parser is
            # generating correct values at its current state.
            if False:
                def date2str4json(obj):
                    if isinstance(obj, datetime.datetime):
                        return str(obj)
                    raise TypeError(
                        '{} is not JSON serializable'.format(repr(obj)))
                outfile_name = os.path.join('test/samples/expected/', domain)
                # FIX: simplejson.dump() writes to the file and returns None,
                # so its return value is not bound any more.
                with open(outfile_name, 'w') as outfil:
                    simplejson.dump(results, outfil, default=date2str4json)
                continue

            # Load expected result
            with open(os.path.join('test/samples/expected/', domain)) as infil:
                expected_results = simplejson.load(infil)

            # Compare each key
            for key in results:
                total += 1
                result = results.get(key)
                if isinstance(result, datetime.datetime):
                    result = str(result)
                expected = expected_results.get(key)
                if expected != result:
                    print("%s \t(%s):\t %s != %s" % (domain, key, result, expected))
                    fail += 1

        if fail:
            self.fail("%d/%d sample whois attributes were not parsed properly!" % (fail, total))

    def test_ca_parse(self):
        """Parse a canned .ca response and compare selected fields."""
        data = """
Domain name:           testdomain.ca
Domain status:         registered
Creation date:         2000/11/20
Expiry date:           2020/03/08
Updated date:          2016/04/29
DNSSEC:                Unsigned

Registrar:
    Name:              Webnames.ca Inc.
    Number:            70

Registrant:
    Name:              Test Industries

Administrative contact:
    Name:              Test Person1
    Postal address:    Test Address
                       Test City, TestVille
    Phone:             +1.1235434123x123
    Fax:               +1.123434123
    Email:             testperson1@testcompany.ca

Technical contact:
    Name:              Test Persion2
    Postal address:    Other TestAddress
                       TestTown OCAS Canada
    Phone:             +1.09876545123
    Fax:               +1.12312993873
    Email:             testpersion2@testcompany.ca

Name servers:
    ns1.testserver1.net
    ns2.testserver2.net
"""
        # FIX: 'domain' was previously undefined in this method, so any field
        # mismatch raised NameError in the print() below instead of reporting
        # the actual parsing failure.
        domain = 'testcompany.ca'
        results = WhoisEntry.load(domain, data)
        expected_results = {
            "updated_date": "2016-04-29 00:00:00",
            "registrant_name": [
                "Webnames.ca Inc.",
                "Test Industries",
                "Test Person1",
                "Test Persion2"
            ],
            "fax": [
                "+1.123434123",
                "+1.12312993873"
            ],
            "dnssec": "Unsigned",
            "registrant_number": "70",
            "expiration_date": "2020-03-08 00:00:00",
            "domain_name": "testdomain.ca",
            "creation_date": "2000-11-20 00:00:00",
            "phone": [
                "+1.1235434123x123",
                "+1.09876545123"
            ],
            "domain_status": "registered",
            "emails": [
                "testperson1@testcompany.ca",
                "testpersion2@testcompany.ca"
            ]
        }
        fail = 0
        total = 0
        # Compare each key
        for key in expected_results:
            total += 1
            result = results.get(key)
            if isinstance(result, datetime.datetime):
                result = str(result)
            expected = expected_results.get(key)
            if expected != result:
                print("%s \t(%s):\t %s != %s" % (domain, key, result, expected))
                fail += 1
        if fail:
            self.fail("%d/%d sample whois attributes were not parsed properly!"
                      % (fail, total))
def whois(url, command=False):
    """Perform a whois lookup for ``url`` and return a parsed WhoisEntry.

    url: a domain name, a full URL, or a dotted-quad IPv4 address.
    command: when True, shell out to the native ``whois`` binary instead of
             using the builtin NICClient.
    """
    # clean domain to expose netloc
    ip_match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", url)
    if ip_match:
        domain = url
        try:
            result = socket.gethostbyaddr(url)
        except socket.herror:
            # Reverse lookup failed; query whois with the raw IP instead.
            pass
        else:
            domain = result[0]
    else:
        domain = extract_domain(url)
    if command:
        # try native whois command.
        r = subprocess.Popen(['whois', domain], stdout=subprocess.PIPE)
        # FIX: communicate() reaps the child and closes the pipe (read()
        # alone left a zombie process), and the output is decoded so the
        # parser receives text — WhoisEntry's substring checks raise
        # TypeError on bytes under Python 3.
        text = r.communicate()[0].decode('utf-8', errors='replace')
    else:
        # try builtin client
        nic_client = NICClient()
        text = nic_client.whois_lookup(None, domain, 0)
    return WhoisEntry.load(domain, text)


def extract_domain(url):
    """Extract the registrable domain from the given URL

    >>> extract_domain('http://www.google.com.au/tos.html')
    'google.com.au'
    >>> extract_domain('www.webscraping.com')
    'webscraping.com'
    >>> extract_domain('198.252.206.140')
    'stackoverflow.com'
    >>> extract_domain('102.112.2O7.net')
    '2o7.net'
    >>> extract_domain('1-0-1-1-1-0-1-1-1-1-1-1-1-.0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info')
    '0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info'
    """
    if re.match(r'\d+\.\d+\.\d+\.\d+', url):
        # this is an IP address: resolve it to its hostname first
        return socket.gethostbyaddr(url)[0]

    # Load the known public suffixes once; a set gives O(1) membership
    # tests in the label walk below (was a list scanned per label).
    tlds_path = os.path.join(
        os.getcwd(), os.path.dirname(__file__), 'data', 'tlds.txt')
    with open(tlds_path) as tlds_fil:
        suffixes = {
            line.lower().encode('utf-8')
            for line in (x.strip() for x in tlds_fil)
            if not line.startswith('#')
        }
    # FIX: removed the dead local `suff = 'xn--p1ai'` — it was never read.

    if not isinstance(url, str):
        url = url.decode('utf-8')
    url = re.sub('^.*://', '', url)                  # strip any scheme
    url = url.split('/')[0].lower().encode('idna')   # keep netloc, IDNA-encode

    # Walk the dot-separated labels, keeping the trailing run of known
    # suffixes plus the single label immediately before it.
    domain = []
    for section in url.split(b'.'):
        if section in suffixes:
            domain.append(section)
        else:
            domain = [section]
    return b'.'.join(domain).decode('idna')


if __name__ == '__main__':
    try:
        url = sys.argv[1]
    except IndexError:
        print('Usage: %s url' % sys.argv[0])
    else:
        print(whois(url))
BS BT BUDAPEST BUGATTI BUILD BUILDERS BUSINESS BUY BUZZ BV BW BY BZ BZH CA CAB CAFE CAL CALL CAMERA CAMP CANCERRESEARCH CANON CAPETOWN CAPITAL CAR CARAVAN CARDS CARE CAREER CAREERS CARS CARTIER CASA CASH CASINO CAT CATERING CBA CBN CC CD CEB CENTER CEO CERN CF CFA CFD CG CH CHANEL CHANNEL CHAT CHEAP CHLOE CHRISTMAS CHROME CHURCH CI CIPRIANI CIRCLE CISCO CITIC CITY CITYEATS CK CL CLAIMS CLEANING CLICK CLINIC CLINIQUE CLOTHING CLOUD CLUB CLUBMED CM CN CO COACH CODES COFFEE COLLEGE COLOGNE COM COMMBANK COMMUNITY COMPANY COMPARE COMPUTER COMSEC CONDOS CONSTRUCTION CONSULTING CONTACT CONTRACTORS COOKING COOL COOP CORSICA COUNTRY COUPONS COURSES CR CREDIT CREDITCARD CREDITUNION CRICKET CROWN CRS CRUISES CSC CU CUISINELLA CV CW CX CY CYMRU CYOU CZ DABUR DAD DANCE DATE DATING DATSUN DAY DCLK DE DEALER DEALS DEGREE DELIVERY DELL DELTA DEMOCRAT DENTAL DENTIST DESI DESIGN DEV DIAMONDS DIET DIGITAL DIRECT DIRECTORY DISCOUNT DJ DK DM DNP DO DOCS DOG DOHA DOMAINS DOOSAN DOWNLOAD DRIVE DUBAI DURBAN DVAG DZ EARTH EAT EC EDU EDUCATION EE EG EMAIL EMERCK ENERGY ENGINEER ENGINEERING ENTERPRISES EPSON EQUIPMENT ER ERNI ES ESQ ESTATE ET EU EUROVISION EUS EVENTS EVERBANK EXCHANGE EXPERT EXPOSED EXPRESS FAGE FAIL FAIRWINDS FAITH FAMILY FAN FANS FARM FASHION FAST FEEDBACK FERRERO FI FILM FINAL FINANCE FINANCIAL FIRESTONE FIRMDALE FISH FISHING FIT FITNESS FJ FK FLIGHTS FLORIST FLOWERS FLSMIDTH FLY FM FO FOO FOOTBALL FORD FOREX FORSALE FORUM FOUNDATION FOX FR FRESENIUS FRL FROGANS FUND FURNITURE FUTBOL FYI GA GAL GALLERY GAME GARDEN GB GBIZ GD GDN GE GEA GENT GENTING GF GG GGEE GH GI GIFT GIFTS GIVES GIVING GL GLASS GLE GLOBAL GLOBO GM GMAIL GMO GMX GN GOLD GOLDPOINT GOLF GOO GOOG GOOGLE GOP GOT GOV GP GQ GR GRAINGER GRAPHICS GRATIS GREEN GRIPE GROUP GS GT GU GUCCI GUGE GUIDE GUITARS GURU GW GY HAMBURG HANGOUT HAUS HEALTHCARE HELP HERE HERMES HIPHOP HITACHI HIV HK HM HN HOCKEY HOLDINGS HOLIDAY HOMEDEPOT HOMES HONDA HORSE HOST HOSTING HOTELES HOTMAIL HOUSE HOW HR HSBC HT HU HYUNDAI IBM ICBC 
ICE ICU ID IE IFM IINET IL IM IMMO IMMOBILIEN IN INDUSTRIES INFINITI INFO ING INK INSTITUTE INSURANCE INSURE INT INTERNATIONAL INVESTMENTS IO IPIRANGA IQ IR IRISH IS ISELECT IST ISTANBUL IT ITAU IWC JAGUAR JAVA JCB JE JETZT JEWELRY JLC JLL JM JMP JO JOBS JOBURG JOT JOY JP JPRS JUEGOS KAUFEN KDDI KE KFH KG KH KI KIA KIM KINDER KITCHEN KIWI KM KN KOELN KOMATSU KP KPN KR KRD KRED KW KY KYOTO KZ LA LACAIXA LAMBORGHINI LAMER LANCASTER LAND LANDROVER LASALLE LAT LATROBE LAW LAWYER LB LC LDS LEASE LECLERC LEGAL LEXUS LGBT LI LIAISON LIDL LIFE LIFESTYLE LIGHTING LIKE LIMITED LIMO LINCOLN LINDE LINK LIVE LIVING LIXIL LK LOAN LOANS LOL LONDON LOTTE LOTTO LOVE LR LS LT LTD LTDA LU LUPIN LUXE LUXURY LV LY MA MADRID MAIF MAISON MAKEUP MAN MANAGEMENT MANGO MARKET MARKETING MARKETS MARRIOTT MBA MC MD ME MED MEDIA MEET MELBOURNE MEME MEMORIAL MEN MENU MEO MG MH MIAMI MICROSOFT MIL MINI MK ML MM MMA MN MO MOBI MOBILY MODA MOE MOI MOM MONASH MONEY MONTBLANC MORMON MORTGAGE MOSCOW MOTORCYCLES MOV MOVIE MOVISTAR MP MQ MR MS MT MTN MTPC MTR MU MUSEUM MUTUELLE MV MW MX MY MZ NA NADEX NAGOYA NAME NAVY NC NE NEC NET NETBANK NETWORK NEUSTAR NEW NEWS NEXUS NF NG NGO NHK NI NICO NINJA NISSAN NL NO NOKIA NORTON NOWRUZ NP NR NRA NRW NTT NU NYC NZ OBI OFFICE OKINAWA OM OMEGA ONE ONG ONL ONLINE OOO ORACLE ORANGE OR ORG ORGANIC ORIGINS OSAKA OTSUKA OVH PA PAGE PANERAI PARIS PARS PARTNERS PARTS PARTY PE PET PF PG PH PHARMACY PHILIPS PHOTO PHOTOGRAPHY PHOTOS PHYSIO PIAGET PICS PICTET PICTURES PID PIN PING PINK PIZZA PK PL PLACE PLAY PLAYSTATION PLUMBING PLUS PM PN POHL POKER PORN POST PR PRAXI PRESS PRO PROD PRODUCTIONS PROF PROMO PROPERTIES PROPERTY PROTECTION PS PT PUB PW PY QA QPON QUEBEC RACING RE READ REALTOR REALTY RECIPES RED REDSTONE REDUMBRELLA REHAB REISE REISEN REIT REN RENT RENTALS REPAIR REPORT REPUBLICAN REST RESTAURANT REVIEW REVIEWS REXROTH RICH RICOH RIO RIP RO ROCHER ROCKS RODEO ROOM RS RSVP RU RUHR RUN RW RWE RYUKYU SA SAARLAND SAFE SAFETY SAKURA SALE SALON SAMSUNG SANDVIK 
SANDVIKCOROMANT SANOFI SAP SAPO SARL SAS SAXO SB SBS SC SCA SCB SCHAEFFLER SCHMIDT SCHOLARSHIPS SCHOOL SCHULE SCHWARZ SCIENCE SCOR SCOT SD SE SEAT SECURITY SEEK SELECT SENER SERVICES SEVEN SEW SEX SEXY SFR SG SH SHARP SHELL SHIA SHIKSHA SHOES SHOW SHRIRAM SI SINGLES SITE SJ SK SKI SKIN SKY SKYPE SL SM SMILE SN SNCF SO SOCCER SOCIAL SOFTWARE SOHU SOLAR SOLUTIONS SONY SOY SPACE SPIEGEL SPREADBETTING SR SRL ST STADA STAR STARHUB STATEFARM STATOIL STC STCGROUP STOCKHOLM STORAGE STUDIO STUDY STYLE SU SUCKS SUPPLIES SUPPLY SUPPORT SURF SURGERY SUZUKI SV SWATCH SWISS SX SY SYDNEY SYMANTEC SYSTEMS SZ TAB TAIPEI TATAMOTORS TATAR TATTOO TAX TAXI TC TCI TD TEAM TECH TECHNOLOGY TEL TELEFONICA TEMASEK TENNIS TF TG TH THD THEATER THEATRE TICKETS TIENDA TIPS TIRES TIROL TJ TK TL TM TN TO TODAY TOKYO TOOLS TOP TORAY TOSHIBA TOURS TOWN TOYOTA TOYS TR TRADE TRADING TRAINING TRAVEL TRAVELERS TRAVELERSINSURANCE TRUST TRV TT TUBE TUI TUSHU TV TW TZ UA UBS UG UK UNIVERSITY UNO UOL US UY UZ VA VACATIONS VANA VC VE VEGAS VENTURES VERISIGN VERSICHERUNG VET VG VI VIAJES VIDEO VILLAS VIN VIP VIRGIN VISION VISTA VISTAPRINT VIVA VLAANDEREN VN VODKA VOLKSWAGEN VOTE VOTING VOTO VOYAGE VU WALES WALTER WANG WANGGOU WATCH WATCHES WEATHER WEBCAM WEBER WEBSITE WED WEDDING WEIR WF WHOSWHO WIEN WIKI WILLIAMHILL WIN WINDOWS WINE WME WORK WORKS WORLD WS WTC WTF XBOX XEROX XIN XN--11B4C3D XN--1QQW23A XN--30RR7Y XN--3BST00M XN--3DS443G XN--3E0B707E XN--3PXU8K XN--42C2D9A XN--45BRJ9C XN--45Q11C XN--4GBRIM XN--55QW42G XN--55QX5D XN--6FRZ82G XN--6QQ986B3XL XN--80ADXHKS XN--80AO21A XN--80ASEHDB XN--80ASWG XN--90A3AC XN--90AIS XN--9DBQ2A XN--9ET52U XN--B4W605FERD XN--C1AVG XN--C2BR7G XN--CG4BKI XN--CLCHC0EA0B2G2A9GCD XN--CZR694B XN--CZRS0T XN--CZRU2D XN--D1ACJ3B XN--D1ALF XN--ECKVDTC9D XN--EFVY88H XN--ESTV75G XN--FHBEI XN--FIQ228C5HS XN--FIQ64B XN--FIQS8S XN--FIQZ9S XN--FJQ720A XN--FLW351E XN--FPCRJ9C3D XN--FZC2C9E2C XN--GECRJ9C XN--H2BRJ9C XN--HXT814E XN--I1B6B1A6A2E XN--IMR513N XN--IO0A7I XN--J1AEF XN--J1AMH 
XN--J6W193G XN--JLQ61U9W7B XN--KCRX77D1X4A XN--KPRW13D XN--KPRY57D XN--KPU716F XN--KPUT3I XN--L1ACC XN--LGBBAT1AD8J XN--MGB9AWBF XN--MGBA3A3EJT XN--MGBA3A4F16A XN--MGBAAM7A8H XN--MGBAB2BD XN--MGBAYH7GPA XN--MGBB9FBPOB XN--MGBBH1A71E XN--MGBC0A9AZCG XN--MGBERP4A5D4AR XN--MGBPL2FH XN--MGBT3DHD XN--MGBTX2B XN--MGBX4CD0AB XN--MK1BU44C XN--MXTQ1M XN--NGBC5AZD XN--NGBE9E0A XN--NODE XN--NQV7F XN--NQV7FS00EMA XN--NYQY26A XN--O3CW4H XN--OGBPF8FL XN--P1ACF XN--P1AI XN--PBT977C XN--PGBS0DH XN--PSSY2U XN--Q9JYB4C XN--QCKA1PMC XN--QXAM XN--RHQV96G XN--S9BRJ9C XN--SES554G XN--T60B56A XN--TCKWE XN--UNUP4Y XN--VERMGENSBERATER-CTB XN--VERMGENSBERATUNG-PWB XN--VHQUV XN--VUQ861B XN--WGBH1C XN--WGBL6A XN--XHQ521B XN--XKC2AL3HYE2A XN--XKC2DL3A5EE0H XN--Y9A3AQ XN--YFRO4I67O XN--YGBI2AMMX XN--ZFR164B XPERIA XXX XYZ YACHTS YAMAXUN YANDEX YE YODOBASHI YOGA YOKOHAMA YOUTUBE YT ZA ZARA ZERO ZIP ZM ZONE ZUERICH ZW ================================================ FILE: lib/pywhois/whois/parser.py ================================================ # coding=utf-8 # parser.py - Module for parsing whois response data # Copyright (c) 2008 Andrey Petrov # # This module is part of pywhois and is released under # the MIT license: http://www.opensource.org/licenses/mit-license.php from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function from __future__ import division from future import standard_library standard_library.install_aliases() from builtins import * from builtins import str from past.builtins import basestring import json from datetime import datetime import re try: import dateutil.parser as dp from .time_zones import tz_data DATEUTIL = True except ImportError: DATEUTIL = False EMAIL_REGEX = "[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?" 
# Fixed date layouts seen in whois responses, tried in order.
KNOWN_FORMATS = [
    '%d-%b-%Y',                 # 02-jan-2000
    '%Y-%m-%d',                 # 2000-01-02
    '%d.%m.%Y',                 # 2.1.2000
    '%Y.%m.%d',                 # 2000.01.02
    '%Y/%m/%d',                 # 2000/01/02
    '%d/%m/%Y',                 # 02/01/2013
    '%Y. %m. %d.',              # 2000. 01. 02.
    '%Y.%m.%d %H:%M:%S',        # 2014.03.08 10:28:24
    '%d-%b-%Y %H:%M:%S %Z',     # 24-Jul-2009 13:20:03 UTC
    '%a %b %d %H:%M:%S %Z %Y',  # Tue Jun 21 23:59:59 GMT 2011
    '%Y-%m-%dT%H:%M:%SZ',       # 2007-01-26T19:10:31Z
    '%Y-%m-%dT%H:%M:%S%z',      # 2013-12-06T08:17:22-0800
    '%Y-%m-%d %H:%M:%SZ',       # 2000-08-22 18:55:20Z
    '%Y-%m-%d %H:%M:%S',        # 2000-08-22 18:55:20
    '%d %b %Y %H:%M:%S',        # 08 Apr 2013 05:44:00
    '%d/%m/%Y %H:%M:%S',        # 23/04/2015 12:00:07 EEST
    '%d/%m/%Y %H:%M:%S %Z',     # 23/04/2015 12:00:07 EEST
    '%d/%m/%Y %H:%M:%S.%f %Z',  # 23/04/2015 12:00:07.619546 EEST
]


class PywhoisError(Exception):
    """Raised when a whois response reports that the lookup failed."""
    pass


def datetime_parse(s):
    """Try every known WHOIS date layout against *s*.

    Returns a datetime for the first layout that matches; when none does,
    the original string is handed back unchanged so the caller keeps the
    raw value.
    """
    for layout in KNOWN_FORMATS:
        try:
            return datetime.strptime(s, layout)
        except ValueError:
            continue  # wrong layout, keep trying the rest
    return s


def cast_date(s, dayfirst=False, yearfirst=False):
    """Convert any date string found in WHOIS to a datetime object.
    """
    if not DATEUTIL:
        # dateutil is unavailable: use the fixed-format table only.
        return datetime_parse(s)
    try:
        parsed = dp.parse(
            s, tzinfos=tz_data, dayfirst=dayfirst, yearfirst=yearfirst)
    except Exception:
        # dateutil could not make sense of it; fall back to fixed formats.
        return datetime_parse(s)
    return parsed.replace(tzinfo=None)
""" # regular expressions to extract domain data from whois profile # child classes will override this _regex = { 'domain_name': 'Domain Name: *(.+)', 'registrar': 'Registrar: *(.+)', 'whois_server': 'Whois Server: *(.+)', 'referral_url': 'Referral URL: *(.+)', # http url of whois_server 'updated_date': 'Updated Date: *(.+)', 'creation_date': 'Creation Date: *(.+)', 'expiration_date': 'Expir\w+ Date: *(.+)', 'name_servers': 'Name Server: *(.+)', # list of name servers 'status': 'Status: *(.+)', # list of statuses 'emails': EMAIL_REGEX, # list of email s 'dnssec': 'dnssec: *([\S]+)', 'name': 'Registrant Name: *(.+)', 'org': 'Registrant\s*Organization: *(.+)', 'address': 'Registrant Street: *(.+)', 'city': 'Registrant City: *(.+)', 'state': 'Registrant State/Province: *(.+)', 'zipcode': 'Registrant Postal Code: *(.+)', 'country': 'Registrant Country: *(.+)', } dayfirst = False yearfirst = False def __init__(self, domain, text, regex=None): if 'This TLD has no whois server, but you can access the whois database at' in text: raise PywhoisError(text) else: self.domain = domain self.text = text if regex is not None: self._regex = regex self.parse() def parse(self): """The first time an attribute is called it will be calculated here. The attribute is then set to be accessed directly by subsequent calls. 
""" for attr, regex in list(self._regex.items()): if regex: values = [] for value in re.findall(regex, self.text, re.IGNORECASE): value = value.strip() if value and isinstance(value, basestring) and not value.isdigit(): # try casting to date format value = cast_date( value, dayfirst=self.dayfirst, yearfirst=self.yearfirst) if value and value not in values: # avoid duplicates values.append(value) if values and attr in ('registrar', 'whois_server', 'referral_url'): values = values[-1] # ignore junk if len(values) == 1: values = values[0] elif not values: values = None self[attr] = values def __setitem__(self, name, value): super(WhoisEntry, self).__setitem__(name, value) def __getattr__(self, name): return self.get(name) def __str__(self): handler = lambda e: str(e) return json.dumps(self, indent=2, default=handler) def __getstate__(self): return self.__dict__ def __setstate__(self, state): self.__dict__ = state @staticmethod def load(domain, text): """Given whois output in ``text``, return an instance of ``WhoisEntry`` that represents its parsed contents. 
""" if text.strip() == 'No whois server is known for this kind of object.': raise PywhoisError(text) if domain.endswith('.com'): return WhoisCom(domain, text) elif domain.endswith('.net'): return WhoisNet(domain, text) elif domain.endswith('.org'): return WhoisOrg(domain, text) elif domain.endswith('.name'): return WhoisName(domain, text) elif domain.endswith('.me'): return WhoisMe(domain, text) elif domain.endswith('.au'): return WhoisAU(domain, text) elif domain.endswith('.ru'): return WhoisRu(domain, text) elif domain.endswith('.us'): return WhoisUs(domain, text) elif domain.endswith('.uk'): return WhoisUk(domain, text) elif domain.endswith('.fr'): return WhoisFr(domain, text) elif domain.endswith('.nl'): return WhoisNl(domain, text) elif domain.endswith('.fi'): return WhoisFi(domain, text) elif domain.endswith('.jp'): return WhoisJp(domain, text) elif domain.endswith('.pl'): return WhoisPl(domain, text) elif domain.endswith('.br'): return WhoisBr(domain, text) elif domain.endswith('.eu'): return WhoisEu(domain, text) elif domain.endswith('.ee'): return WhoisEe(domain, text) elif domain.endswith('.kr'): return WhoisKr(domain, text) elif domain.endswith('.pt'): return WhoisPt(domain, text) elif domain.endswith('.bg'): return WhoisBg(domain, text) elif domain.endswith('.de'): return WhoisDe(domain, text) elif domain.endswith('.at'): return WhoisAt(domain, text) elif domain.endswith('.ca'): return WhoisCa(domain, text) elif domain.endswith('.be'): return WhoisBe(domain, text) elif domain.endswith('.рф'): return WhoisRf(domain, text) elif domain.endswith('.info'): return WhoisInfo(domain, text) elif domain.endswith('.su'): return WhoisSu(domain, text) elif domain.endswith('.kg'): return WhoisKg(domain, text) elif domain.endswith('.io'): return WhoisIo(domain, text) elif domain.endswith('.biz'): return WhoisBiz(domain, text) elif domain.endswith('.mobi'): return WhoisMobi(domain, text) elif domain.endswith('.ch'): return WhoisChLi(domain, text) elif 
domain.endswith('.li'): return WhoisChLi(domain, text) elif domain.endswith('.id'): return WhoisID(domain, text) else: return WhoisEntry(domain, text) class WhoisCom(WhoisEntry): """Whois parser for .com domains """ def __init__(self, domain, text): if 'No match for "' in text: raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text) class WhoisNet(WhoisEntry): """Whois parser for .net domains """ def __init__(self, domain, text): if 'No match for "' in text: raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text) class WhoisOrg(WhoisEntry): """Whois parser for .org domains """ regex = { 'domain_name': 'Domain Name: *(.+)', 'registrar': 'Registrar: *(.+)', 'whois_server': 'Whois Server: *(.+)', # empty usually 'referral_url': 'Referral URL: *(.+)', # http url of whois_server: empty usually 'updated_date': 'Updated Date: *(.+)', 'creation_date': 'Creation Date: *(.+)', 'expiration_date': 'Registry Expiry Date: *(.+)', 'name_servers': 'Name Server: *(.+)', # list of name servers 'status': 'Status: *(.+)', # list of statuses 'emails': EMAIL_REGEX, # list of email addresses } def __init__(self, domain, text): if text.strip() == 'NOT FOUND': raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text) class WhoisRu(WhoisEntry): """Whois parser for .ru domains """ regex = { 'domain_name': 'domain: *(.+)', 'registrar': 'registrar: *(.+)', 'creation_date': 'created: *(.+)', 'expiration_date': 'paid-till: *(.+)', 'name_servers': 'nserver: *(.+)', # list of name servers 'status': 'state: *(.+)', # list of statuses 'emails': EMAIL_REGEX, # list of email addresses 'org': 'org: *(.+)' } def __init__(self, domain, text): if text.strip() == 'No entries found': raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text, self.regex) class WhoisNl(WhoisEntry): """Whois parser for .nl domains """ regex = { 'name': None, 'address': None, 'zip_code': None, 'city': None, 'country': None } def __init__(self, domain, text): if 
text.endswith('is free'): raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text, self.regex) match = re.compile('Registrar:(.*?)DNSSEC', re.DOTALL).search(text) if match: lines = [line.strip() for line in match.groups()[0].strip().splitlines()] self['name'] = lines[0] self['address'] = lines[1] if len(lines) == 4: self['zip_code'], _, self['city'] = lines[2].partition(' ') self['country'] = lines[-1] class WhoisName(WhoisEntry): """Whois parser for .name domains """ regex = { 'domain_name_id': 'Domain Name ID: *(.+)', 'domain_name': 'Domain Name: *(.+)', 'registrar_id': 'Sponsoring Registrar ID: *(.+)', 'registrar': 'Sponsoring Registrar: *(.+)', 'registrant_id': 'Registrant ID: *(.+)', 'admin_id': 'Admin ID: *(.+)', 'technical_id': 'Tech ID: *(.+)', 'billing_id': 'Billing ID: *(.+)', 'creation_date': 'Created On: *(.+)', 'expiration_date': 'Expires On: *(.+)', 'updated_date': 'Updated On: *(.+)', 'name_server_ids': 'Name Server ID: *(.+)', # list of name server ids 'name_servers': 'Name Server: *(.+)', # list of name servers 'status': 'Domain Status: *(.+)', # list of statuses } def __init__(self, domain, text): if 'No match for ' in text: raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text, self.regex) class WhoisUs(WhoisEntry): """Whois parser for .us domains """ regex = { 'domain_name': 'Domain Name: *(.+)', 'domain__id': 'Domain ID: *(.+)', 'registrar': 'Sponsoring Registrar: *(.+)', 'registrar_id': 'Sponsoring Registrar IANA ID: *(.+)', 'registrar_url': 'Registrar URL \(registration services\): *(.+)', 'status': 'Domain Status: *(.+)', # list of statuses 'registrant_id': 'Registrant ID: *(.+)', 'registrant_name': 'Registrant Name: *(.+)', 'registrant_address1': 'Registrant Address1: *(.+)', 'registrant_address2': 'Registrant Address2: *(.+)', 'registrant_city': 'Registrant City: *(.+)', 'registrant_state_province': 'Registrant State/Province: *(.+)', 'registrant_postal_code': 'Registrant Postal Code: *(.+)', 
'registrant_country': 'Registrant Country: *(.+)', 'registrant_country_code': 'Registrant Country Code: *(.+)', 'registrant_phone_number': 'Registrant Phone Number: *(.+)', 'registrant_email': 'Registrant Email: *(.+)', 'registrant_application_purpose': 'Registrant Application Purpose: *(.+)', 'registrant_nexus_category': 'Registrant Nexus Category: *(.+)', 'admin_id': 'Administrative Contact ID: *(.+)', 'admin_name': 'Administrative Contact Name: *(.+)', 'admin_address1': 'Administrative Contact Address1: *(.+)', 'admin_address2': 'Administrative Contact Address2: *(.+)', 'admin_city': 'Administrative Contact City: *(.+)', 'admin_state_province': 'Administrative Contact State/Province: *(.+)', 'admin_postal_code': 'Administrative Contact Postal Code: *(.+)', 'admin_country': 'Administrative Contact Country: *(.+)', 'admin_country_code': 'Administrative Contact Country Code: *(.+)', 'admin_phone_number': 'Administrative Contact Phone Number: *(.+)', 'admin_email': 'Administrative Contact Email: *(.+)', 'admin_application_purpose': 'Administrative Application Purpose: *(.+)', 'admin_nexus_category': 'Administrative Nexus Category: *(.+)', 'billing_id': 'Billing Contact ID: *(.+)', 'billing_name': 'Billing Contact Name: *(.+)', 'billing_address1': 'Billing Contact Address1: *(.+)', 'billing_address2': 'Billing Contact Address2: *(.+)', 'billing_city': 'Billing Contact City: *(.+)', 'billing_state_province': 'Billing Contact State/Province: *(.+)', 'billing_postal_code': 'Billing Contact Postal Code: *(.+)', 'billing_country': 'Billing Contact Country: *(.+)', 'billing_country_code': 'Billing Contact Country Code: *(.+)', 'billing_phone_number': 'Billing Contact Phone Number: *(.+)', 'billing_email': 'Billing Contact Email: *(.+)', 'billing_application_purpose': 'Billing Application Purpose: *(.+)', 'billing_nexus_category': 'Billing Nexus Category: *(.+)', 'tech_id': 'Technical Contact ID: *(.+)', 'tech_name': 'Technical Contact Name: *(.+)', 'tech_address1': 
'Technical Contact Address1: *(.+)', 'tech_address2': 'Technical Contact Address2: *(.+)', 'tech_city': 'Technical Contact City: *(.+)', 'tech_state_province': 'Technical Contact State/Province: *(.+)', 'tech_postal_code': 'Technical Contact Postal Code: *(.+)', 'tech_country': 'Technical Contact Country: *(.+)', 'tech_country_code': 'Technical Contact Country Code: *(.+)', 'tech_phone_number': 'Technical Contact Phone Number: *(.+)', 'tech_email': 'Technical Contact Email: *(.+)', 'tech_application_purpose': 'Technical Application Purpose: *(.+)', 'tech_nexus_category': 'Technical Nexus Category: *(.+)', 'name_servers': 'Name Server: *(.+)', # list of name servers 'created_by_registrar': 'Created by Registrar: *(.+)', 'last_updated_by_registrar': 'Last Updated by Registrar: *(.+)', 'creation_date': 'Domain Registration Date: *(.+)', 'expiration_date': 'Domain Expiration Date: *(.+)', 'updated_date': 'Domain Last Updated Date: *(.+)', } def __init__(self, domain, text): if 'Not found:' in text: raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text, self.regex) class WhoisPl(WhoisEntry): """Whois parser for .pl domains """ regex = { 'domain_name': 'DOMAIN NAME: *(.+)\n', 'registrar': 'REGISTRAR:\n\s*(.+)', 'registrar_url': 'URL: *(.+)', # not available 'status': 'Registration status:\n\s*(.+)', # not available 'registrant_name': 'Registrant:\n\s*(.+)', # not available 'creation_date': 'created: *(.+)\n', 'expiration_date': 'renewal date: *(.+)', 'updated_date': 'last modified: *(.+)\n', } def __init__(self, domain, text): if 'No information available about domain name' in text: raise PywhoisError(text) else: WhoisEntry.__init__(self, domain, text, self.regex) class WhoisCa(WhoisEntry): """Whois parser for .ca domains """ regex = { 'domain_name': 'Domain name: *(.+)', 'registrant_name': '(? 
0: nhost = None elif hostname == NICClient.ANICHOST: for nichost in NICClient.ip_whois: if buf.find(nichost) != -1: nhost = nichost break return nhost def whois(self, query, hostname, flags, many_results=False): """Perform initial lookup with TLD whois server then, if the quick flag is false, search that result for the region-specifc whois server and do a lookup there for contact details """ response = b'' try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(10) s.connect((hostname, 43)) try: query = query.decode('utf-8') except UnicodeEncodeError: pass # Already Unicode (python2's error) except AttributeError: pass # Already Unicode (python3's error) if hostname == NICClient.DENICHOST: query_bytes = "-T dn,ace -C UTF-8 " + query elif hostname.endswith(NICClient.QNICHOST_TAIL) and many_results: query_bytes = '=' + query else: query_bytes = query s.send((query_bytes).encode('idna') + b"\r\n") # recv returns bytes while True: d = s.recv(4096) response += d if not d: break s.close() except socket.error as socketerror: print('Socket Error:', socketerror) nhost = None response = response.decode('utf-8', errors='replace') if 'with "=xxx"' in response: return self.whois(query, hostname, flags, True) if flags & NICClient.WHOIS_RECURSE and nhost is None: nhost = self.findwhois_server(response, hostname, query) if nhost is not None: response += self.whois(query, nhost, 0) return response def choose_server(self, domain): """Choose initial lookup NIC host""" try: domain = domain.encode('idna').decode('utf-8') except TypeError: domain = domain.decode('utf-8').encode('idna').decode('utf-8') if domain.endswith("-NORID"): return NICClient.NORIDHOST if domain.endswith("id"): return NICClient.PANDIHOST domain = domain.split('.') if len(domain) < 2: return None tld = domain[-1] if tld[0].isdigit(): return NICClient.ANICHOST return tld + NICClient.QNICHOST_TAIL def whois_lookup(self, options, query_arg, flags): """Main entry point: Perform initial lookup on TLD 
whois server, or other server to get region-specific whois server, then if quick flag is false, perform a second lookup on the region-specific server for contact records""" nichost = None # whoud happen when this function is called by other than main if options is None: options = {} if ('whoishost' not in options or options['whoishost'] is None) \ and ('country' not in options or options['country'] is None): self.use_qnichost = True options['whoishost'] = NICClient.NICHOST if not (flags & NICClient.WHOIS_QUICK): flags |= NICClient.WHOIS_RECURSE if 'country' in options and options['country'] is not None: result = self.whois( query_arg, options['country'] + NICClient.QNICHOST_TAIL, flags ) elif self.use_qnichost: nichost = self.choose_server(query_arg) if nichost is not None: result = self.whois(query_arg, nichost, flags) else: result = '' else: result = self.whois(query_arg, options['whoishost'], flags) return result def parse_command_line(argv): """Options handling mostly follows the UNIX whois(1) man page, except long-form options can also be used. 
""" flags = 0 usage = "usage: %prog [options] name" parser = optparse.OptionParser(add_help_option=False, usage=usage) parser.add_option("-a", "--arin", action="store_const", const=NICClient.ANICHOST, dest="whoishost", help="Lookup using host " + NICClient.ANICHOST) parser.add_option("-A", "--apnic", action="store_const", const=NICClient.PNICHOST, dest="whoishost", help="Lookup using host " + NICClient.PNICHOST) parser.add_option("-b", "--abuse", action="store_const", const=NICClient.ABUSEHOST, dest="whoishost", help="Lookup using host " + NICClient.ABUSEHOST) parser.add_option("-c", "--country", action="store", type="string", dest="country", help="Lookup using country-specific NIC") parser.add_option("-d", "--mil", action="store_const", const=NICClient.DNICHOST, dest="whoishost", help="Lookup using host " + NICClient.DNICHOST) parser.add_option("-g", "--gov", action="store_const", const=NICClient.GNICHOST, dest="whoishost", help="Lookup using host " + NICClient.GNICHOST) parser.add_option("-h", "--host", action="store", type="string", dest="whoishost", help="Lookup using specified whois host") parser.add_option("-i", "--nws", action="store_const", const=NICClient.INICHOST, dest="whoishost", help="Lookup using host " + NICClient.INICHOST) parser.add_option("-I", "--iana", action="store_const", const=NICClient.IANAHOST, dest="whoishost", help="Lookup using host " + NICClient.IANAHOST) parser.add_option("-l", "--lcanic", action="store_const", const=NICClient.LNICHOST, dest="whoishost", help="Lookup using host " + NICClient.LNICHOST) parser.add_option("-m", "--ra", action="store_const", const=NICClient.MNICHOST, dest="whoishost", help="Lookup using host " + NICClient.MNICHOST) parser.add_option("-p", "--port", action="store", type="int", dest="port", help="Lookup using specified tcp port") parser.add_option("-Q", "--quick", action="store_true", dest="b_quicklookup", help="Perform quick lookup") parser.add_option("-r", "--ripe", action="store_const", 
const=NICClient.RNICHOST, dest="whoishost", help="Lookup using host " + NICClient.RNICHOST) parser.add_option("-R", "--ru", action="store_const", const="ru", dest="country", help="Lookup Russian NIC") parser.add_option("-6", "--6bone", action="store_const", const=NICClient.SNICHOST, dest="whoishost", help="Lookup using host " + NICClient.SNICHOST) parser.add_option("-n", "--ina", action="store_const", const=NICClient.PANDIHOST, dest="whoishost", help="Lookup using host " + NICClient.PANDIHOST) parser.add_option("-?", "--help", action="help") return parser.parse_args(argv) if __name__ == "__main__": flags = 0 nic_client = NICClient() options, args = parse_command_line(sys.argv) if options.b_quicklookup: flags = flags | NICClient.WHOIS_QUICK print(nic_client.whois_lookup(options.__dict__, args[1], flags)) ================================================ FILE: plugins/__init__.py ================================================ ================================================ FILE: plugins/about_project.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# This file is part of Belati project


class AboutProject(object):
    """Static metadata describing the Belati project.

    Exposes name, short description, author, version and the project /
    author URLs as instance attributes.
    """

    def __init__(self):
        # Identity
        self.__name__ = "Belati"
        self.__info__ = 'Collecting Public Data & Public Document for OSINT purpose'
        # Release
        self.__version__ = 'v0.2.4'
        # Author and links
        self.__author__ = 'Petruknisme'
        self.__giturl__ = "https://github.com/aancw/Belati"
        self.__authorurl__ = "https://petruknisme.com"


if __name__ == '__main__':
    AboutProjectApp = AboutProject()
    AboutProjectApp

# ================================================ FILE: plugins/banner_grab.py ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of Belati project

import sys

from url_request import URLRequest

url_req = URLRequest()


class BannerGrab(object):
    """Grab and return HTTP response-header information for a host."""

    def show_banner(self, domain_name, proxy_address):
        """Return url_req.header_info() output for domain_name, or None
        on any failure — banner grabbing is best-effort by design.
        """
        try:
            data = url_req.header_info(domain_name, proxy_address)
            return data
        except Exception:
            # narrowed from a bare `except:` so Ctrl-C / SystemExit
            # still propagate; caller receives None on failure
            pass


if __name__ == '__main__':
    BannerGrabApp = BannerGrab()
    BannerGrabApp

# ================================================ FILE: plugins/check_domain.py ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of Belati project

import sys

from lib.pywhois import whois
from url_request import URLRequest

url_req = URLRequest()


class CheckDomain(object):
    """Domain existence / liveness / whois helpers."""

    def domain_checker(self, domain_name, proxy_address):
        """Return "OK!" when domain_name opens and yields a non-error body,
        "NOT OK!" when the request raises; None when the body looks bad.

        BUGFIX: the original compared strings with ``is not`` (identity),
        which is unreliable for non-interned strings; use equality.
        """
        try:
            data = url_req.just_url_open(domain_name, proxy_address)
            if data != "" and data != "notexist" and "ERROR" not in data:
                return "OK!"
        except Exception:
            return "NOT OK!"

    def alive_check(self, domain_name, proxy_address):
        """Backward-compatible alias of domain_checker().

        Was a copy-paste duplicate (identity-comparison bug included);
        now delegates so the fix lives in one place.
        """
        return self.domain_checker(domain_name, proxy_address)

    def whois_domain(self, domain_name):
        """Return parsed whois data for domain_name."""
        response = whois.whois(domain_name)
        return response


if __name__ == '__main__':
    CheckDomainApp = CheckDomain()
    CheckDomainApp

# ================================================ FILE: plugins/common_service_check.py ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of Belati project

import sys, socket, errno

from logger import Logger
from url_request import URLRequest

# Console color
G = '\033[92m'  # green
Y = '\033[93m'  # yellow
B = '\033[94m'  # blue
R = '\033[91m'  # red
W = '\033[0m'   # white

url_req = URLRequest()
log = Logger()


class CommonServiceCheck(object):
    ## STILL NOT ACCURATE!
    def check_available_service(self, host):
        """Probe a fixed list of common TCP ports on ``host`` and report
        responders: HTTP/HTTPS via header info, others via a raw banner read.
        (Removed the unused ``list_available_port`` local.)
        """
        list_common_port = [21, 22, 23, 25, 53, 80, 110, 111, 135, 139, 143,
                            443, 445, 993, 995, 1723, 3306, 3389, 5900, 8080]
        for port in list_common_port:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                s.connect((host, port))
                if port == 80:
                    data = url_req.header_info("http://" + host, "")
                    # BUGFIX: message previously said "HTPP"
                    log.console_log("Found HTTP Service : ({} OPEN)".format(str(port)))
                    log.console_log("\n{}".format(data))
                elif port == 443:
                    data = url_req.header_info("https://" + host, "")
                else:
                    print("port :" + str(port) + " OPEN! " + s.recv(4096))
            except socket.error as e:
                # connection refused / host unreachable are the expected
                # "closed port" outcomes — stay silent for those
                if e.errno == errno.ECONNREFUSED or e.errno == errno.EHOSTUNREACH:
                    pass
                else:
                    print("port :" + str(port) + str(e) + "closed")
            s.close()

# ================================================ FILE: plugins/config.py ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of Belati project import sys, os, errno import ConfigParser from logger import Logger from util import Util # Console color G = '\033[92m' # green Y = '\033[93m' # yellow B = '\033[94m' # blue R = '\033[91m' # red W = '\033[0m' # white log = Logger() config = ConfigParser.ConfigParser() util = Util() class Config(object): def __init__(self): self.config_file = "belati.conf" if os.path.isfile(self.config_file): db_string = self.get_config("Database", "db_location") if db_string == "belati.db": log.console_log("{}[-] Hm... You are using old Belati configuration{}".format(Y, W)) self.init_config_file() else: log.console_log("{}[-] No Configuration file found. Setting up...{}".format(Y, W)) self.init_config_file() def get_config(self, conf_section, conf_key): config.read(self.config_file) value = config.get(conf_section, conf_key) return value def set_config(self, conf_section, conf_key, conf_value): config.read(self.config_file) config.set(conf_section, conf_key, conf_value) with open(self.config_file, "wb") as conf_file: config.write(conf_file) def init_config_file(self): log.console_log("\n{} -----> Initiating Configuration <-----\n{}".format(Y, W)) if config.has_section("Database"): pass else: config.add_section("Database") self.set_config("Database", "db_location", "web/db.sqlite3") log.console_log("\n{} Setting database location to {}\n{}".format(Y,"web/db.sqlite3", W)) if config.has_section("Environment"): pass else: config.add_section("Environment") log.console_log("{} Setting Current Directory to {} {}".format(Y, util.get_current_work_dir(), W)) python_binary = raw_input("\nPlease enter Python v2.x Binary name [python]:") or "python" self.set_config("Environment", "py_bin", python_binary) self.set_config("Environment", "curr_dir", util.get_current_work_dir()) ================================================ FILE: plugins/database.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is 
tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project import sys, os, errno import sqlite3 as db from logger import Logger from config import Config # Console color G = '\033[92m' # green Y = '\033[93m' # yellow B = '\033[94m' # blue R = '\033[91m' # red W = '\033[0m' # white log = Logger() class Database(object): def __init__(self): conf = Config() self.conn = None db_location = conf.get_config("Database", "db_location") try: self.conn = db.connect(db_location) self.conn.text_factory = str except db.Error, e: print("Error: " + str(e.args[0])) sys.exit() def create_new_project(self, project_domain, project_org, time): cur = self.conn.cursor() cur.execute("INSERT INTO projects(`project_domain`, `project_org`, `started_time`) VALUES (?, ?, ?)", (project_domain, project_org, time)) self.conn.commit() return cur.lastrowid def check_subdomain_exist(self, project_id, subdomain): cur = self.conn.cursor() cur.execute("SELECT id from subdomain_results WHERE project_id = ? 
AND subdomain = ?",(project_id, subdomain)) data = cur.fetchone() return data def insert_banner(self, domain, project_id, banner_info): cur = self.conn.cursor() subdomain_exist = self.check_subdomain_exist(project_id, domain) if subdomain_exist == None: cur.execute("INSERT INTO subdomain_results(`project_id`, `subdomain`, `banner`) VALUES(?, ?, ?)", (project_id, domain, banner_info)) else: cur.execute("UPDATE subdomain_results SET `banner` = ? WHERE project_id = ? AND subdomain = ? ", (banner_info, project_id, domain)) self.conn.commit() def insert_robots_txt(self, project_id, domain, robots_txt): cur = self.conn.cursor() subdomain_exist = self.check_subdomain_exist(project_id, domain) if subdomain_exist == None: cur.execute("INSERT INTO subdomain_results(`project_id`, `subdomain`, `robots_txt`) VALUES(?, ?, ?)", (project_id, domain, robots_txt)) else: cur.execute("UPDATE subdomain_results SET `robots_txt` = ? WHERE project_id = ? AND subdomain = ? ", (robots_txt, project_id, domain)) self.conn.commit() def insert_wappalyzing(self, project_id, domain, wappalyzing_result): cur = self.conn.cursor() subdomain_exist = self.check_subdomain_exist(project_id, domain) if subdomain_exist == None: cur.execute("INSERT INTO subdomain_results(`project_id`, `subdomain`, `wappalyzer`) VALUES(?, ?, ?)", (project_id, domain, wappalyzing_result)) else: cur.execute("UPDATE subdomain_results SET `wappalyzer` = ? WHERE project_id = ? AND subdomain = ? ", (wappalyzing_result, project_id, domain)) self.conn.commit() def update_subdomain_ip(self, project_id, subdomain, ipaddress): cur = self.conn.cursor() subdomain_exist = self.check_subdomain_exist(project_id, subdomain) if subdomain_exist: cur.execute("UPDATE subdomain_results SET ip_address = ? WHERE project_id = ? 
AND subdomain = ?", (ipaddress, project_id, subdomain)) self.conn.commit() def update_git_finder(self, project_id, subdomain, status): cur = self.conn.cursor() subdomain_exist = self.check_subdomain_exist(project_id, subdomain) status_fix = "Yes" if status == "Yes" else "No" if subdomain_exist: cur.execute("UPDATE subdomain_results SET is_contain_git = ? WHERE project_id = ? AND subdomain = ?", (status_fix, project_id, subdomain)) self.conn.commit() def update_svn_finder(self, project_id, subdomain, status): cur = self.conn.cursor() subdomain_exist = self.check_subdomain_exist(project_id, subdomain) status_fix = "Yes" if status == "Yes" else "No" if subdomain_exist: cur.execute("UPDATE subdomain_results SET is_contain_svn = ? WHERE project_id = ? AND subdomain = ?", (status_fix, project_id, subdomain)) self.conn.commit() def insert_domain_result(self, project_id, domain, domain_whois, email): cur = self.conn.cursor() cur.execute("INSERT INTO main_domain_results(`project_id`, `domain`, `domain_whois`, `email`) VALUES(?, ?, ?, ?)", (project_id, domain, domain_whois, email)) self.conn.commit() def update_dns_zone(self, project_id, domain, ns_record, mx_record): cur = self.conn.cursor() cur.execute("UPDATE main_domain_results SET NS_record = ?, MX_record = ? WHERE project_id = ? AND domain = ?", (ns_record, mx_record, project_id, domain)) self.conn.commit() def insert_email_result(self, project_id, mail_results): cur = self.conn.cursor() cur.execute("INSERT INTO mail_harvest_results(`project_id`, `mail_results`) VALUES(?, ?)", (project_id, mail_results)) self.conn.commit() def update_pgp_email(self, project_id, mail_pgp_results): cur = self.conn.cursor() cur.execute("UPDATE mail_harvest_results SET mail_pgp_results = ? 
WHERE project_id = ?", (mail_pgp_results, project_id)) self.conn.commit() def insert_public_doc(self, project_id, doc_ext, doc_url, doc_location, doc_full_location, doc_meta_exif): cur = self.conn.cursor() cur.execute("INSERT INTO doc_results(`project_id`, `doc_ext`, `doc_url`, `doc_location`, `doc_full_location`, `doc_meta_exif` ) VALUES(?, ?, ?, ?, ?, ?)", (project_id, doc_ext, doc_url, doc_location, doc_full_location, doc_meta_exif)) self.conn.commit() def insert_linkedin_company_info(self, project_id, company_name, company_linkedin_url, company_description): cur = self.conn.cursor() cur.execute("INSERT INTO linkedin_company_info(`project_id`, `company_name`, `company_linkedin_url`, `company_description`) VALUES (?, ?, ?, ?)",(project_id, company_name, company_linkedin_url, company_description)) self.conn.commit() return cur.lastrowid def insert_company_employees(self, project_id, name, job_title, linkedin_url ): cur = self.conn.cursor() cur.execute("INSERT INTO linkedin_company_employees(`project_id`, `name`, `job_title`, `linkedin_url`) VALUES(?, ?, ?, ?)", (project_id, name, job_title, linkedin_url)) self.conn.commit() # def read(table, **kwargs): # """ Generates SQL for a SELECT statement matching the kwargs passed. """ # sql = list() # sql.append("SELECT * FROM %s " % table) # if kwargs: # sql.append("WHERE " + " AND ".join("%s = '%s'" % (k, v) for k, v in kwargs.iteritems())) # sql.append(";") # return "".join(sql) # cursor.execute("INSERT INTO table VALUES ?", args) # cursor.execute('INSERT INTO media_files (%s) VALUES (%%s, %%s, %%s, %%s, ...)' % ','.join(fieldlist), valuelist) ================================================ FILE: plugins/dep_check.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. 
# This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project import sys, os, operator, pkg_resources from logger import Logger # Console color G = '\033[92m' # green Y = '\033[93m' # yellow B = '\033[94m' # blue R = '\033[91m' # red W = '\033[0m' # white log = Logger() try: # Check if pip module is installed import pip except ImportError: log.console_log("{}[-] Sorry, please install pip before using Belati : https://pip.pypa.io/en/stable/installing/ {}".format(R, W)) sys.exit(1) try: # Check for older pip version from pip._vendor.packaging.version import Version except ImportError: log.console_log("{}[-] Old pip version detected, please upgrade using: sudo pip install --upgrade pip {}".format(Y, W)) sys.exit(1) class DepCheck(object): def check_dependency(self): list_deps = [] missing_deps = [] with open('requirements.txt') as f: list_deps = f.read().splitlines() dists = [d for d in pkg_resources.working_set] pip_list = sorted([(i.key) for i in dists]) #pip_list = sorted([(i.key) for i in pip.get_installed_distributions()]) for req_dep in list_deps: compare_char = ["==", ">=", "<=", ">", "<", "!="] for c in compare_char: if c in req_dep: pkg = req_dep.split(c) if pkg[0] not in pip_list: missing_deps.append(req_dep) break else: installed_ver = 
pkg_resources.get_distribution(pkg[0]).version if self.get_truth(installed_ver, c, pkg[1]): break else: missing_deps.append(req_dep) else: if req_dep not in pip_list: # Why this package is not in get_installed_distributions ? if str(req_dep) == "argparse": pass else: missing_deps.append(req_dep) missing_deps = set(missing_deps) if missing_deps: missing_deps_warning =""" You are missing a module required for Belati. In order to continue using Belati, please install them with: {}`pip install --upgrade --force-reinstall -r requirements.txt`{} or manually install missing modules with: {}`pip install --upgrade --force-reinstall {}`{} """ log.console_log(missing_deps_warning.format(Y, W, Y, ' '.join(missing_deps), W)) sys.exit() def get_truth(self, inp, relate, cut): ops = {'>': operator.gt, '<': operator.lt, '>=': operator.ge, '<=': operator.le, '==': operator.eq} return ops[relate](inp, cut) ================================================ FILE: plugins/gather_company.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# This file is part of Belati project

import re,sys

from bs4 import BeautifulSoup
from database import Database
from logger import Logger
from url_request import URLRequest

# Console color
G = '\033[92m' # green
Y = '\033[93m' # yellow
B = '\033[94m' # blue
R = '\033[91m' # red
W = '\033[0m' # white

url_req = URLRequest()
log = Logger()

class GatherCompany(object):
    """Scrapes Google results for LinkedIn pages related to a company."""

    def crawl_company_employee(self, company_name, proxy_address, project_id):
        """Google-dork LinkedIn for employees of `company_name`, store hits.

        Queries `"Current * <company> *" site:linkedin.com`, then parses the
        result page HTML. /company/ URLs are stored as company profiles and
        /in/ URLs as employees. NOTE(review): parsing depends on Google's
        2017-era result markup (div.rc, h3.r, div.slp) — fragile by design.
        """
        self.db = Database()
        self.project_id = project_id
        self.company_id = 0
        comp_strip = company_name.replace(" ", "+")
        url = 'https://www.google.com/search?q="Current+*+{}+*"+site:linkedin.com&num=200'.format(comp_strip)
        data = url_req.standart_request(url, proxy_address)
        soup = BeautifulSoup( data, 'html.parser')
        company_linkedin_url_list = []
        #Getting all h3 tags with class 'r'
        scrap_container = soup.find_all('div', class_='rc')
        for rc in scrap_container:
            # Re-parse each result block so searches stay scoped to it.
            soup2 = BeautifulSoup( str(rc), 'html.parser' )
            url = soup2.find_all('h3', class_= 'r')
            # Pull raw URLs out of the anchor markup.
            url_fix = re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', str(url))
            linkedin_url = re.findall(r'(http[s]?://.*\.linkedin\.com/in/.*)', str(url_fix).strip("\'[]")) # filter only *.linkedin.com/in
            company_linkedin_url = re.findall(r'(http[s]?://.*\.linkedin\.com/company/.*)', str(url_fix).strip("\'[]")) # filter only *.linkedin.com/company
            job_title = soup2.find_all('div', class_='slp f')
            url_tag = soup2.find_all("a")[0].string
            # Check if URL is match with one of the string from company name(?)
            if company_linkedin_url:
                is_contain_name = 0
                for x in company_name.split():
                    if x in url_tag:
                        is_contain_name = 1
                        break
                if is_contain_name == 1:
                    company_linkedin_url_list.append(company_linkedin_url)
                    # NOTE(review): description is a "Lorem ipsum" placeholder.
                    self.company_id = self.db.insert_linkedin_company_info(self.project_id, str(company_name), str(company_linkedin_url), "Lorem ipsum")
            # Get data when linkedin url is like this : *.linkedin.com/in
            if not linkedin_url:
                pass
            else:
                name_result = re.sub('<[^<]+?>', '', str(rc.h3.a)) # strip all html tags
                job_title_result = re.sub('<[^<]+?>', '', str(job_title)) # strip all html tags
                name_fix = str(name_result.replace('| LinkedIn', ''))
                job_title_fix = str(job_title_result.replace('\u200e', ' ')).strip("\'[]")
                linkedin_url_fix = str(linkedin_url).strip("\'[]")
                log.console_log("{}[+] --------------------------------------------------- [+]{}".format(Y, W))
                log.console_log("Name: {}".format( name_fix ))
                log.console_log("Job Title: {}".format( job_title_fix ))
                log.console_log("Url: {}".format( linkedin_url_fix ))
                log.console_log("{}[+] --------------------------------------------------- [+]{}\n".format(Y, W))
                self.db.insert_company_employees(self.project_id, name_fix, job_title_fix, linkedin_url_fix)

        log.console_log("\n\n{}[+] --------------------------------------------------- [+]{}".format(Y, W))
        log.console_log("{}[+] Found LinkedIn Company URL: {}".format(Y, W))
        for url in company_linkedin_url_list:
            log.console_log("{} {} {}".format(Y, str(url), W))

================================================ FILE: plugins/git_finder.py ================================================

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project import sys, re, time from url_request import URLRequest # Console color G = '\033[92m' # green Y = '\033[93m' # yellow B = '\033[94m' # blue R = '\033[91m' # red W = '\033[0m' # white url_req = URLRequest() class GitFinder(object): def check_git(self, domain, proxy_address): try: data = url_req.just_url_open(url_req.ssl_checker(domain) + "/.git/HEAD", proxy_address) if data is not None and data is not "notexist": decode_data = data.read(200).decode() if not 'refs/heads' in decode_data: return False else: return True except: pass ================================================ FILE: plugins/harvest_email.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project # This is part of MailHarvester and EMINGOO regex # Thanks to pantuts and maldevel import sys, re, time from url_request import URLRequest url_req = URLRequest() class HarvestEmail(object): def crawl_search(self, domain, proxy_address): url = 'https://www.google.com/search?num=200&start=0&filter=0&hl=en&q=@' + domain try: data = url_req.standart_request(url, proxy_address) dataStrip = re.sub('<[^<]+?>', '', data) # strip all html tags like dataStrip1 = re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+' + domain, dataStrip) dataStrip2 = re.findall(r'[a-zA-Z0-9._+-]+@' + domain, dataStrip) dataEmail = set(dataStrip1 + dataStrip2) dataFix = [x for x in dataEmail if not x.startswith('x22') and not x.startswith('3D') and not x.startswith('x3d') and not x.startswith('Cached') and not x.startswith('page')] # ignore email because bad parsing return list(dataFix) except: pass def crawl_pgp_mit_edu(self, domain, proxy_address): url = 'http://pgp.mit.edu:11371/pks/lookup?op=index&search=' + domain try: data = url_req.standart_request(url, proxy_address, 'Googlebot/3.1 (+http://www.googlebot.com/bot.html)') dataStrip = re.sub('<[^<]+?>', '', data) # strip all html tags like dataStrip1 = re.findall(r'[a-zA-Z0-9._+-]+@[a-zA-Z0-9._+-]+' + domain, dataStrip) dataStrip2 = re.findall(r'[a-zA-Z0-9._+-]+@' + domain, dataStrip) dataEmail = set(dataStrip1 + dataStrip2) return list(dataEmail) except: pass if __name__ == '__main__': HarvestEmailApp = HarvestEmail() HarvestEmailApp ================================================ FILE: plugins/harvest_public_document.py 
================================================

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .

# This file is part of Belati project

import re, os, errno
import urllib

from database import Database
from logger import Logger
from tqdm import tqdm
import requests
from url_request import URLRequest
from meta_exif_extractor import MetaExifExtractor
from util import Util

# Console color
G = '\033[92m' # green
Y = '\033[93m' # yellow
B = '\033[94m' # blue
R = '\033[91m' # red
W = '\033[0m' # white

url_req = URLRequest()
log = Logger()
util = Util()

class HarvestPublicDocument(object):
    """Finds public documents for a domain via Google dorks, downloads them
    under belatiFiles/<domain>/, extracts EXIF metadata, and stores results."""

    def __init__(self):
        self.db = Database()
        self.project_id = 0

    def init_crawl(self, domain, proxy_address, project_id):
        """Search and download each supported document extension for `domain`."""
        self.project_id = project_id
        log.console_log("{}[*] Gather Link from Google Search for domain {}{}".format(G, domain, W))
        self.harvest_public_doc(domain, "pdf", proxy_address)
        self.harvest_public_doc(domain, "doc", proxy_address)
        self.harvest_public_doc(domain, "xls", proxy_address)
        self.harvest_public_doc(domain, "odt", proxy_address)
        self.harvest_public_doc(domain, "ppt", proxy_address)
        self.harvest_public_doc(domain, "rtf", proxy_address)
        self.harvest_public_doc(domain, "txt", proxy_address)

    #https://www.google.com/search?q=site:domain.com%20ext:pdf&filter=0&num=100#q=site:domain.com+ext:txt&start=100&filter=0
    def harvest_public_doc(self, domain, extension, proxy_address):
        """Google-search `site:<domain> ext:<extension>` and download every hit."""
        log.console_log("{}[*] Searching {} Document... {}".format(G, extension.upper(), W))
        total_files = 0
        url = 'https://www.google.com/search?q=site:' + domain + '%20ext:' + extension + '&filter=0&num=200'
        try:
            data = url_req.standart_request(url, proxy_address)
            # NOTE(review): this pattern starts with "(?P" but carries no
            # group name — the "<name>" part appears to have been lost when
            # this file was extracted; confirm against upstream before use.
            regex = "(?Phttps?://[A-Za-z0-9\-\?&#_~@=\.\/%\[\]\+]+\.{})".format(extension)
            if type(data)==str:
                data = re.findall(regex, data)
                list_files_download = list(set(data))
                total_files = str(len(list_files_download))
                if total_files != "0":
                    log.console_log("{}[*] Found {} {} files!".format(G, total_files, extension.upper(), W) )
                    log.console_log("{}[*] Please wait, lemme download it for you ;) {}[NO PROXY] {}".format(G, Y, W))
                    for files_download in list_files_download:
                        log.no_console_log(files_download.split('/')[-1])
                        self.download_files(files_download, domain)
                else:
                    log.console_log("{}[!] Error: Google probably now is blocking our requests{}".format(R,W))
        except:
            pass

    def download_files(self, url, folder_domain):
        """Download `url` into belatiFiles/<folder_domain>/ with a tqdm bar,
        then extract EXIF metadata and record the document in the database."""
        filename = url.split('/')[-1]
        full_filename = 'belatiFiles/{}/{}'.format(folder_domain, filename)
        full_filename_location = '{}/belatiFiles/{}/{}'.format(util.get_current_work_dir(), folder_domain, filename)
        meta = MetaExifExtractor()
        if not os.path.exists(os.path.dirname(full_filename)):
            try:
                os.makedirs(os.path.dirname(full_filename))
            except OSError as exc: # Guard against race condition
                if exc.errno != errno.EEXIST:
                    raise
        with tqdm(unit='B', unit_scale=True, miniters=1,desc=filename) as t:
            try:
                # Python 2 urllib; the hook feeds download progress into tqdm.
                urllib.urlretrieve(url, filename=full_filename,reporthook=self.my_hook(t), data=None)
            except:
                pass
        meta_exif_json = meta.extract_json(full_filename_location)
        self.db.insert_public_doc(self.project_id, str(os.path.splitext(filename)[1]), str(url), str(full_filename), str(full_filename_location), str(meta_exif_json))

    def my_hook(self,t):
        """
        Wraps tqdm instance. Don't forget to close() or __exit__()
        the tqdm instance once you're done with it (easiest using `with` syntax).

        Example
        -------
        >>> with tqdm(...) as t:
        ...     reporthook = my_hook(t)
        ...     urllib.urlretrieve(..., reporthook=reporthook)
        """
        last_b = [0]

        def inner(b=1, bsize=1, tsize=None):
            """
            b : int, optional
                Number of blocks just transferred [default: 1].
            bsize : int, optional
                Size of each block (in tqdm units) [default: 1].
            tsize : int, optional
                Total size (in tqdm units). If [default: None] remains unchanged.
            """
            if tsize is not None:
                t.total = tsize
            t.update((b - last_b[0]) * bsize)
            last_b[0] = b
        return inner

if __name__ == '__main__':
    HarvestPublicDocumentApp = HarvestPublicDocument()
    HarvestPublicDocumentApp

================================================ FILE: plugins/json_beautifier.py ================================================

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .

# This file is part of Belati project

import json

class JsonBeautifier(object):
    """Re-serializes a JSON string with indentation and sorted keys."""

    def beautifier(self, json_data):
        """Return `json_data` (a JSON string) pretty-printed with indent=4."""
        parsed = json.loads(json_data)
        return json.dumps(parsed, indent=4, sort_keys=True)

================================================ FILE: plugins/logger.py ================================================

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project import sys, os, errno import logging import time class Logger(object): def __init__(self): timestamp = int(time.time()) datetime = time.strftime("%d%m%Y") log_dir = "logs/" log_filename = log_dir + "Belati-" + datetime + "-" + str(timestamp) + ".log" if not os.path.exists(os.path.dirname(log_filename)): try: os.makedirs(os.path.dirname(log_filename)) except OSError as exc: # Guard against race condition if exc.errno != errno.EEXIST: raise logging.basicConfig(filename=log_filename, format='%(message)s') def console_log(self, log_word, newline=1): logging.warning(log_word) if newline == 1: print(log_word) else: sys.stdout.write(log_word) def no_console_log(self, log_word, newline=1): logging.warning(log_word) if __name__ == '__main__': LoggerApp = Logger() LoggerApp ================================================ FILE: plugins/meta_exif_extractor.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. 
# This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project import pyexifinfo as p import json class MetaExifExtractor(object): def extract_json(self, filename): data = p.get_json(filename) json_data = json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')) return json_data def extract_xml(self, filename): data = p.get_xml(filename) return data ================================================ FILE: plugins/robots_scraper.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .

# This file is part of Belati project

from url_request import URLRequest

url_req = URLRequest()


class RobotsScraper(object):
    """Fetch a domain's robots.txt when it is genuinely served (no redirect)."""

    def check_robots(self, domain_name, proxy_address):
        """Return the open response for <domain_name>/robots.txt, or None.

        `domain_name` must include the scheme. Best-effort: any failure
        (missing file, redirect elsewhere, network error) yields None.
        """
        try:
            url_request = "{}/robots.txt".format(domain_name)
            data = url_req.just_url_open(url_request, proxy_address)
            # just_url_open returns "notexist" on unresolvable hosts and a bare
            # status code on HTTPError; only a real response object passes here.
            # (fix: the original compared with `is not`, which is identity, not
            # equality, on string literals.)
            if data != "" and data != "notexist":
                # Accept only a direct 200 whose final URL still points at
                # robots.txt (i.e. we were not redirected to a landing page).
                if data.getcode() == 200 and url_request in data.geturl():
                    return data
        except Exception:
            # best-effort probe: treat any error as "no robots.txt"
            return None

# ================================================
# FILE: plugins/scan_nmap.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# This file is part of Belati project

from logger import Logger
import shlex, subprocess

log = Logger()


class ScanNmap(object):
    """Run an nmap SYN/service-detection scan and stream its output to the logger."""

    def run_scanning(self, ipaddress):
        """Run `nmap -sS -A -Pn <ipaddress>`, logging each output line; return the exit code."""
        command = "nmap -sS -A -Pn " + ipaddress
        process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE)
        while True:
            output = process.stdout.readline()
            if output == '' and process.poll() is not None:
                break
            if output:
                log.console_log(output.strip())
        return process.poll()

if __name__ == '__main__':
    ScanNmapApp = ScanNmap()

# ================================================
# FILE: plugins/subdomain_enum.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .

# This file is part of Belati project

import sys
from bs4 import BeautifulSoup
from dnsdumpster.DNSDumpsterAPI import DNSDumpsterAPI
from url_request import URLRequest

url_req = URLRequest()


class SubdomainEnum(object):
    """Enumerate subdomains via DNSDumpster, crt.sh and findsubdomains.com."""

    def scan_dnsdumpster(self, domain_name):
        """Return the raw DNSDumpster search result for `domain_name`."""
        return DNSDumpsterAPI().search(domain_name)

    def scan_crtsh(self, domain_name, proxy_address):
        """Scrape crt.sh certificate-transparency results; return unique names or None.

        (fix: the bare `except:` clauses also swallowed SystemExit and
        KeyboardInterrupt; narrowed to Exception. The redundant nested
        try was collapsed — both paths returned None on failure.)
        """
        try:
            url = "https://crt.sh/?q=%25." + domain_name
            data = url_req.standart_request(url, proxy_address)
            soup = BeautifulSoup(data, 'lxml')
            subdomain_list = []
            # The third table on the page holds the certificate rows.
            table = soup.findAll('table')[2]
            for row in table.find_all(['tr']):
                cells = row.find_all('td', limit=5)
                if cells:
                    name = cells[4].text
                    # we don't need wildcard domain
                    if "*." not in name:
                        subdomain_list.append(name)
            return list(set(subdomain_list))
        except Exception:
            # layout change or network failure: report "no result"
            return None

    def scan_findsubdomainsCom(self, domain_name, proxy_address):
        """Scrape findsubdomains.com; return unique names or None on any failure."""
        try:
            url = "https://findsubdomains.com/subdomains-of/{}".format(domain_name)
            data = url_req.standart_request(url, proxy_address)
            soup = BeautifulSoup(data, 'lxml')
            anchors = soup.findAll("a", attrs={"href": "javascript:void(0);", "class": "desktop-hidden"})
            return list(set(raw.text for raw in anchors))
        except Exception:
            return None

# ================================================
# FILE: plugins/svn_finder.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# This file is part of Belati project

import sys, re, time
from url_request import URLRequest

# Console color
G = '\033[92m'  # green
Y = '\033[93m'  # yellow
B = '\033[94m'  # blue
R = '\033[91m'  # red
W = '\033[0m'   # white

url_req = URLRequest()


class SVNFinder(object):
    """Probe a domain for an exposed /.svn/ directory."""

    def check_svn(self, domain, proxy_address):
        """Return 403 or 200 when /.svn/ appears to exist on `domain`, else None.

        just_url_open returns a bare status code on HTTPError (hence the
        `data == 403` check) and "notexist" on unresolvable hosts.
        """
        try:
            svn_url = url_req.ssl_checker(domain) + "/.svn/"
            data = url_req.just_url_open(svn_url, proxy_address)
            if data is not None and data != "notexist":
                if data == 403:
                    return data
                # fix: the original compared against an undefined name
                # `url_request` here, so this branch always raised NameError
                # (silently swallowed) and 200 was never reported.
                if data.getcode() == 200 and svn_url in data.geturl():
                    return data.getcode()
        except Exception:
            return None

# ================================================
# FILE: plugins/updater.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# This file is part of Belati project
# All Utilities Function will be here ;)

import sys, os
import shlex, subprocess
from logger import Logger
from config import Config
from distutils.version import LooseVersion, StrictVersion
from urlparse import urlparse
from url_request import URLRequest
from util import Util

# Console color
G = '\033[92m'  # green
Y = '\033[93m'  # yellow
B = '\033[94m'  # blue
R = '\033[91m'  # red
W = '\033[0m'   # white

log = Logger()
url_req = URLRequest()
util = Util()
conf = Config()


class Updater(object):
    """Self-update Belati from the master GitHub repository via `git pull`."""

    def check_update(self, version):
        """Compare `version` against the remote version file; pull + migrate when newer.

        Exits the process when there is no network connectivity.
        """
        # `git rev-parse` exits 0 inside a working repo; anything else means we
        # are not running from a git checkout and cannot self-update.
        # (fix: the original wrote `not (A and B) == 0`; `==` binds tighter
        # than `not`, so the skip branch below was unreachable.)
        inside_git_repo = os.path.isdir(".git") and os.system('git rev-parse 2> /dev/null > /dev/null') == 0
        if not inside_git_repo:
            log.console_log("{}[-] No Git Control. Skip update check... {}".format(Y, W))
            return

        connection_status = url_req.connection_test()
        remote_version_url = "https://raw.githubusercontent.com/aancw/Belati/master/version"
        log.console_log("{}[+] Checking Network Connection... {} {}".format(G, "OK" if connection_status else "FAILED", W))
        if not connection_status:
            log.console_log("{}[-] Belati can't be used in Offline Mode. Please check your network connection {}".format(R, W))
            sys.exit()

        log.console_log("{}[+] Checking Version Update for Belati... {}".format(G, W))
        remote_version = str(url_req.just_url_open(remote_version_url, "").read())
        if self.update_version(version, remote_version):
            log.console_log("{}[+] Update is available for version {}{}".format(G, remote_version, W))
            # fix: the original logged the literal "{}[*] Updating from master
            # repo" because the .format() call was missing.
            log.console_log("{}[*] Updating from master repo{}".format(G, W))
            self.do_update()
            self.migrate_db()
        else:
            log.console_log("{}[+] Belati version is uptodate \m/{}".format(Y, W))

    def update_version(self, local_version, remote_version):
        """Return True when the remote version string is newer than the local one."""
        return LooseVersion(util.clean_version_string(local_version)) < LooseVersion(util.clean_version_string(remote_version))

    def do_update(self):
        """Pull the latest code from the current git remote."""
        util.do_command("git", "pull")

    def migrate_db(self):
        """Apply pending Django migrations for the bundled web UI."""
        py_bin = conf.get_config("Environment", "py_bin")
        command = "{} web/manage.py".format(py_bin)
        util.do_command(command, "makemigrations web")
        util.do_command(command, "migrate web")

# ================================================
# FILE: plugins/url_request.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# This file is part of Belati project import sys, socket import ssl import urllib2, httplib from user_agents import UserAgents from urlparse import urlparse from logger import Logger import random # Console color G = '\033[92m' # green Y = '\033[93m' # yellow B = '\033[94m' # blue R = '\033[91m' # red W = '\033[0m' # white log = Logger() ua = UserAgents() class URLRequest(object): def standart_request(self, url_request, proxy_address, user_agents=None): try: if type(proxy_address) is list: # Get random proxy from list proxy_address_fix = random.choice(proxy_address) else: proxy_address_fix = proxy_address if proxy_address is not "": log.console_log("{}[*] Using Proxy Address : {}{}".format(Y, proxy_address_fix, W)) if user_agents is not None: user_agent_fix = user_agents else: user_agent_fix = ua.get_user_agent() parse = urlparse(proxy_address_fix) proxy_scheme = parse.scheme proxy = str(parse.hostname) + ':' + str(parse.port) proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy}) opener = urllib2.build_opener(proxy_handler) opener.addheaders = [('User-agent', user_agent_fix )] urllib2.install_opener(opener) req = urllib2.Request(url_request) data = urllib2.urlopen(req).read() return data except urllib2.HTTPError, e: log.console_log('Error code: {}'.format( str(e.code))) return e.code except Exception, detail: log.console_log('ERROR {}'.format( str(detail))) return 1 def header_info(self, url_request, proxy_address): try: if type(proxy_address) is list: # Get random proxy from list proxy_address_fix = random.choice(proxy_address) else: proxy_address_fix = proxy_address if proxy_address is not "": log.console_log("{}[*] Using Proxy Address : {}{}".format(Y, proxy_address_fix, W)) parse = urlparse(proxy_address_fix) proxy_scheme = parse.scheme proxy = str(parse.hostname) + ':' + str(parse.port) proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy}) opener = urllib2.build_opener(proxy_handler) opener.addheaders = [('User-agent', ua.get_user_agent() )] 
urllib2.install_opener(opener) req = urllib2.Request(url_request) data = urllib2.urlopen(req).info() return data except urllib2.HTTPError, e: log.console_log('Error code: {}'.format( str(e.code))) return e.code except Exception, detail: log.console_log('ERROR {}'.format( str(detail))) return 1 except httplib.BadStatusLine: pass def just_url_open(self, url_request, proxy_address): try: if type(proxy_address) is list: # Get random proxy from list proxy_address_fix = random.choice(proxy_address) else: proxy_address_fix = proxy_address if proxy_address is not "": log.console_log("{}[*] Using Proxy Address : {}{}".format(Y, proxy_address_fix, W)) parse = urlparse(proxy_address_fix) proxy_scheme = parse.scheme proxy = str(parse.hostname) + ':' + str(parse.port) proxy_handler = urllib2.ProxyHandler({ proxy_scheme: proxy}) opener = urllib2.build_opener(proxy_handler) opener.addheaders = [('User-agent', ua.get_user_agent() )] urllib2.install_opener(opener) req = urllib2.Request(url_request) data = urllib2.urlopen(req, timeout=25) return data except urllib2.HTTPError, e: return e.code except urllib2.URLError, e: if str(e.reason) == "[Errno -2] Name or service not known": log.console_log("Not EXIST!") log.console_log("Check your internet connection or check your target domain") return "notexist" def ssl_checker(self, domain): domain_fix = "https://{}".format(domain) try: # Skip SSL Verification Check! 
# https://stackoverflow.com/questions/27835619/ssl-certificate-verify-failed-error gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1) # Only for gangstars data = urllib2.urlopen("https://{}".format(domain), timeout=25, context=gcontext) if "ERROR" in data or "Errno" in data: domain_fix = "http://{}".format(domain) except urllib2.HTTPError, e: pass except urllib2.URLError, e: domain_fix = "http://{}".format(domain) except ssl.SSLError as e: domain_fix = "http://{}".format(domain) except httplib.BadStatusLine: domain_fix = "http://{}".format(domain) return domain_fix def connection_test(self): server_test = "github.com" try: host = socket.gethostbyname(server_test) s = socket.create_connection((host, 80), 2) return True except: pass return False ================================================ FILE: plugins/user_agents.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # # Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose. # This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# This file is part of Belati project

from fake_useragent import UserAgent
from fake_useragent import FakeUserAgentError

ua = UserAgent()


class UserAgents(object):
    """Hand out random browser User-Agent strings via fake_useragent."""

    def get_user_agent(self):
        """Return a random UA string, falling back to a static Firefox UA on error."""
        fallback = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:19.0) Gecko/20100101 Firefox/49.0"
        try:
            agent = ua.random
        except FakeUserAgentError:
            agent = fallback
        return agent

    def update_user_agent(self):
        """Refresh the cached user-agent database."""
        ua.update()

if __name__ == '__main__':
    UserAgentsApp = UserAgents()

# ================================================
# FILE: plugins/util.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT
# Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# This file is part of Belati project
# All Utilities Function will be here ;)

import sys, os
import shlex, subprocess
from logger import Logger
from distutils.version import LooseVersion, StrictVersion
from urlparse import urlparse
from url_request import URLRequest

# Console color
G = '\033[92m'  # green
Y = '\033[93m'  # yellow
B = '\033[94m'  # blue
R = '\033[91m'  # red
W = '\033[0m'   # white

log = Logger()
url_req = URLRequest()


class Util(object):
    """Miscellaneous helpers shared across Belati plugins."""

    def check_python_version(self):
        """Exit unless running under Python 2 (Belati is not Python 3 ready)."""
        version_tag = sys.version[:3]
        # fix: the original tested `== "2.7" or "2" in ...`; the first clause
        # was redundant, the membership test alone covers it.
        if "2" in version_tag:
            log.console_log("{}[*] Python version OK! {}{}".format(G, sys.version[:6], W))
        elif "3" in version_tag:
            log.console_log("{}[-] Nope. This system not yet compatible for Python 3!{}".format(Y, W))
            sys.exit()
        else:
            log.console_log("{}[-] Duh. Your python version too old for running this :({}".format(Y, W))
            sys.exit()

    def do_command(self, command, parameter):
        """Run `<command> <parameter>`, logging each stdout line; return the exit code."""
        full_command = "{} {}".format(command, parameter)
        process = subprocess.Popen(shlex.split(full_command), stdout=subprocess.PIPE)
        while True:
            output = process.stdout.readline()
            if output == '' and process.poll() is not None:
                break
            if output:
                log.console_log(output.strip())
        return process.poll()

    def clean_version_string(self, text):
        """Normalize a version like 'v0.2.2-dev\\n' to '0.2.2'.

        fix: the original used text.strip("-dev\\n"), which strips ANY of the
        characters {-, d, e, v, newline} from both ends and can eat legitimate
        leading/trailing digits or letters; remove the exact prefix/suffix
        instead.
        """
        cleaned = text.strip()
        if cleaned.startswith("v"):
            cleaned = cleaned[1:]
        if cleaned.endswith("-dev"):
            cleaned = cleaned[:-len("-dev")]
        return cleaned

    def get_current_work_dir(self):
        """Return the current working directory."""
        return os.getcwd()

    def clean_list_string(self, text):
        """Join an iterable of strings into a single comma-separated string."""
        return str(", ".join(text))

    def strip_scheme(self, url):
        """Return `url` with its leading scheme ('http://', ...) removed."""
        parsed = urlparse(url)
        scheme = "%s://" % parsed.scheme
        return parsed.geturl().replace(scheme, '', 1)

# ================================================
# FILE: plugins/wappalyzer.py
# ================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Belati is tool for Collecting Public Data & Public Document from Website and other service for OSINT purpose.
# This tools is inspired by Foca and Datasploit for OSINT # Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # This file is part of Belati project from Wappalyzer import Wappalyzer, WebPage from user_agents import UserAgents from logger import Logger log = Logger() ua = UserAgents() analyzer = Wappalyzer.latest() class Wappalyzer(object): def run_wappalyze(self, domain): webpage = WebPage.new_from_url(domain) analyze_result = analyzer.analyze(webpage) if analyze_result: for result in analyze_result: log.console_log(result) else: log.console_log("Result Not Found") return str(analyze_result) if __name__ == '__main__': wappalyzerApp = wappalyzer() wappalyzerApp ================================================ FILE: requirements.txt ================================================ dnspython requests argparse texttable python-geoip-geolite2 python-geoip dnsknife termcolor colorama validators tqdm tldextract fake-useragent python-wappalyzer future beautifulsoup4 python-whois futures django==1.11.6 pyexifinfo cmd2==0.8.0 tabulate dnsdumpster ================================================ FILE: version ================================================ v0.2.4.2 ================================================ FILE: web/manage.py ================================================ #!/usr/bin/env python import os import sys if __name__ == "__main__": 
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)

================================================ FILE: web/web/__init__.py ================================================

================================================ FILE: web/web/migrations/0001_initial.py ================================================
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-07-27 17:37
# Auto-generated initial schema. The `managed: False` entries mirror Django's
# own auth/admin/session tables and are never altered by migrate; only the
# Belati result tables below (managed: True) are created. Avoid hand-editing.
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='AuthGroup',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=80, unique=True)),
            ],
            options={
                'db_table': 'auth_group',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthGroupPermissions',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
            ],
            options={
                'db_table': 'auth_group_permissions',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthPermission',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('codename', models.CharField(max_length=100)),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'db_table': 'auth_permission',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUser',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('password', models.CharField(max_length=128)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('is_superuser', models.BooleanField()),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
                ('email', models.CharField(max_length=254)),
                ('is_staff', models.BooleanField()),
                ('is_active', models.BooleanField()),
                ('date_joined', models.DateTimeField()),
                ('username', models.CharField(max_length=150, unique=True)),
            ],
            options={
                'db_table': 'auth_user',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUserGroups',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
            ],
            options={
                'db_table': 'auth_user_groups',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUserUserPermissions',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
            ],
            options={
                'db_table': 'auth_user_user_permissions',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoAdminLog',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('object_id', models.TextField(blank=True, null=True)),
                ('object_repr', models.CharField(max_length=200)),
                ('action_flag', models.PositiveSmallIntegerField()),
                ('change_message', models.TextField()),
                ('action_time', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_admin_log',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoContentType',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('app_label', models.CharField(max_length=100)),
                ('model', models.CharField(max_length=100)),
            ],
            options={
                'db_table': 'django_content_type',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoMigrations',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('app', models.CharField(max_length=255)),
                ('name', models.CharField(max_length=255)),
                ('applied', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_migrations',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoSession',
            fields=[
                ('session_key', models.CharField(max_length=40, primary_key=True, serialize=False)),
                ('session_data', models.TextField()),
                ('expire_date', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_session',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DocResults',
            fields=[
                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_id', models.IntegerField(blank=True, null=True)),
                ('doc_ext', models.TextField(blank=True, null=True)),
                ('doc_url', models.TextField(blank=True, null=True)),
                ('doc_location', models.TextField(blank=True, null=True)),
            ],
            options={
                'db_table': 'doc_results',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='LinkedinCompanyEmployees',
            fields=[
                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_id', models.IntegerField(blank=True, null=True)),
                ('name', models.TextField(blank=True, null=True)),
                ('job_title', models.TextField(blank=True, null=True)),
                ('linkedin_url', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'linkedin_company_employees',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='LinkedinCompanyInfo',
            fields=[
                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_id', models.IntegerField(blank=True, null=True)),
                ('company_name', models.IntegerField(blank=True, null=True)),
                ('company_linkedin_url', models.IntegerField(blank=True, null=True)),
                ('company_description', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'linkedin_company_info',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='MailHarvestResults',
            fields=[
                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_id', models.IntegerField(blank=True, null=True)),
                ('mail_results', models.TextField(blank=True, null=True)),
                ('mail_pgp_results', models.TextField(blank=True, null=True)),
            ],
            options={
                'db_table': 'mail_harvest_results',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='MainDomainResults',
            fields=[
                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_id', models.IntegerField(blank=True, null=True)),
                ('domain', models.TextField(blank=True, null=True)),
                ('domain_whois', models.TextField(blank=True, null=True)),
                ('email', models.TextField(blank=True, null=True)),
                ('domain_reputation', models.TextField(blank=True, null=True)),
                ('domain_blacklist', models.TextField(blank=True, null=True)),
                ('ns_record', models.TextField(blank=True, db_column='NS_record', null=True)),
                ('mx_record', models.TextField(blank=True, db_column='MX_record', null=True)),
            ],
            options={
                'db_table': 'main_domain_results',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='Projects',
            fields=[
                ('project_id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_domain', models.TextField(blank=True, null=True)),
                ('project_org', models.TextField(blank=True, null=True)),
                ('started_time', models.TextField(blank=True, null=True)),
            ],
            options={
                'db_table': 'projects',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='SubdomainResults',
            fields=[
                ('id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
                ('project_id', models.IntegerField(blank=True, null=True)),
                ('subdomain', models.TextField(blank=True, null=True)),
                ('ip_address', models.TextField(blank=True, null=True)),
                ('banner', models.TextField(blank=True, null=True)),
                ('wappalyzer', models.TextField(blank=True, null=True)),
                ('robots_txt', models.TextField(blank=True, null=True)),
                ('is_contain_git', models.IntegerField(blank=True, null=True)),
                ('is_contain_svn', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'subdomain_results',
                'managed': True,
            },
        ),
    ]

================================================ FILE: web/web/migrations/0002_auto_20170727_1741.py ================================================
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-07-27 17:41
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('web', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='docresults',
            name='doc_full_location',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='docresults',
            name='doc_meta_exif',
            field=models.TextField(blank=True, null=True),
        ),
    ]

================================================ FILE: web/web/migrations/0003_docresults_doc_author.py ================================================
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-08-01 07:50
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('web', '0002_auto_20170727_1741'),
    ]

    operations = [
        migrations.AddField(
            model_name='docresults',
            name='doc_author',
            field=models.TextField(blank=True, null=True),
        ),
    ]

================================================ FILE: web/web/migrations/__init__.py ================================================

================================================ FILE: web/web/models.py ================================================
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
#   * Rearrange models' order
#   * Make sure each model has one field with primary_key=True
#   * Make sure each ForeignKey has `on_delete` set to the desired behavior.
#   * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from __future__ import unicode_literals

from django.db import models


class AuthGroup(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    name = models.CharField(unique=True, max_length=80)

    class Meta:
        managed = False
        db_table = 'auth_group'


class AuthGroupPermissions(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)
    permission = models.ForeignKey('AuthPermission', models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'auth_group_permissions'
        unique_together = (('group', 'permission'),)


class AuthPermission(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING)
    codename = models.CharField(max_length=100)
    name = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'auth_permission'
        unique_together = (('content_type', 'codename'),)


class AuthUser(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    password = models.CharField(max_length=128)
    last_login = models.DateTimeField(blank=True, null=True)
    is_superuser = models.BooleanField()
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    email = models.CharField(max_length=254)
    is_staff = models.BooleanField()
    is_active = models.BooleanField()
    date_joined = models.DateTimeField()
    username = models.CharField(unique=True, max_length=150)

    class Meta:
        managed = False
        db_table = 'auth_user'


class AuthUserGroups(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'auth_user_groups'
        unique_together = (('user', 'group'),)


class AuthUserUserPermissions(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    permission = models.ForeignKey(AuthPermission, models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'auth_user_user_permissions'
        unique_together = (('user', 'permission'),)


class DjangoAdminLog(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    object_id = models.TextField(blank=True, null=True)
    object_repr = models.CharField(max_length=200)
    action_flag = models.PositiveSmallIntegerField()
    change_message = models.TextField()
    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING, blank=True, null=True)
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    action_time = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'django_admin_log'


class DjangoContentType(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    app_label = models.CharField(max_length=100)
    model = models.CharField(max_length=100)

    class Meta:
        managed = False
        db_table = 'django_content_type'
        unique_together = (('app_label', 'model'),)


class DjangoMigrations(models.Model):
    id = models.IntegerField(primary_key=True)  # AutoField?
    app = models.CharField(max_length=255)
    name = models.CharField(max_length=255)
    applied = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'django_migrations'


class DjangoSession(models.Model):
    session_key = models.CharField(primary_key=True, max_length=40)
    session_data = models.TextField()
    expire_date = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'django_session'


class DocResults(models.Model):
    # One harvested public document (type, source URL, local path, metadata).
    id = models.IntegerField(blank=True, primary_key=True)
    project_id = models.IntegerField(blank=True, null=True)
    doc_ext = models.TextField(blank=True, null=True)
    doc_url = models.TextField(blank=True, null=True)
    doc_location = models.TextField(blank=True, null=True)
    doc_full_location = models.TextField(blank=True, null=True)
    doc_meta_exif = models.TextField(blank=True, null=True)
    doc_author = models.TextField(blank=True, null=True)

    class Meta:
        managed = True
        db_table = 'doc_results'


class LinkedinCompanyEmployees(models.Model):
    id = models.IntegerField(blank=True, primary_key=True)
    project_id = models.IntegerField(blank=True, null=True)
    name = models.TextField(blank=True, null=True)
    job_title = models.TextField(blank=True, null=True)
    # NOTE(review): an IntegerField named *_url looks wrong — presumably this
    # should be a TextField; confirm against the scraper before changing
    # (any change requires a schema migration).
    linkedin_url = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = True
        db_table = 'linkedin_company_employees'


class LinkedinCompanyInfo(models.Model):
    id = models.IntegerField(blank=True, primary_key=True)
    project_id = models.IntegerField(blank=True, null=True)
    # NOTE(review): these three IntegerFields store what their names say is
    # text (name/URL/description) — likely mis-generated; verify and migrate.
    company_name = models.IntegerField(blank=True, null=True)
    company_linkedin_url = models.IntegerField(blank=True, null=True)
    company_description = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = True
        db_table = 'linkedin_company_info'


class MailHarvestResults(models.Model):
    id = models.IntegerField(blank=True, primary_key=True)
    project_id = models.IntegerField(blank=True, null=True)
    mail_results = models.TextField(blank=True, null=True)
    mail_pgp_results = models.TextField(blank=True, null=True)

    class Meta:
        managed = True
        db_table = 'mail_harvest_results'


class MainDomainResults(models.Model):
    id = models.IntegerField(blank=True, primary_key=True)
    project_id = models.IntegerField(blank=True, null=True)
    domain = models.TextField(blank=True, null=True)
    domain_whois = models.TextField(blank=True, null=True)
    email = models.TextField(blank=True, null=True)
    domain_reputation = models.TextField(blank=True, null=True)
    domain_blacklist = models.TextField(blank=True, null=True)
    ns_record = models.TextField(db_column='NS_record', blank=True, null=True)  # Field name made lowercase.
    mx_record = models.TextField(db_column='MX_record', blank=True, null=True)  # Field name made lowercase.
class Meta: managed = True db_table = 'main_domain_results' class Projects(models.Model): project_id = models.IntegerField(blank=True, primary_key=True) project_domain = models.TextField(blank=True, null=True) project_org = models.TextField(blank=True, null=True) started_time = models.TextField(blank=True, null=True) class Meta: managed = True db_table = 'projects' def __unicode__(self): return self.project_domain class SubdomainResults(models.Model): id = models.IntegerField(blank=True, primary_key=True) project_id = models.IntegerField(blank=True, null=True) subdomain = models.TextField(blank=True, null=True) ip_address = models.TextField(blank=True, null=True) banner = models.TextField(blank=True, null=True) wappalyzer = models.TextField(blank=True, null=True) robots_txt = models.TextField(blank=True, null=True) is_contain_git = models.IntegerField(blank=True, null=True) is_contain_svn = models.IntegerField(blank=True, null=True) class Meta: managed = True db_table = 'subdomain_results' ================================================ FILE: web/web/settings.py ================================================ """ Django settings for web project. Generated by 'django-admin startproject' using Django 1.11.2. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'i+2-zy6rl=!w+z8zai5un*ta$!+^lmlogkid*a6j0*ssfq&f_@' # SECURITY WARNING: don't run with debug turned on in production! 
# Development defaults -- unsuitable for production as-is.
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'web',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'web.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'web.wsgi.application'


# Database -- a single local SQLite file next to manage.py
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation: the four stock validators, referenced by class name.
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.%s' % _validator}
    for _validator in (
        'UserAttributeSimilarityValidator',
        'MinimumLengthValidator',
        'CommonPasswordValidator',
        'NumericPasswordValidator',
    )
]


# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/

STATIC_URL = '/static/'
================================================ FILE: web/web/templates/about.html ================================================ {% extends "header.html" %} {% block content %}

Belati - The Traditional Swiss Army Knife For OSINT


Belati is a tool for collecting public data & public documents from websites and other services for OSINT purposes. This tool is inspired by Foca and Datasploit for OSINT :)

Why I Made this?


Just for learning stuff and OSINT purpose.

What Belati can do?


  • Whois(Indonesian TLD Support)
  • Banner Grabbing
  • Subdomain Enumeration
  • Service Scanning for all Subdomain Machine
  • Wappalyzer Support
  • DNS mapping / Zone Scanning
  • Mail Harvester from Website & Search Engine
  • Mail Harvester from MIT PGP Public Key Server
  • Scraping Public Documents for a Domain from Search Engines
  • Fake and Random User Agent ( Prevent from blocking )
  • Proxy Support for Harvesting Emails and Documents
  • Public Git Finder in domain/subdomain
  • Public SVN Finder in domain/subdomain
  • Robots.txt Scraper in domain/subdomain
  • Gather Public Company Info & Employee
  • SQLite3 Database Support for storing Belati Results
  • Setup Wizard/Configuration for Belati

Author


Aan Wahyu a.k.a Petruknisme(https://petruknisme.com)

Thanks To


Thanks to PyWhois Library, Sublist3r, MailHarvester, Emingoo for being part of my code. Also thanks to Hispagatos, Infosec-ninjas, eCHo, RNDC( Research and development center ) and all other people who are inspiring this project :)

Thanks to Echo-Zine Staff for approving my Ezine : http://ezine.echo.or.id/issue31/005.txt - Belati : Collecting Public Data & Public Document for OSINT Purpose - Petruknisme

Feedback/Suggestion


Feel free to create an issue in this repository or email me at cacaddv [at] gmail.com. Your feedback and suggestions are useful for Belati's development progress :)

License


Belati is licensed under GPL V2. You can use, modify, or redistribute this tool under the terms of GNU General Public License (GPLv2).
Belati is a tool for collecting public data & public documents from websites and other services for OSINT purposes. This tool is inspired by Foca and Datasploit for OSINT. Copyright (C) 2017 cacaddv@gmail.com (Petruknisme a.k.a Aan Wahyu) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
{% endblock %} ================================================ FILE: web/web/templates/base.html ================================================ {% block header %}{% endblock %} {% block navbar %}{% endblock %} {% block content %}{% endblock %} {% block footer %}{% endblock %} ================================================ FILE: web/web/templates/footer.html ================================================
Belati - Copyright 2018
================================================ FILE: web/web/templates/header.html ================================================
{% block content %}{% endblock %}
{% include "footer.html" %} ================================================ FILE: web/web/templates/index.html ================================================ {% extends "header.html" %} {% block content %} {% for pro in project_data %} {% endfor %}
# Domain Organization/Company Time Action
{{ pro.project_id }} {{ pro.project_domain }} {{ pro.project_org }} {{ pro.started_time }}
{% endblock %} ================================================ FILE: web/web/templates/projects.html ================================================ {% extends "header.html" %} {% block content %}
{% for pro in project_data %} {% endfor %}
# Domain Organization/Company Time
{{ pro.project_id }} {{ pro.project_domain }} {{ pro.project_org }} {{ pro.started_time }}
{% for domain in main_domain %} {% endfor %}
Domain: {{ domain.domain }}
Domain Whois: {{ domain.domain_whois }}
Emails: {{ domain.email }}
NS Record: {{ domain.ns_record }}
{# BUG FIX: the MX row printed domain.ns_record (copy-paste from the NS row); the model field is mx_record #}
MX Record: {{ domain.mx_record }}
{% for subdomain in subdomain_results %} {% endfor %}
# Subdomain IP Address Banner Wappalyzer Robots Git SVN
{{ subdomain.id }} {{ subdomain.subdomain }} {{ subdomain.ip_address }} {{ subdomain.banner }} {{ subdomain.wappalyzer }} {{ subdomain.robots_txt }} {{ subdomain.is_contain_git }} {{ subdomain.is_contain_svn }}
{% for mail_data in mail_harvest %} {% endfor %}
# Mail Results PGP Mail Results
{{ mail_data.id }} {{ mail_data.mail_results }} {{ mail_data.mail_pgp_results }}
{% for doc in doc_result %} {% endfor %}
# URL File Location Action
{{ doc.id }} {{ doc.doc_url }} {{ doc.doc_location }}

Company Info


{% for info in company_info %} Company Name: {{ info.company_name }}
LinkedIn URL: {{ info.company_linkedin_url }} {% endfor %}

Company Employees

{% for employee in company_employee %} {% endfor %}
# Name Job Title LinkedIn URL
{{ employee.id}} {{ employee.name }} {{ employee.job_title }} {{ employee.linkedin_url }}
{% endblock %} ================================================ FILE: web/web/urls.py ================================================ """web URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from views import index_page, view_projects, about_page urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', index_page), url(r'^projects/(?P\d+)/view/$', view_projects), url(r'^about', about_page), ] ================================================ FILE: web/web/views.py ================================================ from django.http import HttpResponse, HttpResponseRedirect, Http404 from django.shortcuts import render_to_response, get_object_or_404 from models import Projects, MainDomainResults, SubdomainResults, MailHarvestResults, DocResults, LinkedinCompanyInfo, LinkedinCompanyEmployees def index_page(request): project_list = Projects.objects.all().order_by('project_id') return render_to_response('index.html', {'project_data': project_list}) def view_projects(request, id): project_list = Projects.objects.filter(project_id=id) main_domain = MainDomainResults.objects.filter(project_id=id) subdomain = SubdomainResults.objects.filter(project_id=id) mail_harvest = MailHarvestResults.objects.filter(project_id=id) doc_result = DocResults.objects.filter(project_id=id) company_info = LinkedinCompanyInfo.objects.filter(project_id=id) company_employee = 
LinkedinCompanyEmployees.objects.filter(project_id=id) return render_to_response('projects.html', {'project_data' : project_list, 'main_domain' : main_domain, 'subdomain_results' : subdomain, 'mail_harvest' : mail_harvest, 'doc_result' : doc_result, 'company_info' : company_info, 'company_employee' : company_employee}) def about_page(request): return render_to_response('about.html') ================================================ FILE: web/web/wsgi.py ================================================ """ WSGI config for web project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web.settings") application = get_wsgi_application()