diff --git a/README.md b/README.md index dfbedee..fd181ac 100644 --- a/README.md +++ b/README.md @@ -170,9 +170,9 @@ If you have problems with starting installer.sh, you should try to use `dos2unix # Tasks to complete before new release - [ ] CLI rework (more fancy and user-friendly) -- [ ] Report storage database rework -- [ ] HTML report rework - +- [ ] Report storage database rework (more information to store) +- [ ] HTML report rework (modern style and look; functionality expansion) + # DPULSE mentions in social medias ## Honorable mentions: diff --git a/datagather_modules/crawl_processor.py b/datagather_modules/crawl_processor.py index b0930ed..d47aa11 100644 --- a/datagather_modules/crawl_processor.py +++ b/datagather_modules/crawl_processor.py @@ -1,22 +1,17 @@ import sys +import socket +import re +import urllib +from collections import defaultdict +from urllib.parse import urlparse +import whois +import requests +from bs4 import BeautifulSoup +from colorama import Fore, Style + sys.path.append('service') from logs_processing import logging -try: - import socket - import whois - import re - import requests - import urllib.parse - from colorama import Fore, Style - from urllib.parse import urlparse - from collections import defaultdict - from bs4 import BeautifulSoup - import random -except ImportError as e: - print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL) - sys.exit() - def ip_gather(short_domain): ip_address = socket.gethostbyname(short_domain) return ip_address diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py index 5089c8d..74313ca 100644 --- a/datagather_modules/data_assembler.py +++ b/datagather_modules/data_assembler.py @@ -1,34 +1,24 @@ import sys -sys.path.append('service') -sys.path.append('pagesearch') -sys.path.append('dorking') -sys.path.append('snapshotting') +from datetime import datetime +import os +from colorama import Fore, Style +sys.path.extend(['service', 'pagesearch', 'dorking', 'snapshotting']) + +from logs_processing import logging +from config_processing import read_config +from db_creator import get_dorking_query import crawl_processor as cp import dorking_handler as dp import networking_processor as np from pagesearch_parsers import subdomains_parser -from logs_processing import logging from api_virustotal import api_virustotal_check from api_securitytrails import api_securitytrails_check from api_hudsonrock import api_hudsonrock_check -from db_creator import get_dorking_query from screen_snapshotting import take_screenshot -from config_processing import read_config from html_snapshotting import save_page_as_html from archive_snapshotting import download_snapshot -try: - import requests - from datetime import datetime - import os - from colorama import Fore, Style - import sqlite3 - import configparser -except ImportError as e: - print(Fore.RED + "Import error appeared. 
Reason: {}".format(e) + Style.RESET_ALL) - sys.exit() - def establishing_dork_db_connection(dorking_flag): dorking_db_paths = { 'basic': 'dorking//basic_dorking.db', @@ -118,6 +108,10 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k for key in common_socials: common_socials[key] = list(set(common_socials[key])) total_socials = sum(len(values) for values in common_socials.values()) + total_ports = len(ports) + total_ips = len(subdomain_ip) + 1 + total_vulns = len(vulns) + print(Fore.LIGHTMAGENTA_EX + "\n[BASIC SCAN END]\n" + Style.RESET_ALL) if report_file_type == 'xlsx': if pagesearch_flag.lower() == 'y': @@ -206,7 +200,17 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k if subdomains[0] != 'No subdomains were found': to_search_array = [subdomains, social_medias, sd_socials] print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH]\n" + Style.RESET_ALL) - ps_emails_return, accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter, keywords_messages_list = subdomains_parser(to_search_array[0], report_folder, keywords, keywords_flag) + ( + ps_emails_return, + accessible_subdomains, + emails_amount, + files_counter, + cookies_counter, + api_keys_counter, + website_elements_counter, + exposed_passwords_counter, + keywords_messages_list + ), ps_string = subdomains_parser(to_search_array[0], report_folder, keywords, keywords_flag) total_links_counter = accessed_links_counter = "No results because PageSearch does not gather these categories" if len(keywords_messages_list) == 0: keywords_messages_list = ['No keywords were found'] @@ -215,11 +219,13 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k print(Fore.RED + "Cant start PageSearch because no subdomains were detected") ps_emails_return = "" accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No results because no subdomains were found' + ps_string = 'No PageSearch listing provided because no subdomains were found' keywords_messages_list = ['No data was gathered because no subdomains were found'] pass elif pagesearch_flag.lower() == 'n': accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = "No results because user did not selected PageSearch for this scan" ps_emails_return = "" + ps_string = 'No PageSearch listing provided because user did not selected PageSearch mode for this scan' pass if dorking_flag == 'n': @@ -282,7 +288,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return, accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list, dorking_status, dorking_file_path, - virustotal_output, securitytrails_output, hudsonrock_output] + virustotal_output, securitytrails_output, hudsonrock_output, ps_string, total_ports, total_ips, total_vulns] report_info_array = [casename, db_casename, db_creation_date, report_folder, ctime, report_file_type, report_ctime, api_scan_db, used_api_flag] 
logging.info(f'### THIS LOG PART FOR {casename} CASE, TIME: {ctime} ENDS HERE') diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py index 0ff444c..967a61d 100644 --- a/dorking/dorking_handler.py +++ b/dorking/dorking_handler.py @@ -1,22 +1,18 @@ import sys +import random +import time +import os +import logging +from colorama import Fore, Style +import undetected_chromedriver as uc +from selenium.webdriver.common.by import By +from selenium.webdriver.common.keys import Keys + sys.path.append('service') -from config_processing import read_config from logs_processing import logging from ua_rotator import user_agent_rotator from proxies_rotator import proxies_rotator - -try: - import requests.exceptions - from colorama import Fore, Style - import mechanicalsoup - import re - import requests - import sqlite3 - import time - import os -except ImportError as e: - print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL) - sys.exit() +from config_processing import read_config def proxy_transfer(): proxy_flag, proxies_list = proxies_rotator.get_proxies() @@ -27,44 +23,96 @@ def proxy_transfer(): working_proxies = proxies_rotator.check_proxies(proxies_list) return proxy_flag, working_proxies -def solid_google_dorking(query, dorking_delay, delay_step, proxy_flag, proxies_list, pages=100): +def solid_google_dorking(query, proxy_flag, proxies_list, pages=1): + result_query = [] + request_count = 0 try: - browser = mechanicalsoup.StatefulBrowser() - if proxy_flag == 1: - browser.session.proxies = proxies_rotator.get_random_proxy(proxies_list) - else: + config_values = read_config() + options = uc.ChromeOptions() + options.binary_location = r"{}".format(config_values['dorking_browser']) + dorking_browser_mode = config_values['dorking_browser_mode'] + if dorking_browser_mode.lower() == 'headless': + options.add_argument("--headless=new") + elif dorking_browser_mode.lower() == 'nonheadless': pass - browser.open("https://www.google.com/") - browser.select_form('form[action="/search"]') - browser["q"] = str(query) - browser.submit_selected(btnName="btnG") - result_query = [] - request_count = 0 + options.add_argument("--no-sandbox") + options.add_argument("--disable-dev-shm-usage") + options.add_argument("--disable-blink-features=AutomationControlled") + options.add_argument("--disable-infobars") + options.add_argument("--disable-extensions") + options.add_argument(f"user-agent={user_agent_rotator.get_random_user_agent()}") + if proxy_flag == 1: + proxy = proxies_rotator.get_random_proxy(proxies_list) + options.add_argument(f'--proxy-server={proxy["http"]}') + driver = uc.Chrome(options=options) for page in range(pages): try: - for link in browser.links(): - target = link.attrs['href'] - if (target.startswith('/url?') and not target.startswith("/url?q=http://webcache.googleusercontent.com")): - target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target) - result_query.append(target) + driver.get("https://www.google.com") + time.sleep(random.uniform(2, 4)) + try: + accepted = False + try: + accept_btn = driver.find_element(By.XPATH, '//button[contains(text(), "Принять все") or contains(text(), "Accept all")]') + driver.execute_script("arguments[0].click();", accept_btn) + print(Fore.GREEN + 'Pressed "Accept all" button!' 
+ Style.RESET_ALL) + accepted = True + time.sleep(random.uniform(2, 3)) + except: + pass + if not accepted: + iframes = driver.find_elements(By.TAG_NAME, "iframe") + for iframe in iframes: + driver.switch_to.frame(iframe) + try: + accept_btn = driver.find_element(By.XPATH, '//button[contains(text(), "Принять все") or contains(text(), "Accept all")]') + driver.execute_script("arguments[0].click();", accept_btn) + print(Fore.GREEN + 'Pressed "Accept all" button!' + Style.RESET_ALL) + accepted = True + driver.switch_to.default_content() + time.sleep(random.uniform(2, 3)) + break + except: + driver.switch_to.default_content() + continue + driver.switch_to.default_content() + if not accepted: + print(Fore.GREEN + "Google TOS button was not found. Seems good..." + Style.RESET_ALL) + except Exception: + print(Fore.RED + f'Error with pressing "Accept all" button. Closing...' + Style.RESET_ALL) + driver.save_screenshot("consent_error.png") + driver.switch_to.default_content() + search_box = driver.find_element(By.NAME, "q") + for char in query: + search_box.send_keys(char) + time.sleep(random.uniform(0.05, 0.2)) + time.sleep(random.uniform(0.5, 1.2)) + search_box.send_keys(Keys.RETURN) + time.sleep(random.uniform(2.5, 4)) + links = driver.find_elements(By.CSS_SELECTOR, 'a') + for link in links: + href = link.get_attribute('href') + if href and href.startswith('http') and 'google.' not in href and 'webcache.googleusercontent.com' not in href: + result_query.append(href) request_count += 1 - if request_count % delay_step == 0: - time.sleep(dorking_delay) - browser.session.headers['User-Agent'] = user_agent_rotator.get_random_user_agent() - browser.follow_link(nr=page + 1) - except mechanicalsoup.LinkNotFoundError: - break + try: + next_button = driver.find_element(By.ID, 'pnnext') + next_button.click() + time.sleep(random.uniform(2, 3)) + except: + break except Exception as e: - logging.error(f'DORKING PROCESSING: ERROR. REASON: {e}') - del result_query[-2:] + logging.error(f'DORKING PROCESSING (SELENIUM): ERROR. REASON: {e}') + continue + driver.quit() + if len(result_query) >= 2: + del result_query[-2:] return result_query - except requests.exceptions.ConnectionError as e: - print(Fore.RED + "Error while establishing connection with domain. No results will appear. See journal for details" + Style.RESET_ALL) - logging.error(f'DORKING PROCESSING: ERROR. REASON: {e}') except Exception as e: logging.error(f'DORKING PROCESSING: ERROR. REASON: {e}') + print(Fore.RED + "Error while running Selenium dorking. See journal for details." 
+ Style.RESET_ALL) + return [] -def save_results_to_txt(folderpath, table, queries, pages=10): +def save_results_to_txt(folderpath, table, queries, pages=1): try: config_values = read_config() dorking_delay = int(config_values['dorking_delay (secs)']) @@ -80,7 +128,7 @@ def save_results_to_txt(folderpath, table, queries, pages=10): for i, query in enumerate(queries, start=1): f.write(f"QUERY #{i}: {query}\n") try: - results = solid_google_dorking(query, dorking_delay, delay_step, proxy_flag, proxies_list, pages) + results = solid_google_dorking(query, proxy_flag, proxies_list, pages) if not results: f.write("=> NO RESULT FOUND\n") total_results.append((query, 0)) diff --git a/pagesearch/pagesearch_parsers.py b/pagesearch/pagesearch_parsers.py index 5e72e51..c6933e8 100644 --- a/pagesearch/pagesearch_parsers.py +++ b/pagesearch/pagesearch_parsers.py @@ -1,14 +1,26 @@ +import os +import re +import sys +import fitz import requests + from bs4 import BeautifulSoup -import re +from typing import List, Tuple from colorama import Fore, Style -import os -import fitz -import sys + sys.path.append('service') from logs_processing import logging from cli_init import print_ps_cli_report +ansi_re = re.compile(r'\x1b\[[0-9;]*[mK]') + +def make_recorder(storage: List[str]): + def _rec(*parts, sep=" ", end="\n"): + msg = sep.join(str(p) for p in parts) + end + print(msg, end="") + storage.append(ansi_re.sub("", msg)) + return _rec + def extract_text_from_pdf(filename: str) -> str: try: logging.info('TEXT EXTRACTION FROM PDF (PAGESEARCH): OK') @@ -18,16 +30,15 @@ def extract_text_from_pdf(filename: str) -> str: text += page.get_text() return text except Exception as e: - print(Fore.RED + f"Can't open some PDF file. See journal for details" + Style.RESET_ALL) + print(Fore.RED + "Can't open some PDF file. See journal for details" + Style.RESET_ALL) logging.error(f'TEXT EXTRACTION FROM PDF (PAGESEARCH): ERROR. REASON: {e}') - pass + return "" -def find_keywords_in_pdfs(ps_docs_path, keywords: list) -> dict: +def find_keywords_in_pdfs(ps_docs_path, keywords: List[str]) -> Tuple[dict, int]: try: logging.info('KEYWORDS SEARCH IN PDF (PAGESEARCH): OK') pdf_files = [f for f in os.listdir(ps_docs_path) if f.lower().endswith(".pdf")] - results = {} - pdf_with_keywords = 0 + results, pdf_with_keywords = {}, 0 for pdf_file in pdf_files: pdf_path = os.path.join(ps_docs_path, pdf_file) extracted_text = extract_text_from_pdf(pdf_path) @@ -39,42 +50,30 @@ def find_keywords_in_pdfs(ps_docs_path, keywords: list) -> dict: pdf_with_keywords += 1 return results, pdf_with_keywords except Exception as e: - print(Fore.RED + f"Can't find keywords. See journal for details") + print(Fore.RED + "Can't find keywords. See journal for details" + Style.RESET_ALL) logging.error(f'KEYWORDS SEARCH IN PDF (PAGESEARCH): ERROR. REASON: {e}') - pass + return {}, 0 def clean_bad_pdfs(ps_docs_path): pdf_files = [f for f in os.listdir(ps_docs_path) if f.lower().endswith(".pdf")] - bad_pdfs = [] for pdf_file in pdf_files: try: - full_path = os.path.join(ps_docs_path, pdf_file) - fitz.open(filename=full_path) + fitz.open(filename=os.path.join(ps_docs_path, pdf_file)) except Exception: - bad_pdfs.append(pdf_file) - pass - if len(bad_pdfs) > 0: - corrupted_pdfs_counter = 0 - for pdfs in bad_pdfs: - os.remove(os.path.join(ps_docs_path, pdfs)) - corrupted_pdfs_counter += 1 - print(Fore.GREEN + f"Found {corrupted_pdfs_counter} corrupted PDF files. 
Deleting...") - else: - print(Fore.GREEN + "Corrupted PDF files were not found" + Style.RESET_ALL) + os.remove(os.path.join(ps_docs_path, pdf_file)) def subdomains_parser(subdomains_list, report_folder, keywords, keywords_flag): + report_lines: List[str] = [] + p = make_recorder(report_lines) print(Fore.GREEN + "Conducting PageSearch. Please, be patient, it may take a long time\n" + Style.RESET_ALL) - ps_docs_path = report_folder + '//ps_documents' + ps_docs_path = os.path.join(report_folder, 'ps_documents') if not os.path.exists(ps_docs_path): os.makedirs(ps_docs_path) + email_pattern = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}' - total_emails = [] - accessible_subdomains = 0 - files_counter = 0 - website_elements_counter = 0 - exposed_passwords_counter = 0 - api_keys_counter = 0 - cookies_counter = 0 + total_emails, keywords_messages_list = [], [] + accessible_subdomains = files_counter = website_elements_counter = 0 + exposed_passwords_counter = api_keys_counter = cookies_counter = 0 tried_subdomains_counter = 0 for url in subdomains_list: @@ -85,14 +84,16 @@ def subdomains_parser(subdomains_list, report_folder, keywords, keywords_flag): if response.status_code == 200: accessible_subdomains += 1 soup = BeautifulSoup(response.content, 'html.parser') + else: + continue except Exception as e: - print(Fore.RED + "Can't access some subdomain. See journal for details") + print(Fore.RED + "Can't access some subdomain. See journal for details" + Style.RESET_ALL) logging.error(f'ACCESSING SUBDOMAIN (PAGESEARCH): ERROR. REASON: {e}') - pass + continue try: logging.info('WEB RESOURCE ADDITIONAL INFO GATHERING (PAGESEARCH): OK') - title = soup.title.string + title = soup.title.string if soup.title else "No title" emails = re.findall(email_pattern, soup.text) total_emails.append(emails) if not emails: @@ -101,105 +102,112 @@ def subdomains_parser(subdomains_list, report_folder, keywords, keywords_flag): search_query_input = soup.find('input', {'name': 'q'}) customization_input = soup.find('input', {'name': 'language'}) passwords = soup.find_all('input', {'type': 'password'}) - print(Fore.LIGHTGREEN_EX + "-------------------------------------------------" + Style.RESET_ALL) - print(Fore.GREEN + "Page number: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{tried_subdomains_counter}/{len(subdomains_list)}" + Style.RESET_ALL) - print(Fore.GREEN + "Page URL: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{url}" + Style.RESET_ALL) - print(Fore.GREEN + "Page title: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{title}" + Style.RESET_ALL) - print(Fore.GREEN + "Found e-mails: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{', '.join(emails)}" + Style.RESET_ALL) - - if customization_input and customization_input.get('value') is not None and len(customization_input.get('value')) > 0: - print(Fore.GREEN + "Found site customization setting: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{customization_input.get('value')}" + Style.RESET_ALL) + p(Fore.LIGHTGREEN_EX + "-------------------------------------------------" + Style.RESET_ALL) + p(Fore.GREEN + "Page number: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{tried_subdomains_counter}/{len(subdomains_list)}" + Style.RESET_ALL) + p(Fore.GREEN + "Page URL: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{url}" + Style.RESET_ALL) + p(Fore.GREEN + "Page title: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{title}" + Style.RESET_ALL) + p(Fore.GREEN + "Found e-mails: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{', '.join(emails)}" + Style.RESET_ALL) + + if customization_input and 
customization_input.get('value'): + p(Fore.GREEN + "Found site customization setting: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{customization_input.get('value')}" + Style.RESET_ALL) website_elements_counter += 1 - if search_query_input and search_query_input.get('value') is not None and len(search_query_input.get('value')) > 0: - print(Fore.GREEN + "Found search query: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{search_query_input.get('value')}" + Style.RESET_ALL) + if search_query_input and search_query_input.get('value'): + p(Fore.GREEN + "Found search query: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{search_query_input.get('value')}" + Style.RESET_ALL) website_elements_counter += 1 for hidden_input in hidden_inputs: - if hidden_input is not None and hidden_input.get('value') is not None and len(hidden_input.get('value')) > 0: - print(Fore.GREEN + "Found hidden form data: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{hidden_input.get('value')}" + Style.RESET_ALL) + if hidden_input and hidden_input.get('value'): + p(Fore.GREEN + "Found hidden form data: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{hidden_input.get('value')}" + Style.RESET_ALL) website_elements_counter += 1 for password in passwords: - if password is not None and password.get('value') is not None and len(password.get('value')) > 0: - print(Fore.GREEN + "Found exposed password: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{password.get('value')}" + Style.RESET_ALL) + if password and password.get('value'): + p(Fore.GREEN + "Found exposed password: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{password.get('value')}" + Style.RESET_ALL) exposed_passwords_counter += 1 - api_keys = soup.find_all('input', attrs={'type': 'apikey'}) for key in api_keys: key_value = key.get('value') - print(Fore.GREEN + f"Found API Key: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{key_value}") + p(Fore.GREEN + f"Found API Key: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{key_value}" + Style.RESET_ALL) api_keys_counter += 1 cookies_dict = response.cookies for cookie_name, cookie_value in cookies_dict.items(): - print(Fore.GREEN + "Found cookie: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{cookie_name}. " + Style.RESET_ALL + Fore.GREEN + "Value: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{cookie_value}" + Style.RESET_ALL) + p(Fore.GREEN + "Found cookie: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{cookie_name}. " + Style.RESET_ALL + Fore.GREEN + "Value: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{cookie_value}" + Style.RESET_ALL) cookies_counter += 1 except Exception as e: - print(Fore.RED + "Error while getting detailed info on web resource. See journal for details") + print(Fore.RED + "Error while getting detailed info on web resource. See journal for details" + Style.RESET_ALL) logging.error(f'WEB RESOURCE ADDITIONAL INFO GATHERING (PAGESEARCH): ERROR. 
REASON: {e}') - pass try: logging.info('FILES EXTRACTION (PAGESEARCH): OK') links = soup.find_all('a') for link in links: href = link.get('href') - if href: - if href.lower().endswith(('.docx', '.xlsx', '.csv', '.pdf', '.pptx', '.doc', '.ppt', '.xls', '.rtf', '.conf', '.config', '.db', '.sql', '.json', '.txt')): - document_url = 'http://' + url + href - print(Fore.GREEN + "Found document: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{document_url}" + Style.RESET_ALL) - response = requests.get(document_url) - file_extensions = { - '.docx': 'extracted_{}.docx', - '.xlsx': 'extracted_{}.xlsx', - '.pdf': 'extracted_{}.pdf', - '.csv': 'extracted_{}.csv', - '.pptx': 'extracted_{}.pptx', - '.doc': 'extracted_{}.doc', - '.ppt': 'extracted_{}.ppt', - '.xls': 'extracted_{}.xls', - '.json': 'extracted_{}.json', - '.txt': 'extracted_{}.txt', - '.sql': 'extracted_{}.sql', - '.db': 'extracted_{}.db', - '.config': 'extracted_{}.config', - '.conf': 'extracted_{}.conf' - } - if response.status_code == 200: - if href: - file_extension = os.path.splitext(href.lower())[1] - if file_extension in file_extensions: - filename = os.path.basename(href) - extracted_path = os.path.join(ps_docs_path, file_extensions[file_extension].format( - os.path.splitext(filename)[0])) - with open(extracted_path, 'wb') as file: - file.write(response.content) - files_counter += 1 - print(Fore.GREEN + "File was successfully saved") + if href and href.lower().endswith(('.docx', '.xlsx', '.csv', '.pdf', '.pptx', '.doc', '.ppt', '.xls', '.rtf', '.conf', '.config', '.db', '.sql', '.json', '.txt')): + document_url = 'http://' + url + href + p(Fore.GREEN + "Found document: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{document_url}" + Style.RESET_ALL) + response_doc = requests.get(document_url) + file_extensions = { + '.docx': 'extracted_{}.docx', + '.xlsx': 'extracted_{}.xlsx', + '.pdf': 'extracted_{}.pdf', + '.csv': 'extracted_{}.csv', + '.pptx': 'extracted_{}.pptx', + '.doc': 'extracted_{}.doc', + '.ppt': 'extracted_{}.ppt', + '.xls': 'extracted_{}.xls', + '.json': 'extracted_{}.json', + '.txt': 'extracted_{}.txt', + '.sql': 'extracted_{}.sql', + '.db': 'extracted_{}.db', + '.config': 'extracted_{}.config', + '.conf': 'extracted_{}.conf' + } + if response_doc.status_code == 200: + file_extension = os.path.splitext(href.lower())[1] + if file_extension in file_extensions: + filename = os.path.basename(href) + extracted_path = os.path.join(ps_docs_path, file_extensions[file_extension].format(os.path.splitext(filename)[0])) + with open(extracted_path, 'wb') as file: + file.write(response_doc.content) + files_counter += 1 + p(Fore.GREEN + "File was successfully saved" + Style.RESET_ALL) except Exception as e: - print(Fore.RED + "This file can't be accessed to extract it. See journal for details") + print(Fore.RED + "This file can't be accessed to extract it. See journal for details" + Style.RESET_ALL) logging.error(f'FILES EXTRACTION (PAGESEARCH): ERROR. REASON: {e}') - pass + p(Fore.LIGHTGREEN_EX + "-------------------------------------------------" + Style.RESET_ALL) ps_emails_list = [x for x in total_emails if x] ps_emails_return = [', '.join(sublist) for sublist in ps_emails_list] clean_bad_pdfs(ps_docs_path) - keywords_messages_list = [] + + pdf_with_keywords = 0 if keywords_flag == 1: print(Fore.GREEN + "Searching keywords in PDF files..." 
+ Style.RESET_ALL) - try: - pdf_results, pdf_with_keywords = find_keywords_in_pdfs(ps_docs_path, keywords) - for pdf_file, found_keywords in pdf_results.items(): - print(Fore.GREEN + f"Keywords " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{', '.join(found_keywords)}" + Style.RESET_ALL + Fore.GREEN + f" found in '{pdf_file}'" + Style.RESET_ALL) - keywords_messages_list.append(f"Keywords {', '.join(found_keywords)} found in '{pdf_file}'") - except Exception as e: - print(Fore.RED + f"Can't find keywords. See journal for details") - logging.error(f'KEYWORDS SEARCH IN PDF (PAGESEARCH): ERROR. REASON: {e}') - pdf_with_keywords = 0 + pdf_results, pdf_with_keywords = find_keywords_in_pdfs(ps_docs_path, keywords) + for pdf_file, found_keywords in pdf_results.items(): + p(Fore.GREEN + f"Keywords " + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{', '.join(found_keywords)}" + Style.RESET_ALL + Fore.GREEN + f" found in '{pdf_file}'" + Style.RESET_ALL) + keywords_messages_list.append(f"Keywords {', '.join(found_keywords)} found in '{pdf_file}'") + print_ps_cli_report(subdomains_list, accessible_subdomains, ps_emails_return, files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter) if keywords_flag == 0: - print(Fore.RED + "[+] Keywords were not gathered because of None user input") - return ps_emails_return, accessible_subdomains, len(ps_emails_return), files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter, ['No keywords were found because of None user input'] + print(Fore.RED + "[+] Keywords were not gathered because of None user input" + Style.RESET_ALL) + keywords_messages_list = ['No keywords were found because of None user input'] else: - print(Fore.GREEN + f"[+] Total {pdf_with_keywords} keywords were found in PDF files") - return ps_emails_return, accessible_subdomains, len(ps_emails_return), files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter, keywords_messages_list + print(Fore.GREEN + f"[+] Total {pdf_with_keywords} keywords were found in PDF files" + Style.RESET_ALL) + + data_tuple = ( + ps_emails_return, + accessible_subdomains, + len(ps_emails_return), + files_counter, + cookies_counter, + api_keys_counter, + website_elements_counter, + exposed_passwords_counter, + keywords_messages_list + ) + + exclude = ("Conducting PageSearch", "Searching keywords", "Keywords were not gathered", "Total ") + pagesearch_query = "\n".join(line for line in report_lines if not line.startswith(exclude)) + return data_tuple, pagesearch_query diff --git a/poetry.lock b/poetry.lock index 0871fe5..cf07a58 100644 --- a/poetry.lock +++ b/poetry.lock @@ -250,14 +250,14 @@ files = [ [[package]] name = "click" -version = "8.1.8" +version = "8.2.1" description = "Composable command line interface toolkit" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, ] [package.dependencies] @@ -343,17 +343,20 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.2" 
+version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main"] markers = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] @@ -402,152 +405,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "lxml" -version = "5.4.0" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, - {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"}, - {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"}, - {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"}, - {file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = 
"sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"}, - {file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"}, - {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"}, - {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"}, - {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"}, - {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"}, - {file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"}, - {file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"}, - {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"}, - {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"}, - {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"}, - {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"}, - {file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"}, - {file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"}, - {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"}, - {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"}, - {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"}, - {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"}, - {file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"}, - {file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"}, - {file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"}, - {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"}, - {file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"}, - {file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"}, - {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"}, - {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"}, - {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"}, - {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"}, - {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"}, - {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"}, - {file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"}, - {file = 
"lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"}, - {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"}, - {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"}, - {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"}, - {file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"}, - {file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"}, - {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"}, - {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"}, - {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"}, - {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"}, - {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"}, - {file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"}, - {file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"}, - {file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"}, - {file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"}, - {file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"}, - {file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"}, - {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html-clean = ["lxml_html_clean"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.11,<3.1.0)"] - [[package]] name = "markdown-it-py" version = "3.0.0" @@ -656,23 +513,6 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] -[[package]] -name = "mechanicalsoup" -version 
= "1.3.0" -description = "A Python library for automating interaction with websites" -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "MechanicalSoup-1.3.0-py3-none-any.whl", hash = "sha256:83dfc23bbbcaafb62dd43e0f12aee3202e780650b4612d999b54324558980114"}, - {file = "MechanicalSoup-1.3.0.tar.gz", hash = "sha256:38e8748f62fd9455a0818701a9e2dbfa549639d09f829f3fdd03665c825e7ce1"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.7" -lxml = "*" -requests = ">=2.22.0" - [[package]] name = "openpyxl" version = "3.1.2" @@ -904,19 +744,18 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requirements-parser" -version = "0.11.0" +version = "0.13.0" description = "This is a small Python module for parsing Pip requirement files." optional = false python-versions = "<4.0,>=3.8" groups = ["dev"] files = [ - {file = "requirements_parser-0.11.0-py3-none-any.whl", hash = "sha256:50379eb50311834386c2568263ae5225d7b9d0867fb55cf4ecc93959de2c2684"}, - {file = "requirements_parser-0.11.0.tar.gz", hash = "sha256:35f36dc969d14830bf459803da84f314dc3d17c802592e9e970f63d0359e5920"}, + {file = "requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14"}, + {file = "requirements_parser-0.13.0.tar.gz", hash = "sha256:0843119ca2cb2331de4eb31b10d70462e39ace698fd660a915c247d2301a4418"}, ] [package.dependencies] packaging = ">=23.2" -types-setuptools = ">=69.1.0" [[package]] name = "rich" @@ -957,27 +796,6 @@ typing_extensions = ">=4.9,<5.0" urllib3 = {version = ">=1.26,<3", extras = ["socks"]} websocket-client = ">=1.8,<2.0" -[[package]] -name = "setuptools" -version = "80.3.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "setuptools-80.3.0-py3-none-any.whl", hash = "sha256:a65cffc4fb86167e3020b3ef58e08226baad8b29a3b34ce2c9d07e901bac481d"}, - {file = "setuptools-80.3.0.tar.gz", hash = "sha256:ec8308eb180b2312062b1c5523204acf872cd8b0a9e6c2ae76431b22bc4065d7"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", 
"jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] - [[package]] name = "six" version = "1.17.0" @@ -1109,32 +927,33 @@ trio = ">=0.11" wsproto = ">=0.14" [[package]] -name = "types-setuptools" -version = "80.1.0.20250503" -description = "Typing stubs for setuptools" +name = "typing-extensions" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["main"] files = [ - {file = "types_setuptools-80.1.0.20250503-py3-none-any.whl", hash = "sha256:b25bcbfeebae06bc4d2d09231b0c0622e95bdbb36df070cfe40b925fcef97dd3"}, - {file = "types_setuptools-80.1.0.20250503.tar.gz", hash = "sha256:3cbdbe42c12e1d9179e6e87644a2cf176259411b5a3cc1d0fb0d824dc711d9ed"}, + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] -[package.dependencies] -setuptools = "*" - [[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +name = "undetected-chromedriver" +version = "3.5.5" +description = "('Selenium.webdriver.Chrome replacement with compatiblity for Brave, and other Chromium based browsers.', 'Not triggered by CloudFlare/Imperva/hCaptcha and such.', 'NOTE: results may vary due to many factors. No guarantees are given, except for ongoing efforts in understanding detection algorithms.')" optional = false -python-versions = ">=3.8" +python-versions = "*" groups = ["main"] files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "undetected-chromedriver-3.5.5.tar.gz", hash = "sha256:9f945e1435005247abe17de316bcfda85b284a4177fd5f25167c78ced33b65ec"}, ] +[package.dependencies] +requests = "*" +selenium = ">=4.9.0" +websockets = "*" + [[package]] name = "urllib3" version = "2.4.0" @@ -1190,6 +1009,85 @@ docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = 
"websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = 
"websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = 
"websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + [[package]] name = "wsproto" version = "1.2.0" @@ -1208,4 +1106,4 @@ h11 = ">=0.9.0,<1" [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "da8180404107284630cbcc7ddf3bbc27e8cd964a2d8e91aba64920288ce55358" +content-hash 
= "8cfa511b5d3bf93067128637e8ff4c1dda03d1d14c601047822c85d84407ec1b" diff --git a/pyproject.toml b/pyproject.toml index 1ba2dc0..4546fba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "dpulse" -version = "1.3" +version = "1.3.1" description = "Convenient,fast and user-friendly collector of domain information from Open-Sources" authors = ["OSINT-TECHNOLOGIES "] readme = "README.md" @@ -29,17 +29,16 @@ python-whois = "0.9.4" colorama = "0.4.6" pyfiglet = "1.0.2" rich = "13.7.1" -MechanicalSoup = "1.3.0" builtwith = "1.3.4" dnspython = "2.6.1" openpyxl = "3.1.2" PyMuPDF = "1.24.7" selenium = "4.28.1" webdriver-manager = "4.0.2" +undetected_chromedriver = "3.5.5" [tool.poetry.scripts] dpulse = 'dpulse.dpulse:run' [tool.poetry.group.dev.dependencies] deptry = "^0.23.0" - diff --git a/reporting_modules/html_report_creation.py b/reporting_modules/html_report_creation.py index 0f67527..1dce4ff 100644 --- a/reporting_modules/html_report_creation.py +++ b/reporting_modules/html_report_creation.py @@ -1,4 +1,7 @@ import sys +import os +from jinja2 import Environment, FileSystemLoader +from colorama import Fore, Style sys.path.append('service') sys.path.append('service//pdf_report_templates') @@ -10,17 +13,7 @@ from api_hudsonrock import hudsonrock_html_prep from api_virustotal import virustotal_html_prep from api_securitytrails import securitytrails_html_prep - -try: - from datetime import datetime - from jinja2 import Environment, FileSystemLoader - import os - from colorama import Fore, Style - import sqlite3 - import re -except ImportError as e: - print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL) - sys.exit() +from config_processing import read_config def generate_report(data, output_file, template_path): env = Environment(loader=FileSystemLoader('.')) @@ -79,6 +72,10 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a virustotal_output = data_array[45] securitytrails_output = data_array[46] hudsonrock_output = data_array[47] + ps_string = data_array[48] + total_ports = data_array[49] + total_ips = data_array[50] + total_vulns = data_array[51] casename = report_info_array[0] db_casename = report_info_array[1] db_creation_date = report_info_array[2] @@ -91,7 +88,6 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a virustotal_output = virustotal_html_prep(virustotal_output) securitytrails_output = securitytrails_html_prep(securitytrails_output) - pdf_templates_path = 'service//pdf_report_templates' if len(ps_emails_return) > 0: subdomain_mails += ps_emails_return @@ -115,7 +111,15 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a new_emails = email.split(', ') subdomain_mails_cleaned.extend(new_emails) - template_path = pdf_templates_path + '//default_report_temp.html' + total_mails = len(subdomain_mails_cleaned) + pdf_templates_path = 'service//pdf_report_templates' + config_values = read_config() + selected_template = config_values['template'] + delete_txt_files = config_values['delete_txt_files'] + if selected_template.lower() == 'modern': + template_path = pdf_templates_path + '//modern_report_template.html' + elif selected_template.lower() == 'legacy': + template_path = pdf_templates_path + '//legacy_report_template.html' dorking_results_path = report_folder + '//04-dorking_results.txt' if os.path.isfile(dorking_results_path): with open(dorking_results_path, 'r') as f: @@ -123,6 +127,8 
@@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a else: add_dsi = 'Dorking mode was not enabled so there is no results to see' + robots_content, sitemap_content, sitemap_links_content, dorking_content = fp.get_db_columns(report_folder) + context = {'sh_domain': short_domain, 'full_url': url, 'ip_address': ip, 'registrar': res['registrar'], 'creation_date': res['creation_date'], 'expiration_date': res['expiration_date'], 'name_servers': ', '.join(res['name_servers']), 'org': res['org'], @@ -147,19 +153,30 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a 'dorking_status': dorking_status, 'add_dsi': add_dsi, 'ps_s': accessible_subdomains, 'ps_e': emails_amount, 'ps_f': files_counter, 'ps_c': cookies_counter, 'ps_a': api_keys_counter, 'ps_w': website_elements_counter, 'ps_p': exposed_passwords_counter, 'ss_l': total_links_counter, 'ss_a': accessed_links_counter, 'hudsonrock_output': hudsonrock_output, "snapshotting_ui_mark": snapshotting_ui_mark, - 'virustotal_output': virustotal_output, 'securitytrails_output': securitytrails_output} + 'virustotal_output': virustotal_output, 'securitytrails_output': securitytrails_output, 'ps_string': ps_string, 'a_tops': total_ports, + 'a_temails': total_mails, 'a_tips': total_ips, 'a_tpv': total_vulns, 'robots_content': robots_content, 'sitemap_xml_content': sitemap_content, 'sitemap_txt_content': sitemap_links_content} html_report_name = report_folder + '//' + casename if generate_report(context, html_report_name, template_path): print(Fore.GREEN + "HTML report for {} case was created at {}".format(short_domain, report_ctime) + Style.RESET_ALL) print(Fore.GREEN + f"Scan elapsed time: {end}" + Style.RESET_ALL) - robots_content, sitemap_content, sitemap_links_content, dorking_content = fp.get_db_columns(report_folder) pdf_blob = fp.get_blob(html_report_name) db.insert_blob('HTML', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content, dorking_content, api_scan_db) - if os.path.exists(report_folder + '//04-dorking_results.txt'): - os.remove(report_folder + '//04-dorking_results.txt') - else: + + if delete_txt_files.lower() == 'y': + files_to_remove = [ + '04-dorking_results.txt', + '03-sitemap_links.txt', + '02-sitemap.txt', + '01-robots.txt' + ] + for file in files_to_remove: + file_path = os.path.join(report_folder, file) + if os.path.exists(file_path): + os.remove(file_path) + elif delete_txt_files.lower() == 'n': pass + except Exception as e: print(Fore.RED + 'Unable to create HTML report. See journal for details') logging.error(f'HTML REPORT CREATION: ERROR. 
REASON: {e}') diff --git a/requirements.txt b/requirements.txt index 2091be3..8bd2d6b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,3 +12,4 @@ openpyxl==3.1.2 PyMuPDF==1.24.7 selenium==4.28.1 webdriver-manager==4.0.2 +undetected_chromedriver==3.5.5 diff --git a/service/cli_init.py b/service/cli_init.py index d504c7b..1a3d7f3 100644 --- a/service/cli_init.py +++ b/service/cli_init.py @@ -20,7 +20,7 @@ def welcome_menu(self): fig = Figlet(font=wm_font) print('\n') self.console.print(fig.renderText('DPULSE'), style=preview_style) - print(Fore.MAGENTA + Style.BRIGHT + '[DPULSE-CLI] - [v1.3 stable] - [OSINT-TECHNOLOGIES]\n' + Style.RESET_ALL) + print(Fore.MAGENTA + Style.BRIGHT + '[DPULSE-CLI] - [v1.3.1 rolling] - [OSINT-TECHNOLOGIES]\n' + Style.RESET_ALL) print(Fore.MAGENTA + Style.BRIGHT + '[Visit our pages]\nGitHub repository: https://github.com/OSINT-TECHNOLOGIES\nPyPi page: https://pypi.org/project/dpulse/\nDocumentation: https://dpulse.readthedocs.io' + Style.RESET_ALL) def print_main_menu(self): diff --git a/service/config_processing.py b/service/config_processing.py index 49e0fa1..e7c3f64 100644 --- a/service/config_processing.py +++ b/service/config_processing.py @@ -27,17 +27,16 @@ def create_config(): ] config = configparser.ConfigParser() - config['HTML_REPORTING'] = {'template': 'default'} + config['HTML_REPORTING'] = {'template': 'modern', 'delete_txt_files': 'n'} config['LOGGING'] = {'log_level': 'info'} config['CLI VISUAL'] = {'preview_color': 'red', 'font': 'slant'} - config['DORKING'] = {'dorking_delay (secs)': '2', 'delay_step': '5'} + config['DORKING'] = {'dorking_delay (secs)': '2', 'delay_step': '5', 'full_path_to_browser': r'path\to\browser\for\dorking', 'browser_mode': 'nonheadless'} config['SNAPSHOTTING'] = {'installed_browser': 'firefox', 'opera_browser_path': 'None', 'wayback_retries': '3', 'wayback_req_pause': '2'} config['USER-AGENTS'] = {} for i, agent in enumerate(basic_user_agents): config['USER-AGENTS'][f'agent_{i + 1}'] = agent config['PROXIES'] = {'proxies_file_path': 'NONE'} - with open('service//config.ini', 'w') as configfile: config.write(configfile) @@ -60,7 +59,9 @@ def read_config(): wayback_retries_amount = config.get('SNAPSHOTTING', 'wayback_retries') wayback_requests_pause = config.get('SNAPSHOTTING', 'wayback_req_pause') html_report_template = config.get('HTML_REPORTING', 'template') - + dorking_browser = config.get('DORKING', 'full_path_to_browser') + dorking_browser_mode = config.get('DORKING', 'browser_mode') + delete_txt_files = config.get('HTML_REPORTING', 'delete_txt_files') config_values = { 'logging_level': log_level, @@ -74,7 +75,10 @@ def read_config(): 'opera_browser_path': opera_browser_path, 'wayback_retries_amount': wayback_retries_amount, 'wayback_requests_pause': wayback_requests_pause, - 'template': html_report_template + 'template': html_report_template, + 'dorking_browser': dorking_browser, + 'dorking_browser_mode': dorking_browser_mode, + 'delete_txt_files': delete_txt_files } return config_values diff --git a/service/pdf_report_templates/compromise_report_template.html b/service/pdf_report_templates/compromise_report_template.html deleted file mode 100644 index 0b7b26b..0000000 --- a/service/pdf_report_templates/compromise_report_template.html +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - -

OPEN SOURCE RESEARCH REPORT

-

Organization: {{org}}

-
- -

TABLE OF CONTENTS

-
-

1. General scan information

-

2. WHOIS information

-

3. Social medias links

-

4. Subdomains information

-

5. DNS & SSL information

-

6. Services & frameworks

-

7. Basic pre-pentest information

-

8. Dorking scan info

-

9. PageSearch results

-

10. API scan results

-
-
- -

GENERAL SCAN INFO

-
-

Total subdomains: {{a_tsf}}

-

Social media links: {{a_tsm}}

-
Robots.txt: {{robots_txt_result}}
-Sitemap.xml: {{sitemap_xml_result}}
-Dorking: {{dorking_status}}
-
-
- -

WHOIS INFORMATION

-
-

Domain: {{sh_domain}} URL: {{full_url}}

-
IP: {{ip_address}}
-Registrar: {{registrar}}
-Dates: {{creation_date}} → {{expiration_date}}
-
-
- -

SOCIAL MEDIAS

-
-

Facebook:

{% for l in fb_links %}⇒ {{ l }}{% endfor %}
-

Twitter/X:

{% for l in tw_links+xcom_links %}⇒ {{ l }}{% endfor %}
-

Instagram:

{% for l in inst_links %}⇒ {{ l }}{% endfor %}
-
-
- -

SUBDOMAINS

-
-

Found subdomains:

{% for sd in subdomains %}⇒ {{ sd }}{% endfor %}
-

IPs:

{% for sdip in subdomain_ip %}⇒ {{ sdip }}{% endfor %}
-
-
- -

DNS/SSL

-
-
NS: {{name_servers}}
-MX: {{mx_records}}
-SSL Issuer: {{issuer}}
-NotBefore: {{notBefore}}
-NotAfter: {{notAfter}}
-
-
- -

SERVICES

-
-

Web servers:

{% for ws in web_servers %}⇒ {{ ws }}{% endfor %}
-

CMS:

{% for cm in cms %}⇒ {{ cm }}{% endfor %}
-

Languages:

{% for pl in programming_languages %}⇒ {{ pl }}{% endfor %}
-
-
- -

BASIC PRE-PENTEST

-
-

Open ports:

{% for op in ports %}⇒ {{ op }}{% endfor %}
-

Vulnerabilities:

{% for vuln in vulns %}⇒ {{ vuln }}{% endfor %}
-
-
- -

DORKING SCAN

-
{{ add_dsi | safe }}
-
- -

PAGESEARCH

-
-
Subdomains: {{ps_s}}
-Emails: {{ps_e}}
-Documents: {{ps_f}}
-…
-
-
- -

VIRUSTOTAL

-
{{ virustotal_output }}
-

SECURITYTRAILS

-
{{ securitytrails_output }}
-

HUDSONROCK

-
{{ hudsonrock_output }}
-
- -

Created by DPULSE (OSINT-TECHNOLOGIES)

-

- GitHub | - PyPI -

- - - \ No newline at end of file diff --git a/service/pdf_report_templates/default_report_temp.html b/service/pdf_report_templates/default_report_temp.html deleted file mode 100644 index 3c6b6eb..0000000 --- a/service/pdf_report_templates/default_report_temp.html +++ /dev/null @@ -1,224 +0,0 @@ - - - - - - -

Open Source Information Research Report

-

{{org}}

-

 

-
-
 
-

Table of contents

-

1. General scan information

-

[BASIC SCAN INFO]

-

2. WHOIS information

-

3. Social medias links

-

4. Subdomains information

-

5. DNS & SSL information

-

6. Services & frameworks information

-

7. Basic pre-pentest information

-

[DORKING SCAN INFO]

-

8. Dorking queries and results links (if was selected)

-

[PAGESEARCH SCAN INFO]

-

9. PageSearch results (if was selected)

-

[API SCAN INFO]

-

10. VirusTotal API scan results (if was selected)

-

11. SecurityTrails API scan results (if was selected)

-

12. HudsonRock API scan results (if was selected)

-

 

-
-
 
-

GENERAL SCAN INFORMATION

-

Total subdomains found: {{a_tsf}}

-

Total social media links found: {{a_tsm}}

-

Status of robots.txt extraction: {{robots_txt_result}}

-

Status of sitemap.xml extraction: {{sitemap_xml_result}}

-

Status of sitemap.xml links extraction: {{sitemap_links}}

-

Google Dorking status: {{dorking_status}}

-

PageSearch conduction: {{pagesearch_ui_mark}}

-

Snapshotting conduction: {{snapshotting_ui_mark}}

-

Report creation time: {{ctime}}

-

 

-
-
 
-

WHOIS INFORMATION

-

Domain: {{sh_domain}}

-

Full URL: {{full_url}}

-

IP address: {{ip_address}}

-

Registrar: {{registrar}}

-

Creation date: {{creation_date}}

-

Expiration date: {{expiration_date}}

-

Organization name: {{org}}

-

Contact e-mails: {{mails}}

-

 

-
-
 
-

SOCIAL MEDIAS SEARCH RESULTS

-

FACEBOOK:

- -

TWITTER (+ X.com):

- - -

INSTAGRAM:

- -

TELEGRAM:

- -

TIKTOK:

- -

LINKEDIN:

- -

VKONTAKTE:

- -

YOUTUBE:

- -

ODNOKLASSNIKI:

- -

WECHAT:

- -

 

-
-
 
-

SUBDOMAINS ANALYSIS RESULTS

-

Found subdomains:

- -

Subdomains IP addresses:

- -

Subdomains e-mails:

- -

 

-
-
 
-

DNS & SSL INFORMATION

-

(DNS) Name servers: {{name_servers}}

-

(DNS) MX addresses: {{mx_records}}

-

(SSL) Issuer: {{issuer}}

-

(SSL) Subject: {{subject}}

-

(SSL) Not before: {{notBefore}}

-

(SSL) Not after: {{notAfter}}

-

(SSL) Certificate name: {{commonName}}

-

(SSL) Certificate serial number: {{serialNumber}}

-

 

-
-
 
-

SERVICES & FRAMEWORKS INFORMATION

-

Web servers:

- -

CMS:

- -

Used programming languages:

- -

Used web frameworks:

- -

Analytics service:

- -

Used JavaScript frameworks:

- -

Tags:

- -

Common Platform Enumeration:

- -

 

-
-
 
-

BASIC PRE-PENTEST INFORMATION

-

Open ports:

- -

Hostnames:

- -

Potential vulnerabilities:

- -

 

-
-
 
-

DORKING SCAN INFO

-
{{ add_dsi | safe }}
-

 

-
-
 
-

PAGESEARCH RESULTS

-

 

-

Amount of accessible subdomains: {{ps_s}}

-

Amount of email addresses: {{ps_e}}

-

Amount of found documents: {{ps_f}}

-

Amount of found cookies: {{ps_c}}

-

Amount of found API key: {{ps_a}}

-

Amount of WEB elements found: {{ps_w}}

-

Amount of exposed passwords found: {{ps_p}}

-

 

-
-
 
-

VIRUSTOTAL API SCAN RESULTS

-

 

-
{{ virustotal_output }}
-

 

-
-

 

-

SECURITYTRAILS API SCAN RESULTS

-

 

-
{{ securitytrails_output }}
-

 

-
-

 

-

HUDSONROCK API SCAN RESULTS

-

 

-

{{ hudsonrock_output }}

-

 

-
-

 

-

Created using DPULSE software by OSINT-TECHNOLOGIES

-

Visit our web-pages:

- diff --git a/service/pdf_report_templates/legacy_report_template.html b/service/pdf_report_templates/legacy_report_template.html new file mode 100644 index 0000000..51481f0 --- /dev/null +++ b/service/pdf_report_templates/legacy_report_template.html @@ -0,0 +1,297 @@ + + + + + OSINT report for {{ sh_domain }} + + + + +

Open Source Information Research Report

+

{{ org }}

+
+

Table of contents

+

1. General scan information

+

[BASIC SCAN INFO]

+

2. WHOIS information

+

3. Social media links

+

4. Subdomains information

+

5. DNS & SSL information

+

6. Services & frameworks information

+

7. Basic pre-pentest information

+

[DORKING SCAN INFO]

+

8. Dorking queries and result links (if selected)

+

[PAGESEARCH SCAN INFO]

+

9. PageSearch results (if selected)

+

[API SCAN INFO]

+

10. VirusTotal API scan results (if selected)

+

11. SecurityTrails API scan results (if selected)

+

12. HudsonRock API scan results (if selected)

+ +
+

GENERAL SCAN INFORMATION

+

Total subdomains found: {{ a_tsf }}

+

Total social media links found: {{ a_tsm }}

+

Status of robots.txt extraction: {{ robots_txt_result }}

+

Status of sitemap.xml extraction: {{ sitemap_xml_result }}

+

Status of sitemap.xml links extraction: {{ sitemap_links }}

+

Google Dorking status: {{ dorking_status }}

+

PageSearch conducted: {{ pagesearch_ui_mark }}

+

Snapshotting conducted: {{ snapshotting_ui_mark }}

+

Report creation time: {{ ctime }}

+ +
+

WHOIS INFORMATION

+

Domain: {{ sh_domain }}

+

Full URL: {{ full_url }}

+

IP address: {{ ip_address }}

+

Registrar: {{ registrar }}

+

Creation date: {{ creation_date }}

+

Expiration date: {{ expiration_date }}

+

Organization name: {{ org }}

+

Contact e-mails: {{ mails }}

+ +
+

SOCIAL MEDIA SEARCH RESULTS

+

FACEBOOK:

+ +

TWITTER (+ X.com):

+ +

INSTAGRAM:

+ +

TELEGRAM:

+ +

TIKTOK:

+ +

LINKEDIN:

+ +

VKONTAKTE:

+ +

YOUTUBE:

+ +

ODNOKLASSNIKI:

+ +

WECHAT:

+ + +
+

SUBDOMAINS ANALYSIS RESULTS

+

Found subdomains:

+ + +

Subdomains IP addresses:

+ + +

Subdomains e-mails:

+ + +
+

DNS & SSL INFORMATION

+

(DNS) Name servers: {{ name_servers }}

+

(DNS) MX addresses: {{ mx_records }}

+

(SSL) Issuer: {{ issuer }}

+

(SSL) Subject: {{ subject }}

+

(SSL) Not before: {{ notBefore }}

+

(SSL) Not after: {{ notAfter }}

+

(SSL) Certificate name: {{ commonName }}

+

(SSL) Certificate serial number: {{ serialNumber }}

+ +
+

SERVICES & FRAMEWORKS INFORMATION

+

Web servers:

+ +

CMS:

+ +

Used programming languages:

+ +

Used web frameworks:

+ +

Analytics service:

+ +

Used JavaScript frameworks:

+ +

Tags:

+ +

Common Platform Enumeration:

+ + +
+

BASIC PRE-PENTEST INFORMATION

+

Open ports:

+ +

Hostnames:

+ +

Potential vulnerabilities:

+ + +
+

DORKING SCAN INFO

+
{{ add_dsi | safe }}
+ +
+

PAGESEARCH RESULTS

+

Amount of accessible subdomains: {{ ps_s }}

+

Amount of email addresses: {{ ps_e }}

+

Amount of found documents: {{ ps_f }}

+

Amount of found cookies: {{ ps_c }}

+

Amount of found API keys: {{ ps_a }}

+

Amount of WEB elements found: {{ ps_w }}

+

Amount of exposed passwords found: {{ ps_p }}

+

PageSearch process listing:

+
{{ ps_string }}
+ +
+

VIRUSTOTAL API SCAN RESULTS

+
{{ virustotal_output }}
+ +
+

SECURITYTRAILS API SCAN RESULTS

+
{{ securitytrails_output }}
+ +
+

HUDSONROCK API SCAN RESULTS

+
{{ hudsonrock_output }}
+ +
+

Created using DPULSE software by OSINT-TECHNOLOGIES

+ +

Visit our web pages:

+ diff --git a/service/pdf_report_templates/modern_report_template.html b/service/pdf_report_templates/modern_report_template.html new file mode 100644 index 0000000..c6b73b2 --- /dev/null +++ b/service/pdf_report_templates/modern_report_template.html @@ -0,0 +1,1034 @@ + + + + + +OSINT Report for {{ sh_domain }} + + + + + + + + + + + +
+
+ + +
+ + + + +
+ +
+ +
+
+
+
+ + + +
+ +

Table of contents

+ 1. General scan information + 2. General scan statistics +

[BASIC SCAN INFO]

+ 3. WHOIS information + 4. Social media links + 5. Subdomains + 6. DNS & SSL + 7. Services & Frameworks + 8. Pre-Pentest Info + 9. Website configuration files +

[DORKING SCAN INFO]

+ 10. Dorking Results +

[PAGESEARCH SCAN INFO]

+ 11. PageSearch Results +

[API SCAN INFO]

+ 12. VirusTotal + 13. SecurityTrails + 14. HudsonRock +
+ +

Open Source Information Research Report

+

{{ org }}

+
+ +

GENERAL SCAN INFORMATION

+

Status of robots.txt extraction: {{ robots_txt_result }}

+

Status of sitemap.xml extraction: {{ sitemap_xml_result }}

+

Status of sitemap.xml links extraction: {{ sitemap_links }}

+

Google Dorking status: {{ dorking_status }}

+

PageSearch conducted: {{ pagesearch_ui_mark }}

+

Snapshotting conducted: {{ snapshotting_ui_mark }}

+

Report creation time: {{ ctime }}

+
↑ Back to top
+
+ +

GENERAL SCAN STATISTICS

+
+
+

Total subdomains found: {{ a_tsf }}

+

Total social media links found: {{ a_tsm }}

+

Total emails found: {{ a_temails }}

+

Total associated IP addresses found: {{ a_tips }}

+

Total open ports found: {{ a_tops }}

+

Total potential vulnerabilities found: {{ a_tpv }}

+

Accessible subdomains (PageSearch): {{ ps_s }}

+

Email addresses (PageSearch): {{ ps_e }}

+

Found documents (PageSearch): {{ ps_f }}

+

Found cookies (PageSearch): {{ ps_c }}

+

Found API keys (PageSearch): {{ ps_a }}

+

Found WEB elements (PageSearch): {{ ps_w }}

+

Exposed passwords (PageSearch): {{ ps_p }}

+
+
+ +
+
+
+ +

WHOIS INFORMATION

+

Domain: {{ sh_domain }}

+

Full URL: {{ full_url }}

+

IP address: {{ ip_address }}

+

Registrar: {{ registrar }}

+

Creation date: {{ creation_date }}

+

Expiration date: {{ expiration_date }}

+

Organization name: {{ org }}

+

Contact e-mails: {{ mails }}

+
↑ Back to top
+
+ + +

FACEBOOK:

+ + +

TWITTER (+ X.com):

+ + +

INSTAGRAM:

+ + +

TELEGRAM:

+ + +

TIKTOK:

+ + +

LINKEDIN:

+ + +

VKONTAKTE:

+ + +

YOUTUBE:

+ + +

ODNOKLASSNIKI:

+ + +

WECHAT:

+ +
↑ Back to top
+
+ +

SUBDOMAINS

+

Found subdomains:

+ + +

Subdomains IP addresses:

+ + +

Subdomains e-mails:

+ +
↑ Back to top
+
+ +

DNS & SSL INFORMATION

+

(DNS) Name servers: {{ name_servers }}

+

(DNS) MX addresses: {{ mx_records }}

+

(SSL) Issuer: {{ issuer }}

+

(SSL) Subject: {{ subject }}

+

(SSL) Not before: {{ notBefore }}

+

(SSL) Not after: {{ notAfter }}

+

(SSL) Certificate name: {{ commonName }}

+

(SSL) Certificate serial number: {{ serialNumber }}

+
↑ Back to top
+
+ +

SERVICES & FRAMEWORKS INFORMATION

+

Web servers:

+ + +

CMS:

+ + +

Used programming languages:

+ + +

Used web frameworks:

+ + +

Analytics service:

+ + +

Used JavaScript frameworks:

+ + +

Tags:

+ + +

Common Platform Enumeration:

+ +
↑ Back to top
+
+ +

BASIC PRE-PENTEST INFORMATION

+

Open ports:

+ + +

Hostnames:

+ + +

Potential vulnerabilities:

+ +
↑ Back to top
+
+ + +

WEBSITE CONFIGURATION FILES

+
+ robots.txt +
{{ robots_content | safe }}
+
+ +
+ sitemap.xml +
{{ sitemap_xml_content | safe }}
+
+ +
+ Sitemap links +
{{ sitemap_txt_content | safe }}
+
+
↑ Back to top
+
+ + +{% if add_dsi %} +

DORKING SCAN INFO

+
+ Dorking Results +
{{ add_dsi | safe }}
+
+
↑ Back to top
+
+{% endif %} + +{% if ps_string %} +

PAGESEARCH RESULTS

+
+ PageSearch Process Listing +
{{ ps_string }}
+
+
↑ Back to top
+
+{% endif %} + +{% if virustotal_output %} +

VIRUSTOTAL API SCAN RESULTS

+
+ VirusTotal Output +
{{ virustotal_output | safe }}
+
+
↑ Back to top
+
+{% endif %} + +{% if securitytrails_output %} +

SECURITYTRAILS API SCAN RESULTS

+
+ SecurityTrails Output +
{{ securitytrails_output | safe }}
+
+
↑ Back to top
+
+{% endif %} + +{% if hudsonrock_output %} +

HUDSONROCK API SCAN RESULTS

+
+ HudsonRock Output +
{{ hudsonrock_output | safe }}
+
+
↑ Back to top
+
+{% endif %} + +

Created using DPULSE software by OSINT-TECHNOLOGIES

+

Visit our web pages:

+ + + + + diff --git a/service/pdf_report_templates/monospaced_report_template.html b/service/pdf_report_templates/monospaced_report_template.html deleted file mode 100644 index e3521a4..0000000 --- a/service/pdf_report_templates/monospaced_report_template.html +++ /dev/null @@ -1,174 +0,0 @@ - - - - - - - - -

OPEN SOURCE RESEARCH REPORT

-
Organization: {{org}}
-
- -

TABLE OF CONTENTS

-
-1. General info
-2. WHOIS
-3. Social medias
-4. Subdomains
-5. DNS/SSL
-6. Services
-7. Pre-pentest
-8. Dorking
-9. PageSearch
-10. APIs
-
-
- -

GENERAL SCAN INFO

-
-Subdomains:  {{a_tsf}}
-Social:      {{a_tsm}}
-Robots.txt:  {{robots_txt_result}}
-Sitemap.xml: {{sitemap_xml_result}}
-Sitemap links: {{sitemap_links}}
-Dorking:     {{dorking_status}}
-PageSearch:  {{pagesearch_ui_mark}}
-Snapshotting: {{snapshotting_ui_mark}}
-Report time: {{ctime}}
-
-
- -

WHOIS INFORMATION

-
-Domain:     {{sh_domain}}
-URL:        {{full_url}}
-IP:         {{ip_address}}
-Registrar:  {{registrar}}
-Created:    {{creation_date}}
-Expires:    {{expiration_date}}
-Emails:     {{mails}}
-
-
- -

SOCIAL MEDIAS SEARCH RESULTS

-
-FACEBOOK:
-{% for link in fb_links %}⇒ {{ link }}{% endfor %}
-TWITTER/X:
-{% for link in tw_links+xcom_links %}⇒ {{ link }}{% endfor %}
-INSTAGRAM:
-{% for link in inst_links %}⇒ {{ link }}{% endfor %}
-TELEGRAM:
-{% for link in tg_links %}⇒ {{ link }}{% endfor %}
-TIKTOK:
-{% for link in tt_links %}⇒ {{ link }}{% endfor %}
-LINKEDIN:
-{% for link in li_links %}⇒ {{ link }}{% endfor %}
-VKONTAKTE:
-{% for link in vk_links %}⇒ {{ link }}{% endfor %}
-YOUTUBE:
-{% for link in yt_links %}⇒ {{ link }}{% endfor %}
-ODNOKLASSNIKI:
-{% for link in ok_links %}⇒ {{ link }}{% endfor %}
-WECHAT:
-{% for link in wc_links %}⇒ {{ link }}{% endfor %}
-
-
- -

SUBDOMAINS ANALYSIS RESULTS

-
-FOUND SUBDOMAINS:
-{% for sd in subdomains %}⇒ {{ sd }}{% endfor %}
-IPs:
-{% for sdip in subdomain_ip %}⇒ {{ sdip }}{% endfor %}
-Emails:
-{% for smails in subdomain_mails %}⇒ {{ smails }}{% endfor %}
-
-
- -

DNS & SSL INFORMATION

-
-NAME SERVERS: {{name_servers}}
-MX RECORDS:  {{mx_records}}
-SSL ISSUER:  {{issuer}}
-SUBJECT:     {{subject}}
-NOT BEFORE:  {{notBefore}}
-NOT AFTER:   {{notAfter}}
-COMMON NAME: {{commonName}}
-SERIAL:      {{serialNumber}}
-
-
- -

SERVICES & FRAMEWORKS INFORMATION

-
-WEB SERVERS:
-{% for ws in web_servers %}⇒ {{ ws }}{% endfor %}
-CMS:
-{% for cm in cms %}⇒ {{ cm }}{% endfor %}
-PROGRAMMING LANGUAGES:
-{% for pl in programming_languages %}⇒ {{ pl }}{% endfor %}
-WEB FRAMEWORKS:
-{% for wf in web_frameworks %}⇒ {{ wf }}{% endfor %}
-ANALYTICS:
-{% for analytic in analytics %}⇒ {{ analytic }}{% endfor %}
-JS FRAMEWORKS:
-{% for jsf in javascript_frameworks %}⇒ {{ jsf }}{% endfor %}
-TAGS:
-{% for tag in tags %}⇒ {{ tag }}{% endfor %}
-CPE:
-{% for cpe in cpes %}⇒ {{ cpe }}{% endfor %}
-
-
- -

BASIC PRE-PENTEST INFORMATION

-
-OPEN PORTS:
-{% for op in ports %}⇒ {{ op }}{% endfor %}
-HOSTNAMES:
-{% for hn in hostnames %}⇒ {{ hn }}{% endfor %}
-POTENTIAL VULNERABILITIES:
-{% for vuln in vulns %}⇒ {{ vuln }}{% endfor %}
-
-
- -

DORKING SCAN INFO

-
{{ add_dsi | safe }}
-
- -

PAGESEARCH RESULTS

-
-SUBDOMAINS FOUND: {{ps_s}}
-EMAILS FOUND:    {{ps_e}}
-DOCUMENTS:       {{ps_f}}
-COOKIES:         {{ps_c}}
-API KEYS:        {{ps_a}}
-WEB ELEMENTS:    {{ps_w}}
-PASSWORDS:       {{ps_p}}
-
-
- -

VIRUSTOTAL API SCAN RESULTS

-
{{ virustotal_output }}
-
- -

SECURITYTRAILS API SCAN RESULTS

-
{{ securitytrails_output }}
-
- -

HUDSONROCK API SCAN RESULTS

-
{{ hudsonrock_output }}
-
- -
-Created by DPULSE (OSINT-TECHNOLOGIES)
-GitHub:  https://github.com/OSINT-TECHNOLOGIES
-PyPI:    https://pypi.org/project/dpulse/
-
- - - \ No newline at end of file diff --git a/service/pdf_report_templates/paragraph_report_template.html b/service/pdf_report_templates/paragraph_report_template.html deleted file mode 100644 index cf966ab..0000000 --- a/service/pdf_report_templates/paragraph_report_template.html +++ /dev/null @@ -1,154 +0,0 @@ - - - - - - - - -

Open Source Research Report

-

{{org}}

-
- -

Table of Contents

-
-

1. General scan information

-

2. WHOIS information

-

3. Social medias links

-

4. Subdomains information

-

5. DNS & SSL information

-

6. Services & frameworks

-

7. Basic pre-pentest information

-

8. Dorking scan info

-

9. PageSearch results

-

10. API scan results

-
-
- -

GENERAL SCAN INFO

-
-

Total subdomains: {{a_tsf}}

-

Total social media links: {{a_tsm}}

-

Status of robots.txt: {{robots_txt_result}}

-

Status of sitemap.xml: {{sitemap_xml_result}}

-

Status of sitemap links: {{sitemap_links}}

-

Google Dorking: {{dorking_status}}

-

PageSearch: {{pagesearch_ui_mark}}

-

Snapshotting: {{snapshotting_ui_mark}}

-

Report time: {{ctime}}

-
-
- -

WHOIS INFORMATION

-
-

Domain: {{sh_domain}}

-

Full URL: {{full_url}}

-

IP address: {{ip_address}}

-

Registrar: {{registrar}}

-

Creation date: {{creation_date}}

-

Expiration date: {{expiration_date}}

-

Organization name: {{org}}

-

Contact e-mails: {{mails}}

-
-
- -

SOCIAL MEDIAS SEARCH RESULTS

-
-

FACEBOOK:

-

TWITTER (+ X.com):

-

INSTAGRAM:

-

TELEGRAM:

-

TIKTOK:

-

LINKEDIN:

-

VKONTAKTE:

-

YOUTUBE:

-

ODNOKLASSNIKI:

-

WECHAT:

-
-
- -

SUBDOMAINS ANALYSIS RESULTS

-
-

Found subdomains:

-

Subdomains IP addresses:

-

Subdomains e-mails:

-
-
- -

DNS & SSL INFORMATION

-
-

(DNS) Name servers: {{name_servers}}

-

(DNS) MX addresses: {{mx_records}}

-

(SSL) Issuer: {{issuer}}

-

(SSL) Subject: {{subject}}

-

(SSL) Not before: {{notBefore}}

-

(SSL) Not after: {{notAfter}}

-

(SSL) Certificate name: {{commonName}}

-

(SSL) Certificate serial number: {{serialNumber}}

-
-
- -

SERVICES & FRAMEWORKS INFORMATION

-
-

Web servers:

-

CMS:

-

Used programming languages:

-

Used web frameworks:

-

Analytics service:

-

Used JavaScript frameworks:

-

Tags:

-

Common Platform Enumeration:

-
-
- -

BASIC PRE-PENTEST INFORMATION

-
-

Open ports:

-

Hostnames:

-

Potential vulnerabilities:

-
-
- -

DORKING SCAN INFO

-
{{ add_dsi | safe }}
-
- -

PAGESEARCH RESULTS

-
-

Amount of accessible subdomains: {{ps_s}}

-

Amount of email addresses: {{ps_e}}

-

Amount of found documents: {{ps_f}}

-

Amount of found cookies: {{ps_c}}

-

Amount of found API key: {{ps_a}}

-

Amount of WEB elements found: {{ps_w}}

-

Amount of exposed passwords found: {{ps_p}}

-
-
- -

VIRUSTOTAL API SCAN RESULTS

-
{{ virustotal_output }}
-
- -

SECURITYTRAILS API SCAN RESULTS

-
{{ securitytrails_output }}
-
- -

HUDSONROCK API SCAN RESULTS

-
{{ hudsonrock_output }}
-
- -

Created by DPULSE (OSINT-TECHNOLOGIES)

- - - - \ No newline at end of file
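
For reference, the new HTML_REPORTING options introduced above ('template' and 'delete_txt_files') and the two bundled templates can be exercised in isolation with the short Python sketch below. It mirrors the template-selection hunk in report_assembling(), but the helper names, the fallback to the modern template for unrecognised config values, and the sample context are illustrative assumptions rather than DPULSE's actual code.

# Illustrative sketch only: consumes the new HTML_REPORTING keys added in this
# change and renders one of the two bundled Jinja2 templates. Helper names,
# the modern-template fallback and the sample context are assumptions.
import configparser
import os
from jinja2 import Environment, FileSystemLoader

TEMPLATES_DIR = 'service/pdf_report_templates'
TEMPLATE_FILES = {
    'modern': 'modern_report_template.html',   # added by this change
    'legacy': 'legacy_report_template.html',   # added by this change
}

def read_reporting_options(config_path='service/config.ini'):
    # Read the two new HTML_REPORTING keys, defaulting to the values create_config() writes.
    config = configparser.ConfigParser()
    config.read(config_path)
    return {
        'template': config.get('HTML_REPORTING', 'template', fallback='modern'),
        'delete_txt_files': config.get('HTML_REPORTING', 'delete_txt_files', fallback='n'),
    }

def resolve_template_path(selected_template):
    # Map the config value onto a bundled template; unknown values fall back to 'modern'.
    filename = TEMPLATE_FILES.get(selected_template.lower(), TEMPLATE_FILES['modern'])
    return os.path.join(TEMPLATES_DIR, filename)

def render_html_report(context, template_path, output_file):
    # Render the chosen template; context keys missing from the dict render as empty strings.
    env = Environment(loader=FileSystemLoader(os.path.dirname(template_path)))
    template = env.get_template(os.path.basename(template_path))
    with open(output_file, 'w', encoding='utf-8') as report:
        report.write(template.render(context))

if __name__ == '__main__':
    options = read_reporting_options()
    report_template = resolve_template_path(options['template'])
    render_html_report({'sh_domain': 'example.com', 'org': 'Example Org'},
                       report_template, 'example_report.html')

Unlike the if/elif chain in report_assembling(), the sketch always produces a defined template path by falling back to the modern template when the configured value is neither 'modern' nor 'legacy'.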