@@ -60,7 +60,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
subdomain_urls, subdomain_mails, subdomain_ip, sd_socials = cp.domains_reverse_research(subdomains, report_file_type)
print(Fore.GREEN + 'Processing SSL certificate gathering' + Style.RESET_ALL)
issuer, subject, notBefore, notAfter, commonName, serialNumber = np.get_ssl_certificate(short_domain)
- print(Fore.GREEN + 'Processing MX records gathering' + Style.RESET_ALL)
+ print(Fore.GREEN + 'Processing DNS records gathering' + Style.RESET_ALL)
mx_records = np.get_dns_info(short_domain, report_file_type)
print(Fore.GREEN + 'Extracting robots.txt and sitemap.xml' + Style.RESET_ALL)
robots_txt_result = np.get_robots_txt(short_domain, robots_filepath)
@@ -96,6 +96,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
    else:
        print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
elif pagesearch_flag.lower() == 'n':
+     ps_emails_return = ""
    pass
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
              subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
@@ -112,6 +113,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
    else:
        print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
elif pagesearch_flag.lower() == 'n':
+     ps_emails_return = ""
    pass
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
              subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
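
The `ps_emails_return = ""` lines added in both hunks appear to give the variable a safe default when the user answers 'n' and PageSearch is skipped, so any later read of `ps_emails_return` in this function does not raise `UnboundLocalError`. A minimal sketch of that pattern, using a hypothetical function name and values rather than the project's actual code:

```python
# Hypothetical reduction of the branch structure above; names and values
# are illustrative only, not taken from the patched file.
def collect(pagesearch_flag: str) -> str:
    if pagesearch_flag.lower() == 'y':
        ps_emails_return = "mail@example.com"  # filled in by the PageSearch path
    elif pagesearch_flag.lower() == 'n':
        ps_emails_return = ""  # default mirroring the line this patch adds
        pass
    # With the default in place, both branches leave the variable bound;
    # without it, the 'n' path would crash on the next line.
    return ps_emails_return

print(collect('n'))  # prints an empty string instead of raising UnboundLocalError
```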