Skip to content

Commit fe2fcd0

Browse files
Added a filler message when a domain has no contact e-mails (#46)
1 parent 23b0c6e commit fe2fcd0

File tree

1 file changed

+19
-2
lines changed

1 file changed

+19
-2
lines changed

datagather_modules/crawl_processor.py

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,24 @@ def whois_gather(short_domain):
2626
print(Fore.RED + "Error while gathering WHOIS information. Reason: {}".format(e))
2727
pass
2828

29-
def mail_gather(url):
29+
def contact_mail_gather(url):
    """Scrape contact e-mail addresses from the page at *url*.

    Fetches the page, parses it with BeautifulSoup and collects the text of
    every anchor whose href contains "mailto".

    Returns:
        list[str]: the e-mail link texts found on the page, or
        str: the filler message 'No contact e-mails were found' when none exist, or
        None: implicitly, when the HTTP request fails (error is printed).
    """
    try:
        # Bounded timeout so a dead host cannot hang the whole crawl;
        # requests.Timeout is a RequestException and is handled below.
        r = requests.get(url, timeout=10)
        data = r.text
        soup = BeautifulSoup(data, "html.parser")
        # i.string is the tag's text; may be None for nested markup — kept as-is
        # to preserve the original output shape.
        mails = [i.string for i in soup.find_all(href=re.compile("mailto"))]
        if not mails:
            # Filler value so report generation has something to print (#46).
            return 'No contact e-mails were found'
        return mails
    except requests.RequestException as e:
        print(Fore.RED + "Error while gathering e-mails. Reason: {}".format(e))
45+
46+
def subdomains_mail_gather(url):
3047
try:
3148
r = requests.get(url)
3249
data = r.text
@@ -131,7 +148,7 @@ def domains_reverse_research(subdomains, report_file_type):
131148

132149
try:
133150
for subdomain_url in subdomain_urls:
134-
subdomain_mail = mail_gather(subdomain_url)
151+
subdomain_mail = subdomains_mail_gather(subdomain_url)
135152
subdomain_mails.append(subdomain_mail)
136153
subdomain_social = sm_gather(subdomain_url)
137154
subdomain_socials.append(subdomain_social)

0 commit comments

Comments
 (0)