From e1352cde80301921477fb330d3cf108e36144acd Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 12 Sep 2024 02:11:52 +0300
Subject: [PATCH 01/72] Bumped version to 1.1.1 (rolling)
---
service/cli_init.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/service/cli_init.py b/service/cli_init.py
index 6ca004b..ef141e2 100644
--- a/service/cli_init.py
+++ b/service/cli_init.py
@@ -16,7 +16,7 @@ def welcome_menu(self):
fig = Figlet(font='slant')
print('\n')
self.console.print(fig.renderText('DPULSE'), style="red")
- print(Fore.MAGENTA + Style.BRIGHT + 'DPULSE-CLI // 1.1 (stable) // OSINT-TECHNOLOGIES\n' + Style.RESET_ALL)
+ print(Fore.MAGENTA + Style.BRIGHT + 'DPULSE-CLI // 1.1.1 (rolling) // OSINT-TECHNOLOGIES\n' + Style.RESET_ALL)
print(Fore.MAGENTA + Style.BRIGHT + 'Visit our pages:\nhttps://github.com/OSINT-TECHNOLOGIES\nhttps://pypi.org/project/dpulse/' + Style.RESET_ALL + '\n')
def print_main_menu(self):
From 11d1696f7267e6693e0424d3a97bb5a5176a7ca2 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 12 Sep 2024 02:12:54 +0300
Subject: [PATCH 02/72] Bumped rolling version to 1.1.1
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index eb50b8c..7d7562f 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
-
+
From 2e9fec0047207e282b60e73397d03691b0ab2e15 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 12 Sep 2024 02:17:48 +0300
Subject: [PATCH 03/72] Update README.md with new animated examples
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 7d7562f..6b0ab59 100644
--- a/README.md
+++ b/README.md
@@ -122,11 +122,11 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
### You can start DPULSE and see the main menu on the screen using one of the recommended commands in DPULSE root folder. Don't forget to install all requirements before starting DPULSE
-
+
### After choosing first menu point, you will be able to enter target's URL and case comment, and then you will see scanning progress
-
+
### Finally, DPULSE will create report folder which contains case name (basically URL of target), date and time of scan. All report folders are contained in DPULSE root folder
From 2967a8a7f44644df8dc1a0e4c9d34f07f36c2395 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 12 Sep 2024 02:22:21 +0300
Subject: [PATCH 04/72] Update README.md with fast-access links to download ZIP
archives
---
README.md | 3 +++
1 file changed, 3 insertions(+)
diff --git a/README.md b/README.md
index 6b0ab59..dbc0eb8 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,9 @@
> You can also contact the developer via e-mail: osint.technologies@gmail.com
+***[Download DPULSE stable ZIP archive (with latest stable changes)](https://github.com/OSINT-TECHNOLOGIES/dpulse/archive/refs/heads/main.zip)***
+
+***[Download DPULSE rolling ZIP archive (with latest developer commit)](https://github.com/OSINT-TECHNOLOGIES/dpulse/archive/refs/heads/rolling.zip)***
# About DPULSE
From 1638acaaff084a9baf90aada2a8def7a6df08336 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 12 Sep 2024 02:36:02 +0300
Subject: [PATCH 05/72] Update README.md with installation guide corrections
---
README.md | 27 ++++++++++++++++++++-------
1 file changed, 20 insertions(+), 7 deletions(-)
diff --git a/README.md b/README.md
index dbc0eb8..7c42307 100644
--- a/README.md
+++ b/README.md
@@ -28,6 +28,7 @@
***[Download DPULSE rolling ZIP archive (with latest developer commit)](https://github.com/OSINT-TECHNOLOGIES/dpulse/archive/refs/heads/rolling.zip)***
+
# About DPULSE
DPULSE is a software solution for conducting OSINT research in relation to a certain domain. In general, it provides you with a certain set of functions, such as:
@@ -66,7 +67,17 @@ Since DPULSE repository is using Poetry* to manage dependencies, it is higly rec
_* Poetry is a tool for dependency management and packaging in Python. It can be simply installed everywhere using `pip install poetry` command, but more instructions you can find on [Poetry official documentation page](https://python-poetry.org/docs/#ci-recommendations)_
-### First way (recommended on every OS, using Poetry)
+### First way (the simplest way, recommended on every OS)
+
+Just download DPULSE using fast-access links at the top of the README:
+
+
+
+Then just unpack downloaded archive, open terminal in DPULSE root folder and use `pip install -r requirements.txt` command to install requirements. Then type `python dpulse.py` in terminal, and that's where program starts.
+
+If `pip install -r requirements.txt` doesn't work, then just use `poetry install` command. After that, start DPULSE with `poetry run python dpulse.py`
+
+### Second way (recommended on every OS, using Poetry)
Use this set of commands to use recommended way of DPULSE installation:
@@ -77,7 +88,7 @@ Use this set of commands to use recommended way of DPULSE installation:
```
Then you simply start DPULSE using `poetry run python dpulse.py`
-### Second way (recommended on Windows systems, without using Poetry)
+### Third way (recommended on Windows systems, without using Poetry)
Simply download zip archive from assets in releases bookmark, just right here:
@@ -93,9 +104,11 @@ You also can use this installation way with some different approach using this s
pip install -r requirements.txt
```
+If `pip install -r requirements.txt` doesn't work, then just use `poetry install` command. After that, start DPULSE with `poetry run python dpulse.py`
+
## _Other ways_
-### Third way (using pip)
+### Fourth way (using pip)
You also can install DPULSE using pip manager. It'll install DPULSE and necessery dependencies in one command: `pip install dpulse`. Then you just locate DPULSE root folder and type `python dpulse.py` to start program.
@@ -103,13 +116,13 @@ You also can install DPULSE using pip manager. It'll install DPULSE and necesser
DPULSE has two pre-written installation scripts, both for Windows (installer.bat) and for Linux (installer.sh). You can use them to clone repository and install dependencies or only for dependencies installation. Keep in mind that installer.bat (Windows installer) requires installed Git to clone repository.
-### Windows installer usage
+### Windows installer usage
You can start installer.bat from terminal by typing `./installer.bat` in terminal. Then you choose menu item which you want to start.
If you have problems with starting installer.bat, you should try to start it in admin terminal.
-### Linux installer usage
+### Linux installer usage
To start installer.sh in Linux you should follow these steps in your terminal:
@@ -125,11 +138,11 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
### You can start DPULSE and see the main menu on the screen using one of the recommended commands in DPULSE root folder. Don't forget to install all requirements before starting DPULSE
-
+
### After choosing first menu point, you will be able to enter target's URL and case comment, and then you will see scanning progress
-
+
### Finally, DPULSE will create report folder which contains case name (basically URL of target), date and time of scan. All report folders are contained in DPULSE root folder
From 78985d512d165811476d7c123a8ce55a809b29e3 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 20:14:48 +0300
Subject: [PATCH 06/72] Update pyproject.toml with 1.1.1 version
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index af56925..49c530d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "dpulse"
-version = "1.1"
+version = "1.1.1"
description = "Convenient,fast and user-friendly collector of domain information from Open-Sources"
authors = ["OSINT-TECHNOLOGIES "]
readme = "README.md"
From 0341203219033ff5c000fde2afc36008451b0055 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 20:17:57 +0300
Subject: [PATCH 07/72] Extended README with new installation commands set for
rolling versions
---
README.md | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 7c42307..7c2f15b 100644
--- a/README.md
+++ b/README.md
@@ -79,14 +79,23 @@ If `pip install -r requirements.txt` doesn't work, then just use `poetry install
### Second way (recommended on every OS, using Poetry)
-Use this set of commands to use recommended way of DPULSE installation:
+Use this set of commands to install DPULSE stable versions:
```
git clone https://github.com/OSINT-TECHNOLOGIES/dpulse
cd dpulse
poetry install
```
-Then you simply start DPULSE using `poetry run python dpulse.py`
+
+Use this set of commands to install DPULSE rolling versions:
+
+ ```
+ git clone --branch rolling --single-branch https://github.com/OSINT-TECHNOLOGIES/dpulse.git
+ cd dpulse
+ poetry install
+ ```
+
+After installation, you simply start DPULSE using `poetry run python dpulse.py`
### Third way (recommended on Windows systems, without using Poetry)
From 343136be1542cbbab51e45e8d8f071b93a20ad5f Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 20:21:57 +0300
Subject: [PATCH 08/72] Update README.md
---
README.md | 24 +++---------------------
1 file changed, 3 insertions(+), 21 deletions(-)
diff --git a/README.md b/README.md
index 7c2f15b..19a2c92 100644
--- a/README.md
+++ b/README.md
@@ -67,7 +67,7 @@ Since DPULSE repository is using Poetry* to manage dependencies, it is higly rec
_* Poetry is a tool for dependency management and packaging in Python. It can be simply installed everywhere using `pip install poetry` command, but more instructions you can find on [Poetry official documentation page](https://python-poetry.org/docs/#ci-recommendations)_
-### First way (the simplest way, recommended on every OS)
+### First way (the simplest way)
Just download DPULSE using fast-access links at the top of the README:
@@ -77,7 +77,7 @@ Then just unpack downloaded archive, open terminal in DPULSE root folder and use
If `pip install -r requirements.txt` doesn't work, then just use `poetry install` command. After that, start DPULSE with `poetry run python dpulse.py`
-### Second way (recommended on every OS, using Poetry)
+### Second way (the most correct way)
Use this set of commands to install DPULSE stable versions:
@@ -97,27 +97,9 @@ Use this set of commands to install DPULSE rolling versions:
After installation, you simply start DPULSE using `poetry run python dpulse.py`
-### Third way (recommended on Windows systems, without using Poetry)
-
-Simply download zip archive from assets in releases bookmark, just right here:
-
-
-
-Then you just unpack the archive, open terminal in DPULSE root folder and use `pip install -r requirements.txt` command to install requirements. Then type `python dpulse.py` in terminal, and that's where program starts.
-
-You also can use this installation way with some different approach using this set of commands:
-
- ```
- git clone https://github.com/OSINT-TECHNOLOGIES/dpulse
- cd dpulse
- pip install -r requirements.txt
- ```
-
-If `pip install -r requirements.txt` doesn't work, then just use `poetry install` command. After that, start DPULSE with `poetry run python dpulse.py`
-
## _Other ways_
-### Fourth way (using pip)
+### Third way (using pip manager)
You also can install DPULSE using pip manager. It'll install DPULSE and necessery dependencies in one command: `pip install dpulse`. Then you just locate DPULSE root folder and type `python dpulse.py` to start program.
From d6d8d3424e54f78844b2328a9dad5d5c58e55d9b Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 23:05:07 +0300
Subject: [PATCH 09/72] Create __init__.py
---
dorking/__init__.py | 1 +
1 file changed, 1 insertion(+)
create mode 100644 dorking/__init__.py
diff --git a/dorking/__init__.py b/dorking/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/dorking/__init__.py
@@ -0,0 +1 @@
+
From 821b9e50fe9d66238dd7427fe1573a0c478cd9e5 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 16:05:24 -0400
Subject: [PATCH 10/72] Add files via upload
---
dorking/basic_dorking.db | Bin 0 -> 20480 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 dorking/basic_dorking.db
diff --git a/dorking/basic_dorking.db b/dorking/basic_dorking.db
new file mode 100644
index 0000000000000000000000000000000000000000..0421fb0e05dd0a27ff09ba51ee40caa19a87d1cd
GIT binary patch
literal 20480
zcmeI)J#W)M7zglk&Uvv>`mlviTDi&6s1*{RfZC-5WK|Jq(uPE$Q{*OYWi_$WI8iI1
z4lqJL2Ye0~n3$LlNS%Ozg_i*_FmTs4iJO98L8{9C6kqQA_|BJKyx32ayDJ5!Ya8oc
zr)71GYwSEz6n4`v7-J!^g~iq{ifE|)j(C^-Ka~)>T-uG(cczZ*v9W!6Mvp`T1p*L&
z00bZa0SG_<0uX=z1paG*Wi^tRnNe1|)>_l{x0>R{wC3AeJ=?91cEh*JX0BoymE5g@
zX^b2@?OH8+R*cL%$SBc7I;}iR9?ZFB`A)so@H(4*pcy(gS1J%m8PmF!lE>GaMoP;U
zE9RnEHj1T+QLGjUMzxq9?rCfQ{xMQwP4`wrR)k-N}6z5(t
ziSXJ$6prd9(e>?0-U>Fw{T1;MSak1$%!RQOU
zr#IsM{0Tj#7M19_*oguG2tWV=5P$##AOHafKmY;|INJiLMaS)R5;|vD*=WwMS@q3c
z+wjFRiTV4x32hLld+xfk(GOgTakk*NU3;fHpK4pazwLDzsX;Uvi1vKDBOg=@qR9wn
zbBAZ^*c*=DwLATR$?zc5@V4EiXEpkvggRJCuN@3%oU>%hYK!ISA
Date: Sat, 14 Sep 2024 16:16:28 -0400
Subject: [PATCH 11/72] Add files via upload
---
dorking/files_dorking.db | Bin 0 -> 20480 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 dorking/files_dorking.db
diff --git a/dorking/files_dorking.db b/dorking/files_dorking.db
new file mode 100644
index 0000000000000000000000000000000000000000..335ad281e1e1a438efd91de5026858efc6ee3d74
GIT binary patch
literal 20480
zcmeI4J#1T56vyAo^LKpjalZVX6Wcr-ffBVKK`<3rXh0USNS)SoM8jfS+i9FS_KWRS
z22`Y$4y6+l17AZ~n3#Y{z|byCh#~|MVqii7D)BKP1_thp`mWCnLs?1BQ+(p*pU-)I
zKfesue=8?XthNSCceC5usSVsEBVm}PanyAU!!X4yC~p2{i8}#*NBoxjZkuU5ymo=X
zUq*mmG58h!J=}z=cWWX}2FL&zAOmE843GgbKnBPF86X1(*TC^W$Ubz)+!)kOx10Ul
zwpf%_`_0{R%}#y)Zt&@HVWm=VD=SZ}7TkT$UFg(ynukT_;>Cx8VS9ewymbHG%*~dV
zTWxfEXZz^xm^LeiNX}hgv1m>!sp}^}=EQNx5jH
za7v(m%-`X!
z^0U0mpWyfL5c`#V!#-u#*lWyV@=j1EQHWGC&5%02v?yWPl8i0Wv@a$iV;1fSt9B
zyj&y=Ui6yFtxn7Bi)HBY%NLxCWgJnU-tBDqB$>91B}E#i{m1P&JW8zQ6qv=JSL-Q|
z!r-Nrr+|Y&?Yw$bwvEA#_n7KP>R?HM84S*})M?n$7IKmThkY
zCNQY?`~IwC0=wE)k2`>mQR{m{zr*HulpqQ)47@D`oH(AfHP}(&opDUI``wNj7Q@4a
zqZT`gLA`%obwn^23@hq{
z5Oow21Fxap`Iz_{%|gk5zu`~#18&2Q@B@4YU&9yhIoyCx#AgB@!256w-i9~eb+`(b
z;T0G{58AK^FTnFq5zVxb0Wv@a$N(8217v^zUm7HfagOeSTWnUQsRTGpv4StloDotTg{k&qQYR?cN*OxAc@*70#!
zV=-ByQCTAqS;JvjLm^p%L0JO!;Zo&_=x?+
zzhaELe3`fTZT2nu41VUetRfbxGw133TR
z2UPx_)Bwu=XEcEF|7q>aasEH0wV?ceQUfUepU_;5^8bW(44nUiHVWncTmvZoXWA^B
z|Hn0g^Z#*e7|Q=+8bJAfR0Amgk7x(M`F~hzLHU1311SFwYO`?uAJ7QS|1E78%Kyh~
zD<4&tHp>6y_y0}!-4H*tkpVJ52FL&zAOmE843GgbKnBPF86X1(!vL7p{NCqPcm5SZ
F_8&S;xpx2n
literal 0
HcmV?d00001
From 7978b3f6d50089c59f0a2832a223d6193bb7d301 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 16:40:55 -0400
Subject: [PATCH 12/72] Add files via upload
---
dorking/iot_dorking.db | Bin 0 -> 20480 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 dorking/iot_dorking.db
diff --git a/dorking/iot_dorking.db b/dorking/iot_dorking.db
new file mode 100644
index 0000000000000000000000000000000000000000..3a1d4c954b9a5d6488714bb946218042a9e63344
GIT binary patch
literal 20480
zcmeI4J!~UI6vt;g{#qwH&q+3MauRsuhD3MA-JY?|Nku>_6fWoFf+La2+Sq%rBKC5-
z>*GsT+?7`W#YrG4BoGw^Pyq!ER7V5NRaA(t3K~R#1T(RBcI_*mN)f%0_j%|4=Di*N
z}AOR$R1dsp{Kmter2_OL^fCP{L5;!KXo6T3wo}~wob9m&2H;%+DYA1AW
z4BbI@dYXHB&)#g?W_$CgZQGn$yE<_C?m1DpcKb0}s8p-;&LhV?JwNL7{NSZfon$8Z
z)RbN`SM`!slZ!iEucmFa+V(T{p4r-Io2~urZF9f1b#3217cZ)5ZToqVdN{qiw{>-M
z@4&obAFQf==H`BTXR9SHe${TZ*Ywhw#qt%qN{KhFYlh-xeB+H<#sVW3Mk(iI
zFIFq_j5J1S=ng_ZP>DH}=!$#tz=;z}Wk%|wikn`~^<%vPsvwL4*XhTJWzNXuQ9<7y
zcu{hVWnE=LH@N9_)n+TRsb)?z3{|rkRnqkbQQ#lNnMz5O^xSl>B~5L(=N=AUhzpjA
zO1CbG5$lTuMy%0(*VA%d$&Hk}kYnWhXm!(yTNqizy3u$dRu`G-&}=sAO3orj85v6@88uDDVo}CILB@Pu##~OuY*t3bWXxn3)3oFJe_kbU1-^&Z;Y0BR;4SzP
zx_pCsK%fq1c$I(3clo#cXZ|j~bAmwdQb+&^AOR$R1dsp{Kmter2_OL^aB>9Buys0e
zqaX7B7ukhzD&_w#EVIT~o%H|blfVane_il$%ZHVe=X4`{Qu$-v&Q>P`~QVRPW%6PgPk9*PW=B|!aC*u
zXOj+T|DPpt+W*fiGD{o(ups6C%isS~_=AWS4kUmCkN^@u0!RP}AOR$R1dsp{KmthM
Ov=GoKt5Wqp0LH&TS=^%l
literal 0
HcmV?d00001
From a3db8cb12103db3bbfb3dfa698ce592b0219bc8a Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 00:10:36 +0300
Subject: [PATCH 13/72] Some minor cosmetical changes in DB CLI
---
service/db_processing.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/service/db_processing.py b/service/db_processing.py
index d25cdf6..ed7aaf0 100644
--- a/service/db_processing.py
+++ b/service/db_processing.py
@@ -32,7 +32,7 @@ def db_creation(db_path):
sqlite_connection.close()
print(Fore.GREEN + "Successfully created report storage database" + Style.RESET_ALL)
else:
- print(Fore.GREEN + "Report storage database exists" + Style.RESET_ALL)
+ print(Fore.GREEN + "Report storage database presence: OK" + Style.RESET_ALL)
def db_select():
db_creation('report_storage.db')
From 9a7a63c21f7ae208236ecdc0c0b42f47eeff96cd Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 00:11:19 +0300
Subject: [PATCH 14/72] Deprecated "Settings" menu, added check on dorking
folder integrity
---
dpulse.py | 43 ++++++++++++++++++++-----------------------
1 file changed, 20 insertions(+), 23 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index dc22b29..338df8d 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -2,6 +2,7 @@
sys.path.append('datagather_modules')
sys.path.append('service')
sys.path.append('reporting_modules')
+sys.path.append('dorking')
import pdf_report_creation as pdf_rc
import cli_init
@@ -39,6 +40,23 @@ def time_processing(end):
endtime_string = f'approximately {time_minutes} minutes'
return endtime_string
+def dorks_files_check():
+ dorks_path = 'dorking//'
+ dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db']
+ dorks_files_counter = 0
+ for dork_files in dorks_files:
+ files_path = os.path.join(dorks_path, dork_files)
+ if os.path.isfile(files_path):
+ dorks_files_counter += 1
+ else:
+ pass
+
+ if dorks_files_counter == 3:
+ print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
+ else:
+ print(Fore.RED + "Dorks databases presence: NOT OK\nSome files may not be in folder. Please compare dorking folder with the same folder on the official repository\n" + Style.RESET_ALL)
+ sys.exit()
+
class ProgressBar(threading.Thread):
def __init__(self):
super(ProgressBar, self).__init__()
@@ -52,7 +70,7 @@ def run(self):
sleep(0.1)
db.db_creation('report_storage.db')
-
+dorks_files_check()
def run():
while True:
try:
@@ -174,28 +192,7 @@ def run():
print(Fore.RED + "\nUnsupported PageSearch mode. Please choose between Y, N or SI")
elif choice == "2":
- cli.print_settings_menu()
- choice_settings = input(Fore.YELLOW + "Enter your choice >> ")
- if choice_settings == '1':
- with open('dorkslist.txt', 'r') as cfg_file:
- print(Fore.LIGHTMAGENTA_EX + '\n[START OF CONFIG FILE]' + Style.RESET_ALL)
- print('\n' + Fore.LIGHTBLUE_EX + cfg_file.read() + Style.RESET_ALL)
- print(Fore.LIGHTMAGENTA_EX + '\n[END OF CONFIG FILE]\n' + Style.RESET_ALL)
- continue
- elif choice_settings == '2':
- with open('dorkslist.txt', 'a+') as cfg_file:
- print(Fore.LIGHTMAGENTA_EX + '\n[START OF CONFIG FILE]' + Style.RESET_ALL)
- cfg_file.seek(0)
- print('\n' + Fore.LIGHTBLUE_EX + cfg_file.read() + Style.RESET_ALL)
- print(Fore.LIGHTMAGENTA_EX + '\n[END OF CONFIG FILE]\n' + Style.RESET_ALL)
- new_line = str(input(Fore.YELLOW + "Input new dork >> ") + Style.RESET_ALL)
- print(Fore.GREEN + "New dork successfully added to dorks list" + Style.RESET_ALL)
- cfg_file.write(new_line + '\n')
- continue
- elif choice_settings == '3':
- continue
- else:
- print(Fore.RED + "\nInvalid menu item. Please select between existing menu items")
+ print(Fore.RED + "Sorry, but this menu is deprecated since v1.1.1. It will be back soon")
elif choice == "3":
cli.print_help_menu()
From fda95e4cdfae310fb3fd2f95eaea7305d8d6f1c9 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 00:11:57 +0300
Subject: [PATCH 15/72] Removed dorkslist.txt check in PDF reporting module
---
reporting_modules/pdf_report_creation.py | 8 --------
1 file changed, 8 deletions(-)
diff --git a/reporting_modules/pdf_report_creation.py b/reporting_modules/pdf_report_creation.py
index f5b4cd9..8710b12 100644
--- a/reporting_modules/pdf_report_creation.py
+++ b/reporting_modules/pdf_report_creation.py
@@ -18,14 +18,6 @@
print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
sys.exit()
-try:
- current_script = os.path.realpath(__file__)
- current_directory = os.path.dirname(current_script)
- cfg_file_path = os.path.join(current_directory, fp.find_files('dorkslist.txt'))
- print(Fore.GREEN + 'Dorks list was found at {}'.format(cfg_file_path))
-except TypeError as e:
- print(Fore.RED + 'Dorks list was not found in DPULSE root directory. Reason: {}'.format(e) + Style.RESET_ALL)
- sys.exit()
short_domain = ''
search_query = []
From 8973b4cef289537053d86596a34f8071ca1b477a Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:47:24 +0300
Subject: [PATCH 16/72] Added basic support for reworked Google Dorking
---
dpulse.py | 32 +++++++++++++++++++++++---------
1 file changed, 23 insertions(+), 9 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index 338df8d..10d4769 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -96,6 +96,7 @@ def run():
else:
print(Fore.GREEN + "[!] SI mode suppose you to have sitemap_links.txt file in report folder [!]\n[!] It'll visit every link from this file [!]")
pagesearch_flag = input(Fore.YELLOW + "Would you like to use PageSearch function? [Y/N/SI] >> ")
+ dorking_flag = input(Fore.YELLOW + "Select Dorking mode [Basic/IoT/Files/None] >> ")
if pagesearch_flag.lower() == 'y':
keywords_input = input(Fore.YELLOW + "Enter keywords (separate by comma) to search in files during PageSearch process (or write None if you don't need it) >> ")
if keywords_input.lower() != "none":
@@ -123,10 +124,23 @@ def run():
pagesearch_ui_mark = 'Yes, in Sitemap Inspection mode'
else:
pagesearch_ui_mark = 'Yes, without keywords search'
+ if dorking_flag.lower() not in ['basic', 'iot', 'none', 'files']:
+ print(Fore.RED + "\nInvalid Dorking mode. Please select mode among Basic, IoT, Files or None")
+ break
+ else:
+ if dorking_flag.lower() == 'basic':
+ dorking_ui_mark = 'Yes, Basic dorking (N dorks)'
+ elif dorking_flag.lower() == 'iot':
+ dorking_ui_mark = 'Yes, IoT dorking (N dorks)'
+ elif dorking_flag.lower() == 'none':
+ dorking_ui_mark = 'No'
+ elif dorking_flag.lower() == 'files':
+ dorking_ui_mark = 'Yes, Files dorking (N dorks)'
print(Fore.LIGHTMAGENTA_EX + "\n[PRE-SCAN SUMMARY]\n" + Style.RESET_ALL)
print(Fore.GREEN + "Determined target: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + short_domain + Style.RESET_ALL)
print(Fore.GREEN + "Report type: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + report_filetype.lower() + Style.RESET_ALL)
print(Fore.GREEN + "PageSearch conduction: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + pagesearch_ui_mark + Style.RESET_ALL)
+ print(Fore.GREEN + "Dorking conduction: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + dorking_ui_mark + Style.RESET_ALL)
print(Fore.GREEN + "Case comment: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + case_comment + Style.RESET_ALL + "\n")
print(Fore.LIGHTMAGENTA_EX + "[BASIC SCAN START]\n" + Style.RESET_ALL)
spinner_thread = ProgressBar()
@@ -135,15 +149,15 @@ def run():
try:
if pagesearch_flag.lower() == 'y':
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag, dorking_flag.lower())
end = time() - start
elif pagesearch_flag.lower() == 'si':
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag, dorking_flag.lower())
end = time() - start
else:
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), '', keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), '', keywords_flag, dorking_flag.lower())
end = time() - start
endtime_string = time_processing(end)
pdf_rc.report_assembling(short_domain, url, case_comment, data_array, report_info_array, pagesearch_ui_mark, pagesearch_flag.lower(), endtime_string)
@@ -154,15 +168,15 @@ def run():
try:
if pagesearch_flag.lower() == 'y':
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag, dorking_flag.lower())
end = time() - start
elif pagesearch_flag.lower() == 'si':
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag, dorking_flag.lower())
end = time() - start
else:
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), '', keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), '', keywords_flag, dorking_flag.lower())
end = time() - start
endtime_string = time_processing(end)
xlsx_rc.create_report(short_domain, url, case_comment, data_array, report_info_array, pagesearch_ui_mark, pagesearch_flag.lower(), endtime_string)
@@ -173,15 +187,15 @@ def run():
try:
if pagesearch_flag.lower() == 'y':
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag, dorking_flag.lower())
end = time() - start
elif pagesearch_flag.lower() == 'si':
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), keywords_list, keywords_flag, dorking_flag.lower())
end = time() - start
else:
start = time()
- data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), '', keywords_flag)
+ data_array, report_info_array = data_processing.data_gathering(short_domain, url, report_filetype.lower(), pagesearch_flag.lower(), '', keywords_flag, dorking_flag.lower())
end = time() - start
endtime_string = time_processing(end)
html_rc.report_assembling(short_domain, url, case_comment, data_array, report_info_array, pagesearch_ui_mark, pagesearch_flag.lower(), endtime_string)
From c78a2fa283ab7b8b303dd6f043f83890f0336d2c Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:47:43 +0300
Subject: [PATCH 17/72] Added basic support for reworked Google Dorking
---
datagather_modules/data_assembler.py | 88 +++++++++++++++++++++++++---
1 file changed, 79 insertions(+), 9 deletions(-)
diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py
index da05385..199dffc 100644
--- a/datagather_modules/data_assembler.py
+++ b/datagather_modules/data_assembler.py
@@ -1,9 +1,10 @@
import sys
sys.path.append('service')
sys.path.append('pagesearch')
+sys.path.append('dorking')
import crawl_processor as cp
-import dorking_processor as dp
+import dorking_handler as dp
import networking_processor as np
from pagesearch_main import normal_search, sitemap_inspection_search
from logs_processing import logging
@@ -20,6 +21,21 @@
print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
sys.exit()
+def establishing_dork_db_connection(dorking_flag):
+ if dorking_flag == 'basic':
+ conn = sqlite3.connect('dorking//basic_dorking.db')
+ table = 'basic_dorks'
+ elif dorking_flag == 'iot':
+ conn = sqlite3.connect('dorking//iot_dorking.db')
+ table = 'iot_dorks'
+ elif dorking_flag == 'files':
+ conn = sqlite3.connect('dorking//files_dorking.db')
+ table = 'files_dorks'
+ cursor = conn.cursor()
+ cursor.execute(f"SELECT dork_id, dork FROM {table}")
+ dorks = cursor.fetchall()
+ return conn, dorks
+
class DataProcessing():
def report_preprocessing(self, short_domain, report_file_type):
report_ctime = datetime.now().strftime('%d-%m-%Y, %H:%M:%S')
@@ -42,7 +58,7 @@ def report_preprocessing(self, short_domain, report_file_type):
os.makedirs(report_folder, exist_ok=True)
return casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, files_ctime, report_ctime
- def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, keywords, keywords_flag):
+ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, keywords, keywords_flag, dorking_flag):
casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, ctime, report_ctime = self.report_preprocessing(short_domain, report_file_type)
logging.info(f'### THIS LOG PART FOR {casename} CASE, TIME: {ctime} STARTS HERE')
print(Fore.GREEN + "Started scanning domain" + Style.RESET_ALL)
@@ -84,10 +100,10 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
print(Fore.GREEN + 'Processing Shodan InternetDB search' + Style.RESET_ALL)
ports, hostnames, cpes, tags, vulns = np.query_internetdb(ip, report_file_type)
print(Fore.GREEN + 'Processing Google Dorking' + Style.RESET_ALL)
- if report_file_type == 'pdf' or report_file_type == 'html':
- dorking_status = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain))
- elif report_file_type == 'xlsx':
- dorking_status, dorking_results = dp.transfer_results_to_xlsx(dp.get_dorking_query(short_domain))
+ #if report_file_type == 'pdf' or report_file_type == 'html':
+ #dorking_status = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain))
+ #elif report_file_type == 'xlsx':
+ #dorking_status, dorking_results = dp.transfer_results_to_xlsx(dp.get_dorking_query(short_domain))
common_socials = {key: social_medias.get(key, []) + sd_socials.get(key, []) for key in set(social_medias) | set(sd_socials)}
for key in common_socials:
common_socials[key] = list(set(common_socials[key]))
@@ -117,11 +133,29 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
ps_emails_return = ""
pass
+ if dorking_flag == 'none':
+ pass
+ elif dorking_flag == 'basic':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ elif dorking_flag == 'iot':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ elif dorking_flag == 'files':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
robots_txt_result, sitemap_xml_result, sitemap_links_status,
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
- hostnames, cpes, tags, vulns, dorking_status, common_socials, total_socials, ps_emails_return,
+ hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list]
@@ -148,11 +182,29 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 0
pass
+ if dorking_flag == 'none':
+ pass
+ elif dorking_flag == 'basic':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ elif dorking_flag == 'iot':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ elif dorking_flag == 'files':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
robots_txt_result, sitemap_xml_result, sitemap_links_status,
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
- hostnames, cpes, tags, vulns, dorking_status, common_socials, total_socials, ps_emails_return,
+ hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, dorking_results]
@@ -179,11 +231,29 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
ps_emails_return = ""
pass
+ if dorking_flag == 'none':
+ pass
+ elif dorking_flag == 'basic':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ elif dorking_flag == 'iot':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ elif dorking_flag == 'files':
+ conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+ dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
+
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
subdomain_ip, issuer, subject, notBefore, notAfter, commonName, serialNumber, mx_records,
robots_txt_result, sitemap_xml_result, sitemap_links_status,
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
- hostnames, cpes, tags, vulns, dorking_status, common_socials, total_socials, ps_emails_return,
+ hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list]
From afe3ea9d2c419cee34327794f4910655a1cf2133 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sat, 14 Sep 2024 20:48:09 -0400
Subject: [PATCH 18/72] Added basic support for reworked Google Dorking
---
dorking/dorking_handler.py | 38 ++++++++++++++++++++++++++++++++++++++
1 file changed, 38 insertions(+)
create mode 100644 dorking/dorking_handler.py
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
new file mode 100644
index 0000000..acd015a
--- /dev/null
+++ b/dorking/dorking_handler.py
@@ -0,0 +1,38 @@
+import mechanicalsoup
+from bs4 import BeautifulSoup
+import requests
+from colorama import Fore, Style
+
+def scrape_dork(dork, short_domain):
+ browser = mechanicalsoup.Browser(soup_config={'features': 'html.parser'})
+ modified_dork = dork.replace("{}", f"{short_domain}")
+ print(Fore.GREEN + f"Dorking with " + Style.RESET_ALL + Fore.LIGHTCYAN_EX + f"{modified_dork}" + Style.RESET_ALL + Fore.GREEN + " dork" + Style.RESET_ALL)
+ search_result = browser.get(f"https://www.google.com/search?q={modified_dork}")
+ soup = BeautifulSoup(search_result.content, "html.parser")
+ results = soup.find_all("a", href=True)
+ full_urls = []
+ for result in results:
+ relative_link = result['href']
+ if relative_link.startswith("/"):
+ pass
+ else: # Sometimes Google might give an already full URL
+ full_urls.append(relative_link)
+
+ return full_urls
+
+def composing_dorking(dorks, conn, short_domain, report_path):
+ results_file = open(f"{report_path}//dork_results.txt", "w")
+ for dork_id, dork in dorks:
+ results = scrape_dork(dork, short_domain)
+ #print(Fore.GREEN + f"\nDORK #{dork_id}: {dork}")
+ results_file.write(f"\nDORK #{dork_id}: {dork}\n")
+ if results:
+ for result in results:
+ #print(f"=> {result_link}")
+ results_file.write(f"=> {result}\n")
+ print(Fore.LIGHTGREEN_EX + "-------------------------------------------------" + Style.RESET_ALL)
+ else:
+ #print("No results found for this dork.")
+ results_file.write("No results found for this dork.\n")
+ conn.close()
+ results_file.close()
\ No newline at end of file
From 7a2e99b28098aebc6f47c320eb7417330fc223a1 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:49:13 +0300
Subject: [PATCH 19/72] Added basic support for reworked Google Dorking
---
reporting_modules/html_report_creation.py | 38 +++++++++++------------
1 file changed, 19 insertions(+), 19 deletions(-)
diff --git a/reporting_modules/html_report_creation.py b/reporting_modules/html_report_creation.py
index 4cfd760..532bd37 100644
--- a/reporting_modules/html_report_creation.py
+++ b/reporting_modules/html_report_creation.py
@@ -57,20 +57,20 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
cpes = data_array[27]
tags = data_array[28]
vulns = data_array[29]
- dorking_status = data_array[30]
- common_socials = data_array[31]
- total_socials = data_array[32]
- ps_emails_return = data_array[33]
- accessible_subdomains = data_array[34]
- emails_amount = data_array[35]
- files_counter = data_array[36]
- cookies_counter = data_array[37]
- api_keys_counter = data_array[38]
- website_elements_counter = data_array[39]
- exposed_passwords_counter = data_array[40]
- total_links_counter = data_array[41]
- accessed_links_counter = data_array[42]
- keywords_messages_list = data_array[43]
+ #dorking_status = data_array[30]
+ common_socials = data_array[30]
+ total_socials = data_array[31]
+ ps_emails_return = data_array[32]
+ accessible_subdomains = data_array[33]
+ emails_amount = data_array[34]
+ files_counter = data_array[35]
+ cookies_counter = data_array[36]
+ api_keys_counter = data_array[37]
+ website_elements_counter = data_array[38]
+ exposed_passwords_counter = data_array[39]
+ total_links_counter = data_array[40]
+ accessed_links_counter = data_array[41]
+ keywords_messages_list = data_array[42]
casename = report_info_array[0]
db_casename = report_info_array[1]
db_creation_date = report_info_array[2]
@@ -113,7 +113,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result, 'dorking_status': dorking_status,
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
@@ -132,7 +132,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result, 'dorking_status': dorking_status,
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
@@ -153,7 +153,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result, 'dorking_status': dorking_status,
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
@@ -165,9 +165,9 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
if generate_report(context, html_report_name, template_path):
print(Fore.GREEN + "HTML report for {} case was created at {}".format(''.join(short_domain), report_ctime) + Style.RESET_ALL)
print(Fore.GREEN + f"Scan elapsed time: {end}" + Style.RESET_ALL)
- robots_content, sitemap_content, sitemap_links_content, dorking_content = fp.get_db_columns(report_folder)
+ robots_content, sitemap_content, sitemap_links_content = fp.get_db_columns(report_folder) #, dorking_content was removed here
pdf_blob = fp.get_blob(html_report_name)
- db.insert_blob('HTML', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content, dorking_content)
+ db.insert_blob('HTML', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content) #, dorking_content was removed here
except Exception as e:
print(Fore.RED + 'Unable to create HTML report. See journal for details')
logging.error(f'HTML REPORT CREATION: ERROR. REASON: {e}')
From 660f4bef262180e453fc06bfaadcbd8f2ded1d65 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:49:33 +0300
Subject: [PATCH 20/72] Added basic support for reworked Google Dorking
---
service/files_processing.py | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/service/files_processing.py b/service/files_processing.py
index df9dffe..aaa27df 100644
--- a/service/files_processing.py
+++ b/service/files_processing.py
@@ -31,10 +31,10 @@ def get_db_columns(report_folder):
except:
sitemap_links_content = 0
pass
- try:
- with open(report_folder + "//" + '04-dorking_results.txt', 'r') as dorking_file:
- dorking_content = dorking_file.read()
- except:
- dorking_content = 0
- pass
- return robots_content, sitemap_content, sitemap_links_content, dorking_content
+ #try:
+ #with open(report_folder + "//" + '04-dorking_results.txt', 'r') as dorking_file:
+ #dorking_content = dorking_file.read()
+ #except:
+ #dorking_content = 0
+ #pass
+ return robots_content, sitemap_content, sitemap_links_content
From 939f9c61124b26bc48a78af581e51e63d986b9a3 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:49:50 +0300
Subject: [PATCH 21/72] Added basic support for reworked Google Dorking
---
service/db_processing.py | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/service/db_processing.py b/service/db_processing.py
index ed7aaf0..c82c5a6 100644
--- a/service/db_processing.py
+++ b/service/db_processing.py
@@ -98,15 +98,16 @@ def db_report_recreate(extracted_folder_name, id_to_extract):
except Exception as e:
print(Fore.RED + "Error appeared when recreating report from database. Reason: {}".format(e))
-def insert_blob(report_file_type, pdf_blob, db_casename, creation_date, case_comment, robots, sitemap_xml, sitemap_links, dorking_results):
+def insert_blob(report_file_type, pdf_blob, db_casename, creation_date, case_comment, robots, sitemap_xml, sitemap_links): #, dorking_results was removed here
try:
sqlite_connection = sqlite3.connect('report_storage.db')
cursor = sqlite_connection.cursor()
print(Fore.GREEN + "Connected to report storage database")
sqlite_insert_blob_query = """INSERT INTO report_storage
- (report_file_extension, report_content, creation_date, target, comment, dorks_results, robots_text, sitemap_text, sitemap_file) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"""
+ (report_file_extension, report_content, creation_date, target, comment, robots_text, sitemap_text, sitemap_file) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"""
+ #dorks_results was removed between comment and robots_text
- data_tuple = (report_file_type, pdf_blob, creation_date, db_casename, case_comment, dorking_results, robots, sitemap_links, sitemap_xml)
+ data_tuple = (report_file_type, pdf_blob, creation_date, db_casename, case_comment, robots, sitemap_links, sitemap_xml) #dorking_results was removed between case_comments and robots
cursor.execute(sqlite_insert_blob_query, data_tuple)
sqlite_connection.commit()
print(Fore.GREEN + "Scanning results are successfully saved in report storage database")
From da40526a9c49f6fa5e644f88c736375ca2699c93 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:50:08 +0300
Subject: [PATCH 22/72] Added basic support for reworked Google Dorking
---
reporting_modules/pdf_report_creation.py | 38 ++++++++++++------------
1 file changed, 19 insertions(+), 19 deletions(-)
diff --git a/reporting_modules/pdf_report_creation.py b/reporting_modules/pdf_report_creation.py
index 8710b12..bba1933 100644
--- a/reporting_modules/pdf_report_creation.py
+++ b/reporting_modules/pdf_report_creation.py
@@ -63,20 +63,20 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
cpes = data_array[27]
tags = data_array[28]
vulns = data_array[29]
- dorking_status = data_array[30]
- common_socials = data_array[31]
- total_socials = data_array[32]
- ps_emails_return = data_array[33]
- accessible_subdomains = data_array[34]
- emails_amount = data_array[35]
- files_counter = data_array[36]
- cookies_counter = data_array[37]
- api_keys_counter = data_array[38]
- website_elements_counter = data_array[39]
- exposed_passwords_counter = data_array[40]
- total_links_counter = data_array[41]
- accessed_links_counter = data_array[42]
- keywords_messages_list = data_array[43]
+ #dorking_status = data_array[30]
+ common_socials = data_array[30]
+ total_socials = data_array[31]
+ ps_emails_return = data_array[32]
+ accessible_subdomains = data_array[33]
+ emails_amount = data_array[34]
+ files_counter = data_array[35]
+ cookies_counter = data_array[36]
+ api_keys_counter = data_array[37]
+ website_elements_counter = data_array[38]
+ exposed_passwords_counter = data_array[39]
+ total_links_counter = data_array[40]
+ accessed_links_counter = data_array[41]
+ keywords_messages_list = data_array[42]
casename = report_info_array[0]
db_casename = report_info_array[1]
db_creation_date = report_info_array[2]
@@ -119,7 +119,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result, 'dorking_status': dorking_status,
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
@@ -138,7 +138,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result, 'dorking_status': dorking_status,
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
@@ -159,7 +159,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result, 'dorking_status': dorking_status,
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
@@ -171,9 +171,9 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
if create_pdf(template_path, pdf_report_name, context):
print(Fore.GREEN + "PDF report for {} case was created at {}".format(''.join(short_domain), report_ctime) + Style.RESET_ALL)
print(Fore.GREEN + f"Scan elapsed time: {end}" + Style.RESET_ALL)
- robots_content, sitemap_content, sitemap_links_content, dorking_content = fp.get_db_columns(report_folder)
+ robots_content, sitemap_content, sitemap_links_content = fp.get_db_columns(report_folder) #, dorking_content was removed here
pdf_blob = fp.get_blob(pdf_report_name)
- db.insert_blob('PDF', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content, dorking_content)
+ db.insert_blob('PDF', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content) #, dorking_content was removed here
except Exception as e:
print(Fore.RED + 'Unable to create PDF report. See journal for details')
logging.error(f'XLSX REPORT CREATION: ERROR. REASON: {e}')
From b2905018e1e4fb48cfe69eaa8aa625da0b6c8780 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Sun, 15 Sep 2024 03:50:22 +0300
Subject: [PATCH 23/72] Added basic support for reworked Google Dorking
---
reporting_modules/xlsx_report_creation.py | 35 ++++++++++++-----------
1 file changed, 18 insertions(+), 17 deletions(-)
diff --git a/reporting_modules/xlsx_report_creation.py b/reporting_modules/xlsx_report_creation.py
index 2c31ee3..e1b9551 100644
--- a/reporting_modules/xlsx_report_creation.py
+++ b/reporting_modules/xlsx_report_creation.py
@@ -46,20 +46,21 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
cpes = data_array[27]
tags = data_array[28]
vulns = data_array[29]
- dorking_status = data_array[30]
- common_socials = data_array[31]
- total_socials = data_array[32]
- ps_emails_return = data_array[33]
- accessible_subdomains = data_array[34]
- emails_amount = data_array[35]
- files_counter = data_array[36]
- cookies_counter = data_array[37]
- api_keys_counter = data_array[38]
- website_elements_counter = data_array[39]
- exposed_passwords_counter = data_array[40]
- total_links_counter = data_array[41]
- accessed_links_counter = data_array[42]
- dorking_results = data_array[43]
+ #dorking_status = data_array[30]
+ common_socials = data_array[30]
+ total_socials = data_array[31]
+ ps_emails_return = data_array[32]
+ accessible_subdomains = data_array[33]
+ emails_amount = data_array[34]
+ files_counter = data_array[35]
+ cookies_counter = data_array[36]
+ api_keys_counter = data_array[37]
+ website_elements_counter = data_array[38]
+ exposed_passwords_counter = data_array[39]
+ total_links_counter = data_array[40]
+ accessed_links_counter = data_array[41]
+ #keywords_messages_list = data_array[42]
+ #dorking_results = data_array[43]
casename = report_info_array[0]
db_casename = report_info_array[1]
db_creation_date = report_info_array[2]
@@ -149,7 +150,7 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
ws['B3'] = robots_txt_result
ws['B4'] = sitemap_xml_result
ws['B5'] = sitemap_links_status
- ws['B6'] = dorking_status
+ #ws['B6'] = dorking_status
ws['B7'] = pagesearch_ui_mark
ws['B8'] = report_ctime
@@ -313,8 +314,8 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
ws = wb['DORKING RESULTS']
ws.column_dimensions['A'].width = 80
- for i in range(len(dorking_results)):
- ws[f"A{i + 1}"] = str(dorking_results[i])
+ #for i in range(len(dorking_results)):
+ #ws[f"A{i + 1}"] = str(dorking_results[i])
report_file = report_folder + "//" + casename
wb.save(report_file)
From 1badbcf095e5449d7f5be831419ce84a8d84f910 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 00:37:55 +0300
Subject: [PATCH 24/72] Stabilized Google Dorking module, optimized outputs,
returns and file writing
---
dorking/dorking_handler.py | 107 +++++++++++++++++++++++++------------
1 file changed, 73 insertions(+), 34 deletions(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index acd015a..dd61b35 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -1,38 +1,77 @@
-import mechanicalsoup
-from bs4 import BeautifulSoup
-import requests
-from colorama import Fore, Style
+import sys
-def scrape_dork(dork, short_domain):
- browser = mechanicalsoup.Browser(soup_config={'features': 'html.parser'})
- modified_dork = dork.replace("{}", f"{short_domain}")
- print(Fore.GREEN + f"Dorking with " + Style.RESET_ALL + Fore.LIGHTCYAN_EX + f"{modified_dork}" + Style.RESET_ALL + Fore.GREEN + " dork" + Style.RESET_ALL)
- search_result = browser.get(f"https://www.google.com/search?q={modified_dork}")
- soup = BeautifulSoup(search_result.content, "html.parser")
- results = soup.find_all("a", href=True)
- full_urls = []
- for result in results:
- relative_link = result['href']
- if relative_link.startswith("/"):
- pass
- else: # Sometimes Google might give an already full URL
- full_urls.append(relative_link)
+try:
+ import requests.exceptions
+ from colorama import Fore, Style
+ import mechanicalsoup
+ import re
+ import requests
+ import sqlite3
+except ImportError as e:
+ print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
+ sys.exit()
- return full_urls
+def get_dorking_query(short_domain, dorking_db_path, table):
+ print(Fore.GREEN + "Getting dorking query from database")
+ conn = sqlite3.connect(dorking_db_path)
+ cursor = conn.cursor()
+ cursor.execute(f"SELECT dork FROM {table}")
+ rows = cursor.fetchall()
+ search_query = [row[0].format(short_domain) for row in rows]
+ conn.close()
+ return search_query
-def composing_dorking(dorks, conn, short_domain, report_path):
- results_file = open(f"{report_path}//dork_results.txt", "w")
- for dork_id, dork in dorks:
- results = scrape_dork(dork, short_domain)
- #print(Fore.GREEN + f"\nDORK #{dork_id}: {dork}")
- results_file.write(f"\nDORK #{dork_id}: {dork}\n")
- if results:
- for result in results:
- #print(f"=> {result_link}")
- results_file.write(f"=> {result}\n")
- print(Fore.LIGHTGREEN_EX + "-------------------------------------------------" + Style.RESET_ALL)
+def solid_google_dorking(query, pages=100):
+ try:
+ browser = mechanicalsoup.StatefulBrowser()
+ browser.open("https://www.google.com/")
+ browser.select_form('form[action="/search"]')
+ browser["q"] = str(query)
+ browser.submit_selected(btnName="btnG")
+ result_query = []
+ for page in range(pages):
+ for link in browser.links():
+ target = link.attrs['href']
+ if (target.startswith('/url?') and not
+ target.startswith("/url?q=http://webcache.googleusercontent.com")):
+ target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
+ result_query.append(target)
+ try:
+ browser.follow_link(nr=page + 1)
+ except mechanicalsoup.LinkNotFoundError:
+ break
+ del result_query[-2:]
+ return result_query
+ except requests.exceptions.ConnectionError as e:
+ print(Fore.RED + "Error while establishing connection with domain. No results will appear. Reason: {}".format(e) + Style.RESET_ALL)
+ #return "Google Dorking results file was not created"
+
+def save_results_to_txt(folderpath, queries, pages=10):
+ txt_writepath = folderpath + '//04-dorking_results.txt'
+ with open(txt_writepath, 'w') as f:
+ for i, query in enumerate(queries, start=1):
+ f.write(f"QUERY #{i}: {query}\n")
+ results = solid_google_dorking(query, pages)
+ print(Fore.GREEN + f"Dorking with {query} dork" + Style.RESET_ALL)
+ if not results:
+ f.write("=> NO RESULT FOUND\n")
+ print(Fore.RED + f"No results were found for {query} dork" + Style.RESET_ALL)
+ else:
+ for result in results:
+ f.write(f"=> {result}\n")
+ f.write("\n")
+ print(Fore.GREEN + "Google Dorking results successfully saved in TXT file" + Style.RESET_ALL)
+ #return "File with gathered links was successfully created"
+
+def transfer_results_to_xlsx(queries, pages=10):
+ dorking_return_list = []
+ for i, query in enumerate(queries, start=1):
+ dorking_return_list.append(f"QUERY #{i}: {query}\n")
+ results = solid_google_dorking(query, pages)
+ if not results:
+ dorking_return_list.append("NO RESULT FOUND\n")
else:
- #print("No results found for this dork.")
- results_file.write("No results found for this dork.\n")
- conn.close()
- results_file.close()
\ No newline at end of file
+ for result in results:
+ dorking_return_list.append(f"{result}\n")
+ dorking_return_list.append("\n")
+ #return "File with gathered links was successfully created", dorking_return_list
From 9911e0bccae132673dd8d418f023168cf874a111 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 00:38:37 +0300
Subject: [PATCH 25/72] Finished Google Dorking & DB integration
---
datagather_modules/data_assembler.py | 49 +++++++++++++---------------
1 file changed, 23 insertions(+), 26 deletions(-)
diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py
index 199dffc..d0fd42d 100644
--- a/datagather_modules/data_assembler.py
+++ b/datagather_modules/data_assembler.py
@@ -23,18 +23,15 @@
def establishing_dork_db_connection(dorking_flag):
if dorking_flag == 'basic':
- conn = sqlite3.connect('dorking//basic_dorking.db')
+ dorking_db_path = 'dorking//basic_dorking.db'
table = 'basic_dorks'
elif dorking_flag == 'iot':
- conn = sqlite3.connect('dorking//iot_dorking.db')
+ dorking_db_path = 'dorking//iot_dorking.db'
table = 'iot_dorks'
elif dorking_flag == 'files':
- conn = sqlite3.connect('dorking//files_dorking.db')
+ dorking_db_path = 'dorking//files_dorking.db'
table = 'files_dorks'
- cursor = conn.cursor()
- cursor.execute(f"SELECT dork_id, dork FROM {table}")
- dorks = cursor.fetchall()
- return conn, dorks
+ return dorking_db_path, table
class DataProcessing():
def report_preprocessing(self, short_domain, report_file_type):
@@ -136,19 +133,19 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
elif dorking_flag == 'basic':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -185,19 +182,19 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
elif dorking_flag == 'basic':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -206,7 +203,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
- website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, dorking_results]
+ website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter]
elif report_file_type == 'html':
if pagesearch_flag.lower() == 'y':
@@ -234,19 +231,19 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
elif dorking_flag == 'basic':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
- conn, dorks = establishing_dork_db_connection(dorking_flag.lower())
+ dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.composing_dorking(dorks, conn, short_domain, report_folder)
+ dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
From efcd8641bba3aeff39d798b79a2ec14fa2c02def Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 10:21:47 +0300
Subject: [PATCH 26/72] CLI minor changes
---
service/cli_init.py | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/service/cli_init.py b/service/cli_init.py
index ef141e2..d1a77e6 100644
--- a/service/cli_init.py
+++ b/service/cli_init.py
@@ -24,16 +24,17 @@ def print_main_menu(self):
print(Fore.MAGENTA + Back.WHITE + '[MAIN MENU]' + Style.RESET_ALL)
print(Fore.CYAN + "1. Determine target and start scan")
print(Fore.CYAN + "2. Settings")
- print(Fore.CYAN + "3. Help")
- print(Fore.CYAN + "4. Manage/create report storage database")
+ print(Fore.CYAN + "3. Report storage DB management")
+ print(Fore.CYAN + "4. Help")
print(Fore.LIGHTRED_EX + "5. Exit DPULSE" + Style.RESET_ALL + '\n')
def print_settings_menu(self):
print('\n')
print(Fore.MAGENTA + Back.WHITE + '[SETTINGS MENU]' + Style.RESET_ALL)
- print(Fore.CYAN + "1. Show current dorks list")
- print(Fore.CYAN + "2. Add Google Dork to config file")
- print(Fore.LIGHTRED_EX + "3. Return to main menu" + Style.RESET_ALL + '\n')
+ print(Fore.CYAN + "1. Print current config file")
+ print(Fore.CYAN + "2. Edit config file")
+ print(Fore.CYAN + "3. Generate custom Dorking DB")
+ print(Fore.LIGHTRED_EX + "4. Return to main menu" + Style.RESET_ALL + '\n')
def print_help_menu(self):
print(Fore.MAGENTA + Back.WHITE + '[HELP MENU]' + Style.RESET_ALL)
From 926df695214c44c08a117f541910acc25508fe3b Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 10:26:36 +0300
Subject: [PATCH 27/72] Deleted dorkslist.txt [deprecated]
---
dorkslist.txt | 19 -------------------
1 file changed, 19 deletions(-)
delete mode 100644 dorkslist.txt
diff --git a/dorkslist.txt b/dorkslist.txt
deleted file mode 100644
index ff87008..0000000
--- a/dorkslist.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-[SOLID DORKS]
-
-site:{} filetype:pdf
-site:{} filetype:xlsx
-site:{} filetype:docx
-site:{} filetype:ppt
-site:{} filetype:doc
-site:{} filetype:pptx
-site:{} filetype:db
-site:{} filetype:accdb
-site:{} filetype:nsf
-site:{} filetype:fp7
-site:{} filetype:mdf
-site:{} filetype:sqlitedb
-{} site:linkedin.com/in/
-site:{} inurl:login | inurl:logon | inurl:sign-in | inurl:signin | inurl:portal
-site:{} inurl:/signup.aspx
-site:{} intitle:"index of" ".ssh" OR "ssh_config" OR "ssh_known_hosts" OR "authorized_keys" OR "id_rsa" OR "id_dsa"
-site:{} intitle:"index of" "nginx.conf"
From 7790e7d1359febabbbddef8f7bd50cf789243771 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 08:51:09 -0400
Subject: [PATCH 28/72] Added config processing module
---
service/config_processing.py | 27 +++++++++++++++++++++++++++
1 file changed, 27 insertions(+)
create mode 100644 service/config_processing.py
diff --git a/service/config_processing.py b/service/config_processing.py
new file mode 100644
index 0000000..fc7ccda
--- /dev/null
+++ b/service/config_processing.py
@@ -0,0 +1,27 @@
+import configparser
+import os
+
+def create_config():
+ config = configparser.ConfigParser()
+ config['LOGGING'] = {'log_level': 'info'}
+ config['CLI VISUAL'] = {'preview_color': 'red'}
+
+ with open('service//config.ini', 'w') as configfile:
+ config.write(configfile)
+
+def check_cfg_presence():
+ cfg_presence = os.path.isfile('service//config.ini')
+ return cfg_presence
+
+def read_config():
+ config = configparser.ConfigParser()
+ config.read('service//config.ini')
+ log_level = config.get('LOGGING', 'log_level')
+ cli_preview_color = config.get('CLI VISUAL', 'preview_color')
+
+ config_values = {
+ 'logging_level': log_level,
+ 'preview_color': cli_preview_color
+ }
+
+ return config_values
From c7efe72b96dd9ec88b54247009d55b3649cda998 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 15:51:52 +0300
Subject: [PATCH 29/72] Slightly reworked file structure to support config file
---
dpulse.py | 64 +++++++++++++++++++++++++++++++++----------------------
1 file changed, 38 insertions(+), 26 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index 10d4769..c8acb9b 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -3,20 +3,49 @@
sys.path.append('service')
sys.path.append('reporting_modules')
sys.path.append('dorking')
-
-import pdf_report_creation as pdf_rc
+from colorama import Fore, Style
import cli_init
+from config_processing import create_config, check_cfg_presence, read_config
import db_processing as db
+import os
+
+def dorks_files_check():
+ dorks_path = 'dorking//'
+ dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db']
+ dorks_files_counter = 0
+ for dork_files in dorks_files:
+ files_path = os.path.join(dorks_path, dork_files)
+ if os.path.isfile(files_path):
+ dorks_files_counter += 1
+ else:
+ pass
+
+ if dorks_files_counter == 3:
+ print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
+ else:
+ print(Fore.RED + "Dorks databases presence: NOT OK\nSome files may not be in folder. Please compare dorking folder with the same folder on the official repository\n" + Style.RESET_ALL)
+ sys.exit()
+
+db.db_creation('report_storage.db')
+dorks_files_check()
+cfg_presence = check_cfg_presence()
+if cfg_presence is True:
+ print(Fore.GREEN + "Global config file presence: OK" + Style.RESET_ALL)
+else:
+ print(Fore.RED + "Global config file presence: NOT OK")
+ create_config()
+ print(Fore.GREEN + "Successfully generated global config file")
+
+
+import pdf_report_creation as pdf_rc
import xlsx_report_creation as xlsx_rc
import html_report_creation as html_rc
from data_assembler import DataProcessing
-
try:
import time
from colorama import Fore, Style, Back
import webbrowser
import sqlite3
- import os
import itertools
import threading
from time import sleep, time
@@ -24,9 +53,11 @@
print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
sys.exit()
+data_processing = DataProcessing()
+config_values = read_config()
+
cli = cli_init.Menu()
cli.welcome_menu()
-data_processing = DataProcessing()
def time_processing(end):
if end < 60:
@@ -40,23 +71,6 @@ def time_processing(end):
endtime_string = f'approximately {time_minutes} minutes'
return endtime_string
-def dorks_files_check():
- dorks_path = 'dorking//'
- dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db']
- dorks_files_counter = 0
- for dork_files in dorks_files:
- files_path = os.path.join(dorks_path, dork_files)
- if os.path.isfile(files_path):
- dorks_files_counter += 1
- else:
- pass
-
- if dorks_files_counter == 3:
- print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
- else:
- print(Fore.RED + "Dorks databases presence: NOT OK\nSome files may not be in folder. Please compare dorking folder with the same folder on the official repository\n" + Style.RESET_ALL)
- sys.exit()
-
class ProgressBar(threading.Thread):
def __init__(self):
super(ProgressBar, self).__init__()
@@ -69,8 +83,6 @@ def run(self):
print(Fore.LIGHTMAGENTA_EX + Back.WHITE + char + Style.RESET_ALL, end='\r')
sleep(0.1)
-db.db_creation('report_storage.db')
-dorks_files_check()
def run():
while True:
try:
@@ -208,7 +220,7 @@ def run():
elif choice == "2":
print(Fore.RED + "Sorry, but this menu is deprecated since v1.1.1. It will be back soon")
- elif choice == "3":
+ elif choice == "4":
cli.print_help_menu()
choice_help = input(Fore.YELLOW + "Enter your choice >> ")
if choice_help == '1':
@@ -226,7 +238,7 @@ def run():
else:
print(Fore.RED + "\nInvalid menu item. Please select between existing menu items")
- elif choice == "4":
+ elif choice == "3":
cli.print_db_menu()
print('\n')
db.db_creation('report_storage.db')
From 90930f9fc599f15751a60643cf13ecc4ff6d0285 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 15:52:20 +0300
Subject: [PATCH 30/72] Added config file support for logging
---
service/logs_processing.py | 22 +++++++++++++++++++++-
1 file changed, 21 insertions(+), 1 deletion(-)
diff --git a/service/logs_processing.py b/service/logs_processing.py
index ae34de4..4286529 100644
--- a/service/logs_processing.py
+++ b/service/logs_processing.py
@@ -1,3 +1,23 @@
import logging
+from config_processing import read_config
+from colorama import Fore, Style
-logging.basicConfig(filename="journal.log", level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+config_values = read_config()
+logging_level = (config_values['logging_level']).upper()
+
+if logging_level == 'DEBUG':
+ level = logging.DEBUG
+elif logging_level == 'INFO':
+ level = logging.INFO
+elif logging_level == 'WARNING':
+ level = logging.WARNING
+elif logging_level == 'ERROR':
+ level = logging.ERROR
+elif logging_level == 'CRITICAL':
+ level = logging.CRITICAL
+else:
+ print(Fore.RED + "You've entered wrong logging level in config file. Please verify proper mods and re-enter it" + Style.RESET_ALL)
+ print(Fore.RED + "Setting config level as DEBUG for this session" + Style.RESET_ALL)
+ level = logging.DEBUG
+
+logging.basicConfig(filename="journal.log", level=level, format="%(asctime)s - %(levelname)s - %(message)s")
From c6918303f732fef0587b3eb11e88310ac6a54134 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 15:53:10 +0300
Subject: [PATCH 31/72] Reworked CLI appearance order
---
service/cli_init.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/service/cli_init.py b/service/cli_init.py
index d1a77e6..ac9d312 100644
--- a/service/cli_init.py
+++ b/service/cli_init.py
@@ -1,5 +1,6 @@
import sys
+
try:
from colorama import Fore, Back, Style
from pyfiglet import Figlet
@@ -17,7 +18,7 @@ def welcome_menu(self):
print('\n')
self.console.print(fig.renderText('DPULSE'), style="red")
print(Fore.MAGENTA + Style.BRIGHT + 'DPULSE-CLI // 1.1.1 (rolling) // OSINT-TECHNOLOGIES\n' + Style.RESET_ALL)
- print(Fore.MAGENTA + Style.BRIGHT + 'Visit our pages:\nhttps://github.com/OSINT-TECHNOLOGIES\nhttps://pypi.org/project/dpulse/' + Style.RESET_ALL + '\n')
+ print(Fore.MAGENTA + Style.BRIGHT + 'Visit our pages:\nhttps://github.com/OSINT-TECHNOLOGIES\nhttps://pypi.org/project/dpulse/' + Style.RESET_ALL)
def print_main_menu(self):
print('\n')
From e139fac51c3e0c64e47e3a1a8892d2cc89cac64c Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:18:53 +0300
Subject: [PATCH 32/72] Added config support for CLI customization
---
service/cli_init.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/service/cli_init.py b/service/cli_init.py
index ac9d312..b8a6f58 100644
--- a/service/cli_init.py
+++ b/service/cli_init.py
@@ -1,5 +1,5 @@
import sys
-
+from config_processing import read_config
try:
from colorama import Fore, Back, Style
@@ -14,9 +14,11 @@ def __init__(self):
self.console = Console()
def welcome_menu(self):
+ config_values = read_config()
+ preview_style = (config_values['preview_color']).lower()
fig = Figlet(font='slant')
print('\n')
- self.console.print(fig.renderText('DPULSE'), style="red")
+ self.console.print(fig.renderText('DPULSE'), style=preview_style)
print(Fore.MAGENTA + Style.BRIGHT + 'DPULSE-CLI // 1.1.1 (rolling) // OSINT-TECHNOLOGIES\n' + Style.RESET_ALL)
print(Fore.MAGENTA + Style.BRIGHT + 'Visit our pages:\nhttps://github.com/OSINT-TECHNOLOGIES\nhttps://pypi.org/project/dpulse/' + Style.RESET_ALL)
From acbce82172713c45bfcd3e7a5bd079f4eba26a45 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:24:38 +0300
Subject: [PATCH 33/72] Added config support for CLI font
---
service/cli_init.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/service/cli_init.py b/service/cli_init.py
index b8a6f58..28b9f94 100644
--- a/service/cli_init.py
+++ b/service/cli_init.py
@@ -16,7 +16,8 @@ def __init__(self):
def welcome_menu(self):
config_values = read_config()
preview_style = (config_values['preview_color']).lower()
- fig = Figlet(font='slant')
+ wm_font = (config_values['wm_font']).lower()
+ fig = Figlet(font=wm_font)
print('\n')
self.console.print(fig.renderText('DPULSE'), style=preview_style)
print(Fore.MAGENTA + Style.BRIGHT + 'DPULSE-CLI // 1.1.1 (rolling) // OSINT-TECHNOLOGIES\n' + Style.RESET_ALL)
From 9920ef9a82f986d8cf352c4722f75f0c2eed4708 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:32:13 +0300
Subject: [PATCH 34/72] Moved dorks_files_check function to dorking_handler
---
dpulse.py | 18 +-----------------
1 file changed, 1 insertion(+), 17 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index c8acb9b..807c1aa 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -8,23 +8,7 @@
from config_processing import create_config, check_cfg_presence, read_config
import db_processing as db
import os
-
-def dorks_files_check():
- dorks_path = 'dorking//'
- dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db']
- dorks_files_counter = 0
- for dork_files in dorks_files:
- files_path = os.path.join(dorks_path, dork_files)
- if os.path.isfile(files_path):
- dorks_files_counter += 1
- else:
- pass
-
- if dorks_files_counter == 3:
- print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
- else:
- print(Fore.RED + "Dorks databases presence: NOT OK\nSome files may not be in folder. Please compare dorking folder with the same folder on the official repository\n" + Style.RESET_ALL)
- sys.exit()
+from dorking_handler import dorks_files_check
db.db_creation('report_storage.db')
dorks_files_check()
From c17f5c6729f21b6480c37df81e1ab6828cc82fad Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:32:39 +0300
Subject: [PATCH 35/72] Moved dorks_files_check function to dorking_handler
---
dorking/dorking_handler.py | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index dd61b35..1019b0d 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -7,6 +7,7 @@
import re
import requests
import sqlite3
+ import os
except ImportError as e:
print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
sys.exit()
@@ -75,3 +76,20 @@ def transfer_results_to_xlsx(queries, pages=10):
dorking_return_list.append(f"{result}\n")
dorking_return_list.append("\n")
#return "File with gathered links was successfully created", dorking_return_list
+
+def dorks_files_check():
+ dorks_path = 'dorking//'
+ dorks_files = ['iot_dorking.db', 'files_dorking.db', 'basic_dorking.db']
+ dorks_files_counter = 0
+ for dork_files in dorks_files:
+ files_path = os.path.join(dorks_path, dork_files)
+ if os.path.isfile(files_path):
+ dorks_files_counter += 1
+ else:
+ pass
+
+ if dorks_files_counter == 3:
+ print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
+ else:
+ print(Fore.RED + "Dorks databases presence: NOT OK\nSome files may not be in folder. Please compare dorking folder with the same folder on the official repository\n" + Style.RESET_ALL)
+ sys.exit()
From 9112a9cc9692638342b486c39ceecde91715dbf2 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:33:23 +0300
Subject: [PATCH 36/72] Removed dorking_processor.py module [deprecated]
---
datagather_modules/dorking_processor.py | 73 -------------------------
1 file changed, 73 deletions(-)
delete mode 100644 datagather_modules/dorking_processor.py
diff --git a/datagather_modules/dorking_processor.py b/datagather_modules/dorking_processor.py
deleted file mode 100644
index 36abd2d..0000000
--- a/datagather_modules/dorking_processor.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import sys
-
-try:
- import requests.exceptions
- from colorama import Fore, Style
- import mechanicalsoup
- import re
- import requests
-except ImportError as e:
- print(Fore.RED + "Import error appeared. Reason: {}".format(e) + Style.RESET_ALL)
- sys.exit()
-
-def get_dorking_query(short_domain):
- print(Fore.GREEN + "Getting dorking query from config file")
- with open('dorkslist.txt', 'r') as cfg_file:
- lines = cfg_file.readlines()
- index = lines.index('[SOLID DORKS]\n')
- lines_after = lines[index + 2:]
- search_query = [line.format(short_domain) for line in lines_after]
- return search_query
-
-def solid_google_dorking(query, pages=100):
- try:
- browser = mechanicalsoup.StatefulBrowser()
- browser.open("https://www.google.com/")
- browser.select_form('form[action="/search"]')
- browser["q"] = str(query)
- browser.submit_selected(btnName="btnG")
- result_query = []
- for page in range(pages):
- for link in browser.links():
- target = link.attrs['href']
- if (target.startswith('/url?') and not
- target.startswith("/url?q=http://webcache.googleusercontent.com")):
- target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
- result_query.append(target)
- try:
- browser.follow_link(nr=page + 1)
- except mechanicalsoup.LinkNotFoundError:
- break
- del result_query[-2:]
- return result_query
- except requests.exceptions.ConnectionError as e:
- print(Fore.RED + "Error while establishing connection with domain. No results will appear. Reason: {}".format(e) + Style.RESET_ALL)
- return "Google Dorking results file was not created"
-
-def save_results_to_txt(folderpath, queries, pages=10):
- txt_writepath = folderpath + '//04-dorking_results.txt'
- with open(txt_writepath, 'w') as f:
- for i, query in enumerate(queries, start=1):
- f.write(f"QUERY #{i}: {query}\n")
- results = solid_google_dorking(query, pages)
- if not results:
- f.write("=> NO RESULT FOUND\n")
- else:
- for result in results:
- f.write(f"=> {result}\n")
- f.write("\n")
- print(Fore.GREEN + "Google Dorking results successfully saved in TXT file" + Style.RESET_ALL)
- return "File with gathered links was successfully created"
-
-def transfer_results_to_xlsx(queries, pages=10):
- dorking_return_list = []
- for i, query in enumerate(queries, start=1):
- dorking_return_list.append(f"QUERY #{i}: {query}\n")
- results = solid_google_dorking(query, pages)
- if not results:
- dorking_return_list.append("NO RESULT FOUND\n")
- else:
- for result in results:
- dorking_return_list.append(f"{result}\n")
- dorking_return_list.append("\n")
- return "File with gathered links was successfully created", dorking_return_list
From eba8c413c93f18f18924d85d230ccdecbcf7b6a3 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:58:22 +0300
Subject: [PATCH 37/72] Added domain regex and connection check (#81, #82)
---
dpulse.py | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/dpulse.py b/dpulse.py
index 807c1aa..eecc2ff 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -3,6 +3,7 @@
sys.path.append('service')
sys.path.append('reporting_modules')
sys.path.append('dorking')
+
from colorama import Fore, Style
import cli_init
from config_processing import create_config, check_cfg_presence, read_config
@@ -25,7 +26,10 @@
import xlsx_report_creation as xlsx_rc
import html_report_creation as html_rc
from data_assembler import DataProcessing
+
try:
+ import socket
+ import re
import time
from colorama import Fore, Style, Back
import webbrowser
@@ -55,6 +59,13 @@ def time_processing(end):
endtime_string = f'approximately {time_minutes} minutes'
return endtime_string
+def domain_precheck(domain):
+ try:
+ socket.create_connection((domain, 80), timeout=5)
+ return True
+ except OSError:
+ return False
+
class ProgressBar(threading.Thread):
def __init__(self):
super(ProgressBar, self).__init__()
@@ -71,6 +82,7 @@ def run():
while True:
try:
cli.print_main_menu()
+ domain_patter = r'^[a-zA-Z0-9-]+\.[a-zA-Z]{2,}$'
choice = input(Fore.YELLOW + "Enter your choice >> ")
if choice == "1":
while True:
@@ -81,8 +93,15 @@ def run():
else:
if not short_domain:
print(Fore.RED + "\nEmpty domain names are not supported")
+ elif re.match(domain_patter, short_domain) is None:
+ print(Fore.RED + '\nYour string does not match domain pattern')
else:
url = "http://" + short_domain + "/"
+ if domain_precheck(short_domain):
+ print(Fore.GREEN + 'Entered domain is accessible. Continuation' + Style.RESET_ALL)
+ else:
+ print(Fore.RED + "Entered domain is not accessible. Scan is impossible" + Style.RESET_ALL)
+ break
case_comment = input(Fore.YELLOW + "Enter case comment >> ")
report_filetype = input(Fore.YELLOW + "Enter report file extension [xlsx/pdf/html] >> ")
if not report_filetype:
From b3c3b658fababc91504f1581bc96b0ed3c5ea9c7 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 12:23:25 -0400
Subject: [PATCH 38/72] Added misc.py module to control various mini functions
---
service/misc.py | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
create mode 100644 service/misc.py
diff --git a/service/misc.py b/service/misc.py
new file mode 100644
index 0000000..506ccfa
--- /dev/null
+++ b/service/misc.py
@@ -0,0 +1,20 @@
+import socket
+
+def time_processing(end):
+ if end < 60:
+ endtime = round(end)
+ endtime_string = f'approximately {endtime} seconds'
+ else:
+ time_minutes = round(end / 60)
+ if time_minutes == 1:
+ endtime_string = f'approximately {time_minutes} minute'
+ else:
+ endtime_string = f'approximately {time_minutes} minutes'
+ return endtime_string
+
+def domain_precheck(domain):
+ try:
+ socket.create_connection((domain, 80), timeout=5)
+ return True
+ except OSError:
+ return False
\ No newline at end of file
From 1b0a9400c0ef968238e243e110dfa70d39ac4b40 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 19:23:45 +0300
Subject: [PATCH 39/72] Added misc.py support for dpulse.py
---
dpulse.py | 20 +-------------------
1 file changed, 1 insertion(+), 19 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index eecc2ff..6f48de5 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -26,6 +26,7 @@
import xlsx_report_creation as xlsx_rc
import html_report_creation as html_rc
from data_assembler import DataProcessing
+from misc import time_processing, domain_precheck
try:
import socket
@@ -47,25 +48,6 @@
cli = cli_init.Menu()
cli.welcome_menu()
-def time_processing(end):
- if end < 60:
- endtime = round(end)
- endtime_string = f'approximately {endtime} seconds'
- else:
- time_minutes = round(end / 60)
- if time_minutes == 1:
- endtime_string = f'approximately {time_minutes} minute'
- else:
- endtime_string = f'approximately {time_minutes} minutes'
- return endtime_string
-
-def domain_precheck(domain):
- try:
- socket.create_connection((domain, 80), timeout=5)
- return True
- except OSError:
- return False
-
class ProgressBar(threading.Thread):
def __init__(self):
super(ProgressBar, self).__init__()
From 16f4d7d030e465c6310848db7053b8a28f81ea15 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 19:35:28 +0300
Subject: [PATCH 40/72] Added wm_font support
---
service/config_processing.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/service/config_processing.py b/service/config_processing.py
index fc7ccda..baee6c1 100644
--- a/service/config_processing.py
+++ b/service/config_processing.py
@@ -4,7 +4,7 @@
def create_config():
config = configparser.ConfigParser()
config['LOGGING'] = {'log_level': 'info'}
- config['CLI VISUAL'] = {'preview_color': 'red'}
+ config['CLI VISUAL'] = {'preview_color': 'red', 'font': 'slant'}
with open('service//config.ini', 'w') as configfile:
config.write(configfile)
@@ -18,10 +18,12 @@ def read_config():
config.read('service//config.ini')
log_level = config.get('LOGGING', 'log_level')
cli_preview_color = config.get('CLI VISUAL', 'preview_color')
+ wm_font = config.get('CLI VISUAL', 'font')
config_values = {
'logging_level': log_level,
- 'preview_color': cli_preview_color
+ 'preview_color': cli_preview_color,
+ 'wm_font': wm_font
}
return config_values
From 5b3619f30af81e1385fc26927f8dbfc77ffa253d Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Mon, 23 Sep 2024 20:04:53 +0300
Subject: [PATCH 41/72] Added dorking post-scan summary output
---
dorking/dorking_handler.py | 17 +++++++++++++----
1 file changed, 13 insertions(+), 4 deletions(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index 1019b0d..66587ef 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -47,22 +47,31 @@ def solid_google_dorking(query, pages=100):
print(Fore.RED + "Error while establishing connection with domain. No results will appear. Reason: {}".format(e) + Style.RESET_ALL)
#return "Google Dorking results file was not created"
+
def save_results_to_txt(folderpath, queries, pages=10):
txt_writepath = folderpath + '//04-dorking_results.txt'
+ total_results = []
+
with open(txt_writepath, 'w') as f:
+ print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
for i, query in enumerate(queries, start=1):
f.write(f"QUERY #{i}: {query}\n")
results = solid_google_dorking(query, pages)
- print(Fore.GREEN + f"Dorking with {query} dork")
if not results:
f.write("=> NO RESULT FOUND\n")
- print(Fore.RED + f"No results were found for {query} dork")
+ total_results.append((query, 0))
else:
+ total_results.append((query, len(results)))
for result in results:
f.write(f"=> {result}\n")
f.write("\n")
- print(Fore.GREEN + "Google Dorking results successfully saved in TXT file" + Style.RESET_ALL)
- #return "File with gathered links was successfully created"
+
+ print(Fore.GREEN + "Google Dorking end. Results successfully saved in TXT file\n" + Style.RESET_ALL)
+ print(Fore.GREEN + "During Google Dorking:")
+ for query, count in total_results:
+ if count == 0:
+ count = 'no results'
+ print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f' {query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
def transfer_results_to_xlsx(queries, pages=10):
dorking_return_list = []
From 50621531daf97332e6b63ad884b5ae8884eb548c Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:27:15 +0300
Subject: [PATCH 42/72] Added new mentions
---
README.md | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 19a2c92..c1170b2 100644
--- a/README.md
+++ b/README.md
@@ -141,7 +141,7 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
# Tasks to complete before new release
-- [ ] Rework Google Dorking module in separate mode
+- [x] Rework Google Dorking module in separate mode
- [ ] Rework Google Dorks list into separate databases with different pre-configured dorks for various purposes
- [ ] Allow user to create their own dorks DB
- [ ] Add separate API search mode with different free APIs
@@ -190,6 +190,9 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
### [by C.I.T Security](https://t.me/citsecurity/8578)
+### [by Adityaa_oky](https://t.me/adityaa_oky/960)
+
+### [by Реальний OSINT](https://t.me/realOSINT/462)
From f3ad6d859ac1d96b47da069345c8e8d0d91658f3 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:31:36 +0300
Subject: [PATCH 43/72] Restored correct CLI modes selection order
---
dpulse.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dpulse.py b/dpulse.py
index 6f48de5..5015bd6 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -93,7 +93,6 @@ def run():
else:
print(Fore.GREEN + "[!] SI mode suppose you to have sitemap_links.txt file in report folder [!]\n[!] It'll visit every link from this file [!]")
pagesearch_flag = input(Fore.YELLOW + "Would you like to use PageSearch function? [Y/N/SI] >> ")
- dorking_flag = input(Fore.YELLOW + "Select Dorking mode [Basic/IoT/Files/None] >> ")
if pagesearch_flag.lower() == 'y':
keywords_input = input(Fore.YELLOW + "Enter keywords (separate by comma) to search in files during PageSearch process (or write None if you don't need it) >> ")
if keywords_input.lower() != "none":
@@ -112,6 +111,7 @@ def run():
keywords_list = None
keywords_flag = 0
if report_filetype.lower() == 'pdf' or report_filetype.lower() == 'xlsx' or report_filetype.lower() == 'html':
+ dorking_flag = input(Fore.YELLOW + "Select Dorking mode [Basic/IoT/Files/None] >> ")
if pagesearch_flag.lower() == 'y' or pagesearch_flag.lower() == 'n' or pagesearch_flag.lower() == 'si':
if pagesearch_flag.lower() == "n":
pagesearch_ui_mark = 'No'
From 447ab00441ec08aff4e65d91ddaa00eef16a7906 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:33:56 +0300
Subject: [PATCH 44/72] Removed double space in post-dorking summary
---
dorking/dorking_handler.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index 66587ef..301d953 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -71,7 +71,7 @@ def save_results_to_txt(folderpath, queries, pages=10):
for query, count in total_results:
if count == 0:
count = 'no results'
- print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f' {query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
+ print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f'{query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
def transfer_results_to_xlsx(queries, pages=10):
dorking_return_list = []
From ea3c65a838fe0bb97eab5ee766869daab7e70993 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:36:00 +0300
Subject: [PATCH 45/72] Added folder for future APIs module
---
apis/__init__.py | 1 +
1 file changed, 1 insertion(+)
create mode 100644 apis/__init__.py
diff --git a/apis/__init__.py b/apis/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/apis/__init__.py
@@ -0,0 +1 @@
+
From 49645d654a047124d22b15b73b7349d420a9264f Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:43:01 +0300
Subject: [PATCH 46/72] Added new menu items
---
service/cli_init.py | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/service/cli_init.py b/service/cli_init.py
index 28b9f94..9db06f9 100644
--- a/service/cli_init.py
+++ b/service/cli_init.py
@@ -29,8 +29,9 @@ def print_main_menu(self):
print(Fore.CYAN + "1. Determine target and start scan")
print(Fore.CYAN + "2. Settings")
print(Fore.CYAN + "3. Report storage DB management")
- print(Fore.CYAN + "4. Help")
- print(Fore.LIGHTRED_EX + "5. Exit DPULSE" + Style.RESET_ALL + '\n')
+ print(Fore.CYAN + "4. Show API module status (not active)")
+ print(Fore.CYAN + "5. Help")
+ print(Fore.LIGHTRED_EX + "6. Exit DPULSE" + Style.RESET_ALL + '\n')
def print_settings_menu(self):
print('\n')
@@ -38,7 +39,8 @@ def print_settings_menu(self):
print(Fore.CYAN + "1. Print current config file")
print(Fore.CYAN + "2. Edit config file")
print(Fore.CYAN + "3. Generate custom Dorking DB")
- print(Fore.LIGHTRED_EX + "4. Return to main menu" + Style.RESET_ALL + '\n')
+ print(Fore.CYAN + "4. Add API key for existing API")
+ print(Fore.LIGHTRED_EX + "5. Return to main menu" + Style.RESET_ALL + '\n')
def print_help_menu(self):
print(Fore.MAGENTA + Back.WHITE + '[HELP MENU]' + Style.RESET_ALL)
From 12eacb15936967fcec616567fee5bd23377004d4 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:43:25 +0300
Subject: [PATCH 47/72] Adapted program logic for new menu items
---
dpulse.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index 5015bd6..527ca30 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -205,7 +205,7 @@ def run():
elif choice == "2":
print(Fore.RED + "Sorry, but this menu is deprecated since v1.1.1. It will be back soon")
- elif choice == "4":
+ elif choice == "5":
cli.print_help_menu()
choice_help = input(Fore.YELLOW + "Enter your choice >> ")
if choice_help == '1':
@@ -248,7 +248,7 @@ def run():
elif choice_db == "3":
print(Fore.GREEN + "\nDatabase connection is successfully closed")
continue
- elif choice == "5":
+ elif choice == "6":
print(Fore.RED + "Exiting the program." + Style.RESET_ALL)
break
else:
From f7f39ece46f5e1c96f8e197b5fe493ed5f7bc5e3 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Wed, 25 Sep 2024 02:45:30 +0300
Subject: [PATCH 48/72] Removed double import
---
dpulse.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index 527ca30..1d240c6 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -4,7 +4,7 @@
sys.path.append('reporting_modules')
sys.path.append('dorking')
-from colorama import Fore, Style
+from colorama import Fore, Style, Back
import cli_init
from config_processing import create_config, check_cfg_presence, read_config
import db_processing as db
@@ -32,7 +32,6 @@
import socket
import re
import time
- from colorama import Fore, Style, Back
import webbrowser
import sqlite3
import itertools
From 76b8a39a7430fb71cec4c5d626e3ebb5e644e026 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:02:03 +0300
Subject: [PATCH 49/72] Added CLI message which indicates incoming ping phase
---
dpulse.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/dpulse.py b/dpulse.py
index 1d240c6..09d0bc2 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -78,6 +78,7 @@ def run():
print(Fore.RED + '\nYour string does not match domain pattern')
else:
url = "http://" + short_domain + "/"
+ print(Fore.GREEN + 'Pinging domain...' + Style.RESET_ALL)
if domain_precheck(short_domain):
print(Fore.GREEN + 'Entered domain is accessible. Continuation' + Style.RESET_ALL)
else:
@@ -111,6 +112,14 @@ def run():
keywords_flag = 0
if report_filetype.lower() == 'pdf' or report_filetype.lower() == 'xlsx' or report_filetype.lower() == 'html':
dorking_flag = input(Fore.YELLOW + "Select Dorking mode [Basic/IoT/Files/None] >> ")
+ #api_flag = input(Fore.YELLOW + "Would you like to use 3rd party API in scan? [Y/N] >> ")
+ #if api_flag.lower() == 'y':
+ #print api db content
+ #write ID which you want to use using comma (ex: 1,3,4)
+ #elif api_flag.lower() == 'n':
+ #pass
+ #else:
+ #print invalid mode
if pagesearch_flag.lower() == 'y' or pagesearch_flag.lower() == 'n' or pagesearch_flag.lower() == 'si':
if pagesearch_flag.lower() == "n":
pagesearch_ui_mark = 'No'
From 3dd20df725a7bb04615426631dae09f024126d65 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:08:00 +0300
Subject: [PATCH 50/72] Reactivated dorking results transfer to DB (HTML
report)
---
reporting_modules/html_report_creation.py | 104 +++++++++++-----------
1 file changed, 54 insertions(+), 50 deletions(-)
diff --git a/reporting_modules/html_report_creation.py b/reporting_modules/html_report_creation.py
index 532bd37..477fb90 100644
--- a/reporting_modules/html_report_creation.py
+++ b/reporting_modules/html_report_creation.py
@@ -71,6 +71,8 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
total_links_counter = data_array[40]
accessed_links_counter = data_array[41]
keywords_messages_list = data_array[42]
+ dorking_status = data_array[43]
+ dorking_file_path = data_array[44]
casename = report_info_array[0]
db_casename = report_info_array[1]
db_creation_date = report_info_array[2]
@@ -104,70 +106,72 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
if pagesearch_keyword == 'n':
template_path = pdf_templates_path + '//default_report_temp.html'
context = {'sh_domain': short_domain, 'full_url': url, 'ip_address': ip, 'registrar': res['registrar'],
- 'creation_date': res['creation_date'],'expiration_date': res['expiration_date'],
- 'name_servers': ', '.join(res['name_servers']),'org': res['org'],
- 'mails': mails, 'subdomain_mails': subdomain_mails_cleaned, 'subdomain_socials': social_medias,
- 'subdomain_ip': subdomain_ip,
- 'subdomains': subdomains, 'fb_links': common_socials['Facebook'],
- 'tw_links': common_socials['Twitter'], 'inst_links': common_socials['Instagram'],
- 'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
- 'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
- 'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
- 'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
- 'javascript_frameworks': javascript_frameworks,
- 'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
- 'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
- 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark}
+ 'creation_date': res['creation_date'],'expiration_date': res['expiration_date'],
+ 'name_servers': ', '.join(res['name_servers']),'org': res['org'],
+ 'mails': mails, 'subdomain_mails': subdomain_mails_cleaned, 'subdomain_socials': social_medias,
+ 'subdomain_ip': subdomain_ip,
+ 'subdomains': subdomains, 'fb_links': common_socials['Facebook'],
+ 'tw_links': common_socials['Twitter'], 'inst_links': common_socials['Instagram'],
+ 'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
+ 'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
+ 'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
+ 'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
+ 'javascript_frameworks': javascript_frameworks,
+ 'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
+ 'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
+ 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark, 'dorking_status': dorking_status}
elif pagesearch_keyword == 'y':
template_path = pdf_templates_path + '//ps_report_temp.html'
context = {'sh_domain': short_domain, 'full_url': url, 'ip_address': ip, 'registrar': res['registrar'],
- 'creation_date': res['creation_date'],'expiration_date': res['expiration_date'],
- 'name_servers': ', '.join(res['name_servers']),'org': res['org'],
- 'mails': mails, 'subdomain_mails': subdomain_mails_cleaned, 'subdomain_socials': social_medias,
- 'subdomain_ip': subdomain_ip,
- 'subdomains': subdomains, 'fb_links': common_socials['Facebook'],
- 'tw_links': common_socials['Twitter'], 'inst_links': common_socials['Instagram'],
- 'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
- 'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
- 'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
- 'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
- 'javascript_frameworks': javascript_frameworks,
- 'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
- 'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
- 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark,
- 'acc_sd': accessible_subdomains, 'add_mails': emails_amount, 'extr_files': files_counter, 'cookies': cookies_counter, 'apis': api_keys_counter,
- 'wpe': website_elements_counter, 'exp_pass': exposed_passwords_counter, 'kml': keywords_messages_list}
+ 'creation_date': res['creation_date'],'expiration_date': res['expiration_date'],
+ 'name_servers': ', '.join(res['name_servers']),'org': res['org'],
+ 'mails': mails, 'subdomain_mails': subdomain_mails_cleaned, 'subdomain_socials': social_medias,
+ 'subdomain_ip': subdomain_ip,
+ 'subdomains': subdomains, 'fb_links': common_socials['Facebook'],
+ 'tw_links': common_socials['Twitter'], 'inst_links': common_socials['Instagram'],
+ 'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
+ 'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
+ 'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
+ 'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
+ 'javascript_frameworks': javascript_frameworks,
+ 'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
+ 'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
+ 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark,
+ 'acc_sd': accessible_subdomains, 'add_mails': emails_amount, 'extr_files': files_counter, 'cookies': cookies_counter, 'apis': api_keys_counter,
+ 'wpe': website_elements_counter, 'exp_pass': exposed_passwords_counter, 'kml': keywords_messages_list, 'dorking_status': dorking_status}
elif pagesearch_keyword == 'si':
template_path = pdf_templates_path + '//si_report_temp.html'
context = {'sh_domain': short_domain, 'full_url': url, 'ip_address': ip, 'registrar': res['registrar'],
- 'creation_date': res['creation_date'],'expiration_date': res['expiration_date'],
- 'name_servers': ', '.join(res['name_servers']),'org': res['org'],
- 'mails': mails, 'subdomain_mails': subdomain_mails_cleaned, 'subdomain_socials': social_medias,
- 'subdomain_ip': subdomain_ip,
- 'subdomains': subdomains, 'fb_links': common_socials['Facebook'],
- 'tw_links': common_socials['Twitter'], 'inst_links': common_socials['Instagram'],
- 'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
- 'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
- 'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
- 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
- 'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
- 'javascript_frameworks': javascript_frameworks,
- 'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
- 'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
- 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark,
- 'a_sml': total_links_counter, 'acc_sml': accessed_links_counter, 'add_mails': emails_amount}
+ 'creation_date': res['creation_date'],'expiration_date': res['expiration_date'],
+ 'name_servers': ', '.join(res['name_servers']),'org': res['org'],
+ 'mails': mails, 'subdomain_mails': subdomain_mails_cleaned, 'subdomain_socials': social_medias,
+ 'subdomain_ip': subdomain_ip,
+ 'subdomains': subdomains, 'fb_links': common_socials['Facebook'],
+ 'tw_links': common_socials['Twitter'], 'inst_links': common_socials['Instagram'],
+ 'tg_links': common_socials['Telegram'], 'tt_links': common_socials['TikTok'],
+ 'li_links': common_socials['LinkedIn'], 'vk_links': common_socials['VKontakte'],
+ 'yt_links': common_socials['YouTube'], 'wc_links': common_socials['WeChat'],
+ 'ok_links': common_socials['Odnoklassniki'], 'robots_txt_result': robots_txt_result, 'sitemap_xml_result': sitemap_xml_result,
+ 'sitemap_links': sitemap_links_status, 'web_servers': web_servers, 'cms': cms, 'programming_languages': programming_languages, 'web_frameworks': web_frameworks, 'analytics': analytics,
+ 'javascript_frameworks': javascript_frameworks,
+ 'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
+ 'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
+ 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark,
+ 'a_sml': total_links_counter, 'acc_sml': accessed_links_counter, 'add_mails': emails_amount, 'dorking_status': dorking_status}
html_report_name = report_folder + '//' + casename
if generate_report(context, html_report_name, template_path):
print(Fore.GREEN + "HTML report for {} case was created at {}".format(''.join(short_domain), report_ctime) + Style.RESET_ALL)
print(Fore.GREEN + f"Scan elapsed time: {end}" + Style.RESET_ALL)
- robots_content, sitemap_content, sitemap_links_content = fp.get_db_columns(report_folder) #, dorking_content was removed here
+ with open(dorking_file_path, 'r') as df:
+ dorking_content = df.read()
+ robots_content, sitemap_content, sitemap_links_content, dorking_content = fp.get_db_columns(report_folder) #, dorking_content was removed here
pdf_blob = fp.get_blob(html_report_name)
- db.insert_blob('HTML', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content) #, dorking_content was removed here
+ db.insert_blob('HTML', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content, dorking_content) #, dorking_content was removed here
except Exception as e:
print(Fore.RED + 'Unable to create HTML report. See journal for details')
logging.error(f'HTML REPORT CREATION: ERROR. REASON: {e}')
From 14105a2c65deb75c4cf5ccf89bb6391b37b96bca Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:08:37 +0300
Subject: [PATCH 51/72] Reactivated dorking results writing in DB
---
service/db_processing.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/service/db_processing.py b/service/db_processing.py
index c82c5a6..429c966 100644
--- a/service/db_processing.py
+++ b/service/db_processing.py
@@ -98,16 +98,16 @@ def db_report_recreate(extracted_folder_name, id_to_extract):
except Exception as e:
print(Fore.RED + "Error appeared when recreating report from database. Reason: {}".format(e))
-def insert_blob(report_file_type, pdf_blob, db_casename, creation_date, case_comment, robots, sitemap_xml, sitemap_links): #, dorking_results was removed here
+def insert_blob(report_file_type, pdf_blob, db_casename, creation_date, case_comment, robots, sitemap_xml, sitemap_links, dorking_results): #, dorking_results was removed here
try:
sqlite_connection = sqlite3.connect('report_storage.db')
cursor = sqlite_connection.cursor()
print(Fore.GREEN + "Connected to report storage database")
sqlite_insert_blob_query = """INSERT INTO report_storage
- (report_file_extension, report_content, creation_date, target, comment, robots_text, sitemap_text, sitemap_file) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"""
+ (report_file_extension, report_content, creation_date, target, comment, sitemap_file, robots_text, sitemap_text, dorks_results) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"""
#dorks_results was removed between comment and robots_text
- data_tuple = (report_file_type, pdf_blob, creation_date, db_casename, case_comment, robots, sitemap_links, sitemap_xml) #dorking_results was removed between case_comments and robots
+ data_tuple = (report_file_type, pdf_blob, creation_date, db_casename, case_comment, sitemap_xml, robots, sitemap_links, dorking_results)
cursor.execute(sqlite_insert_blob_query, data_tuple)
sqlite_connection.commit()
print(Fore.GREEN + "Scanning results are successfully saved in report storage database")
From 48a0bbf6d41a9d8c6fac187f5efb3def002eaf8c Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:09:13 +0300
Subject: [PATCH 52/72] Reactivated dorking status return in HTML report
---
datagather_modules/data_assembler.py | 35 ++++++++++++++--------------
1 file changed, 18 insertions(+), 17 deletions(-)
diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py
index d0fd42d..5546664 100644
--- a/datagather_modules/data_assembler.py
+++ b/datagather_modules/data_assembler.py
@@ -96,11 +96,6 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks = np.get_technologies(url)
print(Fore.GREEN + 'Processing Shodan InternetDB search' + Style.RESET_ALL)
ports, hostnames, cpes, tags, vulns = np.query_internetdb(ip, report_file_type)
- print(Fore.GREEN + 'Processing Google Dorking' + Style.RESET_ALL)
- #if report_file_type == 'pdf' or report_file_type == 'html':
- #dorking_status = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain))
- #elif report_file_type == 'xlsx':
- #dorking_status, dorking_results = dp.transfer_results_to_xlsx(dp.get_dorking_query(short_domain))
common_socials = {key: social_medias.get(key, []) + sd_socials.get(key, []) for key in set(social_medias) | set(sd_socials)}
for key in common_socials:
common_socials[key] = list(set(common_socials[key]))
@@ -132,20 +127,22 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
+ dorking_status = 'Google Dorking mode was not selected for this scan'
+ dorking_file_path = 'Google Dorking mode was not selected for this scan'
elif dorking_flag == 'basic':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -154,7 +151,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
- website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list]
+ website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list, dorking_status, dorking_file_path]
elif report_file_type == 'xlsx':
if pagesearch_flag.lower() == 'y':
@@ -181,20 +178,22 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
+ dorking_status = 'Google Dorking mode was not selected for this scan'
+ dorking_file_path = 'Google Dorking mode was not selected for this scan'
elif dorking_flag == 'basic':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -203,7 +202,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
- website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter]
+ website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, dorking_status, dorking_file_path]
elif report_file_type == 'html':
if pagesearch_flag.lower() == 'y':
@@ -230,20 +229,22 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
+ dorking_status = 'Google Dorking mode was not selected for this scan'
+ dorking_file_path = 'Google Dorking mode was not selected for this scan'
elif dorking_flag == 'basic':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -252,7 +253,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
- website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list]
+ website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, keywords_messages_list, dorking_status, dorking_file_path]
report_info_array = [casename, db_casename, db_creation_date, report_folder, ctime, report_file_type, report_ctime]
logging.info(f'### THIS LOG PART FOR {casename} CASE, TIME: {ctime} ENDS HERE')
From 3e0552eb4aa0b533395a833801b4e6451180dbe7 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:11:36 +0300
Subject: [PATCH 53/72] Reactivated dorks results writing in DB / improved
readability of DB information if something were not found
---
service/files_processing.py | 20 ++++++++++----------
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/service/files_processing.py b/service/files_processing.py
index aaa27df..7ff65c0 100644
--- a/service/files_processing.py
+++ b/service/files_processing.py
@@ -17,24 +17,24 @@ def get_db_columns(report_folder):
with open(report_folder + "//" + '01-robots.txt', 'r') as robots_file:
robots_content = robots_file.read()
except:
- robots_content = 0
+ robots_content = 'robots.txt: empty'
pass
try:
with open(report_folder + "//" + '02-sitemap.txt', 'r') as sitemap_xml:
sitemap_content = sitemap_xml.read()
except:
- sitemap_content = 0
+ sitemap_content = 'sitemap.xml: empty'
pass
try:
with open(report_folder + "//" + '03-sitemap_links.txt', 'r') as sitemap_links:
sitemap_links_content = sitemap_links.read()
except:
- sitemap_links_content = 0
+ sitemap_links_content = 'Sitemap links: empty'
pass
- #try:
- #with open(report_folder + "//" + '04-dorking_results.txt', 'r') as dorking_file:
- #dorking_content = dorking_file.read()
- #except:
- #dorking_content = 0
- #pass
- return robots_content, sitemap_content, sitemap_links_content
+ try:
+ with open(report_folder + "//" + '04-dorking_results.txt', 'r') as dorking_file:
+ dorking_content = dorking_file.read()
+ except:
+ dorking_content = 'Dorking content: empty'
+ pass
+ return robots_content, sitemap_content, sitemap_links_content, dorking_content
From 2a7b7f5384c0c9bfcfc7da9f5342bdb3a24e7f7b Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:13:36 +0300
Subject: [PATCH 54/72] Added returns in dorking functions and some exception
handlers
---
dorking/dorking_handler.py | 47 +++++++++++++++++++++-----------------
1 file changed, 26 insertions(+), 21 deletions(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index 301d953..b217b7e 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -49,29 +49,34 @@ def solid_google_dorking(query, pages=100):
def save_results_to_txt(folderpath, queries, pages=10):
- txt_writepath = folderpath + '//04-dorking_results.txt'
- total_results = []
+ try:
+ txt_writepath = folderpath + '//04-dorking_results.txt'
+ total_results = []
- with open(txt_writepath, 'w') as f:
- print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
- for i, query in enumerate(queries, start=1):
- f.write(f"QUERY #{i}: {query}\n")
- results = solid_google_dorking(query, pages)
- if not results:
- f.write("=> NO RESULT FOUND\n")
- total_results.append((query, 0))
- else:
- total_results.append((query, len(results)))
- for result in results:
- f.write(f"=> {result}\n")
- f.write("\n")
+ with open(txt_writepath, 'w') as f:
+ print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
+ for i, query in enumerate(queries, start=1):
+ f.write(f"QUERY #{i}: {query}\n")
+ results = solid_google_dorking(query, pages)
+ if not results:
+ f.write("=> NO RESULT FOUND\n")
+ total_results.append((query, 0))
+ else:
+ total_results.append((query, len(results)))
+ for result in results:
+ f.write(f"=> {result}\n")
+ f.write("\n")
- print(Fore.GREEN + "Google Dorking end. Results successfully saved in TXT file\n" + Style.RESET_ALL)
- print(Fore.GREEN + "During Google Dorking:")
- for query, count in total_results:
- if count == 0:
- count = 'no results'
- print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f'{query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
+ print(Fore.GREEN + "Google Dorking end. Results successfully saved in TXT file\n" + Style.RESET_ALL)
+ print(Fore.GREEN + "During Google Dorking:")
+ for query, count in total_results:
+ if count == 0:
+ count = 'no results'
+ print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f'{query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
+ return 'Successfully dorked domain', txt_writepath
+ except Exception:
+ print(Fore.RED + 'Error appeared while trying to dork target. See journal for details')
+ return 'Domain dorking failed. See journal for details'
def transfer_results_to_xlsx(queries, pages=10):
dorking_return_list = []
From f3e778a60dde14ebab2b7cacde5e7afbaff88df3 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:28:35 +0300
Subject: [PATCH 55/72] Reactivated dorking results return to DB / fixed some
typos when writing data to report DB
---
reporting_modules/pdf_report_creation.py | 16 ++++++++++------
1 file changed, 10 insertions(+), 6 deletions(-)
diff --git a/reporting_modules/pdf_report_creation.py b/reporting_modules/pdf_report_creation.py
index bba1933..3d97fa8 100644
--- a/reporting_modules/pdf_report_creation.py
+++ b/reporting_modules/pdf_report_creation.py
@@ -77,6 +77,8 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
total_links_counter = data_array[40]
accessed_links_counter = data_array[41]
keywords_messages_list = data_array[42]
+ dorking_status = data_array[43]
+ dorking_file_path = data_array[44]
casename = report_info_array[0]
db_casename = report_info_array[1]
db_creation_date = report_info_array[2]
@@ -124,7 +126,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'javascript_frameworks': javascript_frameworks,
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
- 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark}
+ 'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark, 'dorking_status': dorking_status}
elif pagesearch_keyword == 'y':
template_path = pdf_templates_path + '//ps_report_temp.html'
@@ -145,7 +147,7 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark,
'acc_sd': accessible_subdomains, 'add_mails': emails_amount, 'extr_files': files_counter, 'cookies': cookies_counter, 'apis': api_keys_counter,
- 'wpe': website_elements_counter, 'exp_pass': exposed_passwords_counter, 'kml': keywords_messages_list}
+ 'wpe': website_elements_counter, 'exp_pass': exposed_passwords_counter, 'kml': keywords_messages_list, 'dorking_status': dorking_status}
elif pagesearch_keyword == 'si':
template_path = pdf_templates_path + '//si_report_temp.html'
@@ -165,16 +167,18 @@ def report_assembling(short_domain, url, case_comment, data_array, report_info_a
'ctime': report_ctime, 'a_tsf': subdomains_amount, 'mx_records': mx_records, 'issuer': issuer, 'subject': subject, 'notBefore': notBefore, 'notAfter': notAfter,
'commonName': commonName, 'serialNumber': serialNumber, 'ports': ports, 'hostnames': hostnames, 'cpes': cpes,
'tags': tags, 'vulns': vulns, 'a_tsm': total_socials, 'pagesearch_ui_mark': pagesearch_ui_mark,
- 'a_sml': total_links_counter, 'acc_sml': accessed_links_counter, 'add_mails': emails_amount}
+ 'a_sml': total_links_counter, 'acc_sml': accessed_links_counter, 'add_mails': emails_amount, 'dorking_status': dorking_status}
pdf_report_name = report_folder + '//' + casename
if create_pdf(template_path, pdf_report_name, context):
print(Fore.GREEN + "PDF report for {} case was created at {}".format(''.join(short_domain), report_ctime) + Style.RESET_ALL)
print(Fore.GREEN + f"Scan elapsed time: {end}" + Style.RESET_ALL)
- robots_content, sitemap_content, sitemap_links_content = fp.get_db_columns(report_folder) #, dorking_content was removed here
+ with open(dorking_file_path, 'r') as df:
+ dorking_content = df.read()
+ robots_content, sitemap_content, sitemap_links_content, dorking_content = fp.get_db_columns(report_folder) #, dorking_content was removed here
pdf_blob = fp.get_blob(pdf_report_name)
- db.insert_blob('HTML', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content) #, dorking_content was removed here
+ db.insert_blob('PDF', pdf_blob, db_casename, db_creation_date, case_comment, robots_content, sitemap_content, sitemap_links_content, dorking_content) #, dorking_content was removed here
except Exception as e:
print(Fore.RED + 'Unable to create PDF report. See journal for details')
- logging.error(f'XLSX REPORT CREATION: ERROR. REASON: {e}')
+ logging.error(f'PDF REPORT CREATION: ERROR. REASON: {e}')
From 4028d26b3349b1fc3e1fe38a47d4e94a3107cd8e Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 14:29:42 +0300
Subject: [PATCH 56/72] Fixed bug when 0 found subdomains stops DPULSE when PS
is activated
---
datagather_modules/data_assembler.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py
index 5546664..1d98951 100644
--- a/datagather_modules/data_assembler.py
+++ b/datagather_modules/data_assembler.py
@@ -113,7 +113,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
else:
print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
ps_emails_return = ""
- accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No data was gathered because no subdomains were found'
+ accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = 'No data was gathered because no subdomains were found'
pass
elif pagesearch_flag.lower() == 'si':
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH SITEMAP INSPECTION]\n" + Style.RESET_ALL)
@@ -163,7 +163,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN END: PAGESEARCH]\n" + Style.RESET_ALL)
else:
print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
- accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No data was gathered because no subdomains were found'
+ accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = 'No data was gathered because no subdomains were found'
ps_emails_return = ""
pass
elif pagesearch_flag.lower() == 'si':
@@ -215,7 +215,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
else:
print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
ps_emails_return = ""
- accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No data was gathered because no subdomains were found'
+ accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = 'No data was gathered because no subdomains were found'
pass
elif pagesearch_flag.lower() == 'si':
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH SITEMAP INSPECTION]\n" + Style.RESET_ALL)
From 06c3154743e34670803030334954cca64ffaf681 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 16:14:42 +0300
Subject: [PATCH 57/72] Fixed wrong keywords search result printing in reports
(PS)
---
datagather_modules/data_assembler.py | 12 +++++++++---
1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py
index 1d98951..515f330 100644
--- a/datagather_modules/data_assembler.py
+++ b/datagather_modules/data_assembler.py
@@ -108,12 +108,15 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
to_search_array = [subdomains, social_medias, sd_socials]
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH]\n" + Style.RESET_ALL)
ps_emails_return, accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter, keywords_messages_list = normal_search(to_search_array, report_folder, keywords, keywords_flag)
+ if len(keywords_messages_list) == 0:
+ keywords_messages_list = ['No keywords were found']
total_links_counter = accessed_links_counter = 0
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN END: PAGESEARCH]\n" + Style.RESET_ALL)
else:
print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
ps_emails_return = ""
- accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = 'No data was gathered because no subdomains were found'
+ accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No data was gathered because no subdomains were found'
+ keywords_messages_list = ['No data was gathered because no subdomains were found']
pass
elif pagesearch_flag.lower() == 'si':
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH SITEMAP INSPECTION]\n" + Style.RESET_ALL)
@@ -163,7 +166,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN END: PAGESEARCH]\n" + Style.RESET_ALL)
else:
print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
- accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = 'No data was gathered because no subdomains were found'
+ accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No data was gathered because no subdomains were found'
ps_emails_return = ""
pass
elif pagesearch_flag.lower() == 'si':
@@ -211,11 +214,14 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH]\n" + Style.RESET_ALL)
ps_emails_return, accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter, website_elements_counter, exposed_passwords_counter, keywords_messages_list = normal_search(to_search_array, report_folder, keywords, keywords_flag)
total_links_counter = accessed_links_counter = 0
+ if len(keywords_messages_list) == 0:
+ keywords_messages_list = ['No keywords were found']
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN END: PAGESEARCH]\n" + Style.RESET_ALL)
else:
print(Fore.RED + "Cant start PageSearch because no subdomains were detected")
ps_emails_return = ""
- accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = keywords_messages_list = 'No data was gathered because no subdomains were found'
+ accessible_subdomains = files_counter = cookies_counter = api_keys_counter = website_elements_counter = exposed_passwords_counter = total_links_counter = accessed_links_counter = emails_amount = 'No data was gathered because no subdomains were found'
+ keywords_messages_list = ['No data was gathered because no subdomains were found']
pass
elif pagesearch_flag.lower() == 'si':
print(Fore.LIGHTMAGENTA_EX + "\n[EXTENDED SCAN START: PAGESEARCH SITEMAP INSPECTION]\n" + Style.RESET_ALL)
From 24de157d6ee36e3190c99d7521489d0a1841d6cb Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Thu, 26 Sep 2024 16:23:03 +0300
Subject: [PATCH 58/72] Added possibility to recreate HTML reports from report
storage DB
---
service/db_processing.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/service/db_processing.py b/service/db_processing.py
index 429c966..738e327 100644
--- a/service/db_processing.py
+++ b/service/db_processing.py
@@ -78,6 +78,9 @@ def db_report_recreate(extracted_folder_name, id_to_extract):
elif str(report_file_extension) == 'XLSX':
with open(extracted_folder_name + '//report_extracted.xlsx', 'wb') as file:
file.write(blob_data)
+ elif str(report_file_extension) == 'HTML':
+ with open(extracted_folder_name + '//report_extracted.html', 'wb') as file:
+ file.write(blob_data)
cursor.execute("SELECT dorks_results FROM report_storage WHERE id=?", (id_to_extract,))
dorks_results = (cursor.fetchone())[0]
with open(extracted_folder_name + '//dorks_extracted.txt', 'w') as file:
From 87647479cac266db12b4d2d53d3e98d47b37c557 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Fri, 27 Sep 2024 03:06:35 +0300
Subject: [PATCH 59/72] Added dynamical counter of used dorking queries (#85)
---
dorking/dorking_handler.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index b217b7e..e4c5a36 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -52,9 +52,10 @@ def save_results_to_txt(folderpath, queries, pages=10):
try:
txt_writepath = folderpath + '//04-dorking_results.txt'
total_results = []
-
+ total_dorks_amount = len(queries)
with open(txt_writepath, 'w') as f:
print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
+ dorked_query_counter = 0
for i, query in enumerate(queries, start=1):
f.write(f"QUERY #{i}: {query}\n")
results = solid_google_dorking(query, pages)
@@ -66,6 +67,9 @@ def save_results_to_txt(folderpath, queries, pages=10):
for result in results:
f.write(f"=> {result}\n")
f.write("\n")
+ dorked_query_counter += 1
+ print(Fore.GREEN + f" Dorking with " + Style.RESET_ALL + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{dorked_query_counter}/{total_dorks_amount}" + Style.RESET_ALL + Fore.GREEN + " dork" + Style.RESET_ALL, end="\r")
+
print(Fore.GREEN + "Google Dorking end. Results successfully saved in TXT file\n" + Style.RESET_ALL)
print(Fore.GREEN + "During Google Dorking:")
From 57bdfd433450ee7eed986ef1d375d964f6c88996 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 16:34:46 +0300
Subject: [PATCH 60/72] Some cosmetical improvements in GD returns
---
dorking/dorking_handler.py | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index e4c5a36..fb74a6c 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -48,7 +48,7 @@ def solid_google_dorking(query, pages=100):
#return "Google Dorking results file was not created"
-def save_results_to_txt(folderpath, queries, pages=10):
+def save_results_to_txt(folderpath, table, queries, pages=10):
try:
txt_writepath = folderpath + '//04-dorking_results.txt'
total_results = []
@@ -69,15 +69,13 @@ def save_results_to_txt(folderpath, queries, pages=10):
f.write("\n")
dorked_query_counter += 1
print(Fore.GREEN + f" Dorking with " + Style.RESET_ALL + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{dorked_query_counter}/{total_dorks_amount}" + Style.RESET_ALL + Fore.GREEN + " dork" + Style.RESET_ALL, end="\r")
-
-
print(Fore.GREEN + "Google Dorking end. Results successfully saved in TXT file\n" + Style.RESET_ALL)
- print(Fore.GREEN + "During Google Dorking:")
+ print(Fore.GREEN + f"During Google Dorking with {table.upper()}:")
for query, count in total_results:
if count == 0:
count = 'no results'
print(Fore.GREEN + f"[+] Found results for " + Fore.LIGHTCYAN_EX + f'{query}' + Fore.GREEN + ' query: ' + Fore.LIGHTCYAN_EX + f'{count}' + Style.RESET_ALL)
- return 'Successfully dorked domain', txt_writepath
+ return f'Successfully dorked domain with {table.upper()} dorks table', txt_writepath
except Exception:
print(Fore.RED + 'Error appeared while trying to dork target. See journal for details')
return 'Domain dorking failed. See journal for details'
From cc1d5b910c30b4e749825e18a71a8432be3236be Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 16:42:23 +0300
Subject: [PATCH 61/72] Delete dorking/iot_dorking.db
---
dorking/iot_dorking.db | Bin 20480 -> 0 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
delete mode 100644 dorking/iot_dorking.db
diff --git a/dorking/iot_dorking.db b/dorking/iot_dorking.db
deleted file mode 100644
index 3a1d4c954b9a5d6488714bb946218042a9e63344..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 20480
zcmeI4J!~UI6vt;g{#qwH&q+3MauRsuhD3MA-JY?|Nku>_6fWoFf+La2+Sq%rBKC5-
z>*GsT+?7`W#YrG4BoGw^Pyq!ER7V5NRaA(t3K~R#1T(RBcI_*mN)f%0_j%|4=Di*N
z}AOR$R1dsp{Kmter2_OL^fCP{L5;!KXo6T3wo}~wob9m&2H;%+DYA1AW
z4BbI@dYXHB&)#g?W_$CgZQGn$yE<_C?m1DpcKb0}s8p-;&LhV?JwNL7{NSZfon$8Z
z)RbN`SM`!slZ!iEucmFa+V(T{p4r-Io2~urZF9f1b#3217cZ)5ZToqVdN{qiw{>-M
z@4&obAFQf==H`BTXR9SHe${TZ*Ywhw#qt%qN{KhFYlh-xeB+H<#sVW3Mk(iI
zFIFq_j5J1S=ng_ZP>DH}=!$#tz=;z}Wk%|wikn`~^<%vPsvwL4*XhTJWzNXuQ9<7y
zcu{hVWnE=LH@N9_)n+TRsb)?z3{|rkRnqkbQQ#lNnMz5O^xSl>B~5L(=N=AUhzpjA
zO1CbG5$lTuMy%0(*VA%d$&Hk}kYnWhXm!(yTNqizy3u$dRu`G-&}=sAO3orj85v6@88uDDVo}CILB@Pu##~OuY*t3bWXxn3)3oFJe_kbU1-^&Z;Y0BR;4SzP
zx_pCsK%fq1c$I(3clo#cXZ|j~bAmwdQb+&^AOR$R1dsp{Kmter2_OL^aB>9Buys0e
zqaX7B7ukhzD&_w#EVIT~o%H|blfVane_il$%ZHVe=X4`{Qu$-v&Q>P`~QVRPW%6PgPk9*PW=B|!aC*u
zXOj+T|DPpt+W*fiGD{o(ups6C%isS~_=AWS4kUmCkN^@u0!RP}AOR$R1dsp{KmthM
Ov=GoKt5Wqp0LH&TS=^%l
From c3a3835a84fc4da4599ceaea43468ef4516e36f9 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 09:43:05 -0400
Subject: [PATCH 62/72] Fixed ports dorking
---
dorking/iot_dorking.db | Bin 0 -> 20480 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 dorking/iot_dorking.db
diff --git a/dorking/iot_dorking.db b/dorking/iot_dorking.db
new file mode 100644
index 0000000000000000000000000000000000000000..6328bd0182598d66748fa3262a6792832b0b954e
GIT binary patch
literal 20480
zcmeI4KWyVv9LJyioIg(M_eAWpxh`dj;b{rkO_
zPrJBZt8d!eATd%epYv$h7f!p`4@XEE57nPh@sVEPgzv$@?N8R2qcqf!6
zYO0Sd=WT1%DCoAhx$F0AeY@RpUU&Aa_D;uY@3&gketY}IzH=qI$<{m0Tiklm-rd{2
zzO{E?z2O|J%6`_?erIR9%@@D!v^#4?VXaEDrc+UfKj?Y)rk}?hkA@f5UAeFBHF>I4
z)qht~l_iv8&I{BAKT1qfnFi-@2B+`|oWK#Z;2Ny3zt}lDV_&k5*a>@^?XXSuf~diP
z1dsp{Kmter2_OL^fCP{L50c
zgZ^DN@FJ_UM9I~O<-Y4jv*sct^~r4L4Z>j{jRk2O@@MkEjf~|YCACS#9lz%dqj?i#
zK{yILw;vg0CU@H(4*XHPM%j=~=mmHDL%CUVKGDn_jYHXNPL>=F2cuwk964q|mh`-2
zuVr0sxaZv*A4LU(tmGvvyIEc5LxL>l|^%eDLx>UA^{|T1dsp{Kmter2_OL^fCP}he@>uG
zH}pvcKl1{>CAz6hj3@H{=jhd`mB{~>%CtV6P3QlMv60UI7ff24R;2R(VqB2U|6zr0
zObcf6e-=A4`M;5DmdyX>RmVCo!zq_&*;nGZX*kl1?iA&&KPd
z;{Qy%v2^^eC0nK9f0}es@xLnG|5xA-!oP4J0VIF~kN^@u0!RP}AOR$R1dsp{KmyMS
N0Yjk`MgAXv(%<7Q
Date: Tue, 1 Oct 2024 17:02:32 +0300
Subject: [PATCH 63/72] Added function to get columns amount in dorks db
Added return of dorks amount in DB in pre-scan summary
---
dorking/dorking_handler.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index fb74a6c..dce6198 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -22,6 +22,15 @@ def get_dorking_query(short_domain, dorking_db_path, table):
conn.close()
return search_query
+def get_columns_amount(dorking_db_path, table):
+ conn = sqlite3.connect(dorking_db_path)
+ cursor = conn.cursor()
+ cursor.execute(f"SELECT COUNT(*) FROM {table}")
+ row_count = cursor.fetchone()[0]
+ conn.close()
+ return row_count
+
+
def solid_google_dorking(query, pages=100):
try:
browser = mechanicalsoup.StatefulBrowser()
From b233c31b19ea780705dbc25de7dfb32255d4e0d7 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 17:03:01 +0300
Subject: [PATCH 64/72] Added dorking amount output in pre-scan summary
---
dpulse.py | 11 +++++++----
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/dpulse.py b/dpulse.py
index 09d0bc2..a5cac2d 100644
--- a/dpulse.py
+++ b/dpulse.py
@@ -9,7 +9,7 @@
from config_processing import create_config, check_cfg_presence, read_config
import db_processing as db
import os
-from dorking_handler import dorks_files_check
+from dorking_handler import dorks_files_check, get_columns_amount
db.db_creation('report_storage.db')
dorks_files_check()
@@ -134,13 +134,16 @@ def run():
break
else:
if dorking_flag.lower() == 'basic':
- dorking_ui_mark = 'Yes, Basic dorking (N dorks)'
+ row_count = get_columns_amount('dorking//basic_dorking.db', 'basic_dorks')
+ dorking_ui_mark = f'Yes, Basic dorking ({row_count} dorks)'
elif dorking_flag.lower() == 'iot':
- dorking_ui_mark = 'Yes, IoT dorking (N dorks)'
+ row_count = get_columns_amount('dorking//iot_dorking.db', 'iot_dorks')
+ dorking_ui_mark = f'Yes, IoT dorking ({row_count} dorks)'
elif dorking_flag.lower() == 'none':
dorking_ui_mark = 'No'
elif dorking_flag.lower() == 'files':
- dorking_ui_mark = 'Yes, Files dorking (N dorks)'
+ row_count = get_columns_amount('dorking//files_dorking.db', 'files_dorks')
+ dorking_ui_mark = f'Yes, Files dorking ({row_count} dorks)'
print(Fore.LIGHTMAGENTA_EX + "\n[PRE-SCAN SUMMARY]\n" + Style.RESET_ALL)
print(Fore.GREEN + "Determined target: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + short_domain + Style.RESET_ALL)
print(Fore.GREEN + "Report type: " + Fore.LIGHTCYAN_EX + Style.BRIGHT + report_filetype.lower() + Style.RESET_ALL)
From 7e478b31111f7f49ab5ccddf72789a47b986cabd Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 17:33:58 +0300
Subject: [PATCH 65/72] Code clean-up and some exceptions fix
---
dorking/dorking_handler.py | 6 +-----
1 file changed, 1 insertion(+), 5 deletions(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index dce6198..a421bd7 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -30,7 +30,6 @@ def get_columns_amount(dorking_db_path, table):
conn.close()
return row_count
-
def solid_google_dorking(query, pages=100):
try:
browser = mechanicalsoup.StatefulBrowser()
@@ -54,8 +53,6 @@ def solid_google_dorking(query, pages=100):
return result_query
except requests.exceptions.ConnectionError as e:
print(Fore.RED + "Error while establishing connection with domain. No results will appear. Reason: {}".format(e) + Style.RESET_ALL)
- #return "Google Dorking results file was not created"
-
def save_results_to_txt(folderpath, table, queries, pages=10):
try:
@@ -87,7 +84,7 @@ def save_results_to_txt(folderpath, table, queries, pages=10):
return f'Successfully dorked domain with {table.upper()} dorks table', txt_writepath
except Exception:
print(Fore.RED + 'Error appeared while trying to dork target. See journal for details')
- return 'Domain dorking failed. See journal for details'
+ return 'Domain dorking failed. See journal for details', txt_writepath
def transfer_results_to_xlsx(queries, pages=10):
dorking_return_list = []
@@ -112,7 +109,6 @@ def dorks_files_check():
dorks_files_counter += 1
else:
pass
-
if dorks_files_counter == 3:
print(Fore.GREEN + "Dorks databases presence: OK" + Style.RESET_ALL)
else:
From 04a8e31cae0fc4dd6cde4ce407921daeaf9759df Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 20:54:23 +0300
Subject: [PATCH 66/72] Reactivated dorking results sheet in XLSX report
---
dorking/dorking_handler.py | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/dorking/dorking_handler.py b/dorking/dorking_handler.py
index a421bd7..4c6bbdd 100644
--- a/dorking/dorking_handler.py
+++ b/dorking/dorking_handler.py
@@ -86,7 +86,10 @@ def save_results_to_txt(folderpath, table, queries, pages=10):
print(Fore.RED + 'Error appeared while trying to dork target. See journal for details')
return 'Domain dorking failed. See journal for details', txt_writepath
-def transfer_results_to_xlsx(queries, pages=10):
+def transfer_results_to_xlsx(table, queries, pages=10):
+ print(Fore.GREEN + "Started Google Dorking. Please, be patient, it may take some time")
+ dorked_query_counter = 0
+ total_dorks_amount = len(queries)
dorking_return_list = []
for i, query in enumerate(queries, start=1):
dorking_return_list.append(f"QUERY #{i}: {query}\n")
@@ -96,8 +99,11 @@ def transfer_results_to_xlsx(queries, pages=10):
else:
for result in results:
dorking_return_list.append(f"{result}\n")
+ dorked_query_counter += 1
dorking_return_list.append("\n")
- #return "File with gathered links was successfully created", dorking_return_list
+ print(Fore.GREEN + f" Dorking with " + Style.RESET_ALL + Fore.LIGHTCYAN_EX + Style.BRIGHT + f"{dorked_query_counter}/{total_dorks_amount}" + Style.RESET_ALL + Fore.GREEN + " dork" + Style.RESET_ALL, end="\r")
+ print(Fore.GREEN + "Google Dorking end. Results successfully saved in XLSX report\n" + Style.RESET_ALL)
+ return f'Successfully dorked domain with {table.upper()} dorks table', dorking_return_list
def dorks_files_check():
dorks_path = 'dorking//'
From 59bce2dc2591d6dc96bfe2034993d16ee92c99d7 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 20:54:47 +0300
Subject: [PATCH 67/72] Reactivated dorking results sheet in XLSX report
---
datagather_modules/data_assembler.py | 22 +++++++++++-----------
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/datagather_modules/data_assembler.py b/datagather_modules/data_assembler.py
index 515f330..bf0d4b1 100644
--- a/datagather_modules/data_assembler.py
+++ b/datagather_modules/data_assembler.py
@@ -135,17 +135,17 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
elif dorking_flag == 'basic':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -182,21 +182,21 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
if dorking_flag == 'none':
pass
dorking_status = 'Google Dorking mode was not selected for this scan'
- dorking_file_path = 'Google Dorking mode was not selected for this scan'
+ dorking_results = 'Google Dorking mode was not selected for this scan'
elif dorking_flag == 'basic':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -205,7 +205,7 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
web_servers, cms, programming_languages, web_frameworks, analytics, javascript_frameworks, ports,
hostnames, cpes, tags, vulns, common_socials, total_socials, ps_emails_return,
accessible_subdomains, emails_amount, files_counter, cookies_counter, api_keys_counter,
- website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, dorking_status, dorking_file_path]
+ website_elements_counter, exposed_passwords_counter, total_links_counter, accessed_links_counter, dorking_status, dorking_results]
elif report_file_type == 'html':
if pagesearch_flag.lower() == 'y':
@@ -240,17 +240,17 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
elif dorking_flag == 'basic':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'iot':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
elif dorking_flag == 'files':
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
- dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, dp.get_dorking_query(short_domain, dorking_db_path, table))
+ dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
From 8977ebdf94c8d3c84568dc90d34c4735f7a86036 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 20:55:06 +0300
Subject: [PATCH 68/72] Reactivated dorking results sheet in XLSX report
---
reporting_modules/xlsx_report_creation.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/reporting_modules/xlsx_report_creation.py b/reporting_modules/xlsx_report_creation.py
index e1b9551..c0bc6ba 100644
--- a/reporting_modules/xlsx_report_creation.py
+++ b/reporting_modules/xlsx_report_creation.py
@@ -46,7 +46,7 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
cpes = data_array[27]
tags = data_array[28]
vulns = data_array[29]
- #dorking_status = data_array[30]
+ dorking_status = data_array[42]
common_socials = data_array[30]
total_socials = data_array[31]
ps_emails_return = data_array[32]
@@ -60,7 +60,7 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
total_links_counter = data_array[40]
accessed_links_counter = data_array[41]
#keywords_messages_list = data_array[42]
- #dorking_results = data_array[43]
+ dorking_results = data_array[43]
casename = report_info_array[0]
db_casename = report_info_array[1]
db_creation_date = report_info_array[2]
@@ -314,8 +314,8 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
ws = wb['DORKING RESULTS']
ws.column_dimensions['A'].width = 80
- #for i in range(len(dorking_results)):
- #ws[f"A{i + 1}"] = str(dorking_results[i])
+ for i in range(len(dorking_results)):
+ ws[f"A{i + 1}"] = str(dorking_results[i])
report_file = report_folder + "//" + casename
wb.save(report_file)
From 5e9d74f20ca049c35e207ebb2e20d48693880640 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 21:04:17 +0300
Subject: [PATCH 69/72] Reactivated Google Dorking status field in XLSX report
---
reporting_modules/xlsx_report_creation.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/reporting_modules/xlsx_report_creation.py b/reporting_modules/xlsx_report_creation.py
index c0bc6ba..7d3af94 100644
--- a/reporting_modules/xlsx_report_creation.py
+++ b/reporting_modules/xlsx_report_creation.py
@@ -150,7 +150,7 @@ def create_report(short_domain, url, case_comment, data_array, report_info_array
ws['B3'] = robots_txt_result
ws['B4'] = sitemap_xml_result
ws['B5'] = sitemap_links_status
- #ws['B6'] = dorking_status
+ ws['B6'] = dorking_status
ws['B7'] = pagesearch_ui_mark
ws['B8'] = report_ctime
From 661c4764d09b2f942b86298385c8f6dc95671818 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 21:07:56 +0300
Subject: [PATCH 70/72] Updated README.md
---
README.md | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/README.md b/README.md
index c1170b2..8490c51 100644
--- a/README.md
+++ b/README.md
@@ -9,13 +9,14 @@
-
-
+
+
-
+
+
> DPULSE was created as a research tool, and it is not intended for criminal activities. Use DPULSE only on allowed domains and for legal purposes!
> You can visit [DPULSE wiki](https://github.com/OSINT-TECHNOLOGIES/dpulse/wiki/DPULSE-WIKI) in order to get more technical information about this project
From a0094b7840a05a0c89029b0c6cce1d0c772fe2fa Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 21:09:27 +0300
Subject: [PATCH 71/72] Update poetry.lock for v1.1.1
---
poetry.lock | 49 +++++++++++++++++--------------------------------
1 file changed, 17 insertions(+), 32 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 82810ec..f2f0bd9 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -993,13 +993,13 @@ files = [
[[package]]
name = "pypdf"
-version = "4.3.1"
+version = "5.0.1"
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418"},
- {file = "pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b"},
+ {file = "pypdf-5.0.1-py3-none-any.whl", hash = "sha256:ff8a32da6c7a63fea9c32fa4dd837cdd0db7966adf6c14f043e3f12592e992db"},
+ {file = "pypdf-5.0.1.tar.gz", hash = "sha256:a361c3c372b4a659f9c8dd438d5ce29a753c79c620dc6e1fd66977651f5547ea"},
]
[package.dependencies]
@@ -1012,17 +1012,6 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"]
image = ["Pillow (>=8.0.0)"]
-[[package]]
-name = "pypng"
-version = "0.20220715.0"
-description = "Pure Python library for saving and loading PNG images"
-optional = false
-python-versions = "*"
-files = [
- {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"},
- {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"},
-]
-
[[package]]
name = "python-bidi"
version = "0.6.0"
@@ -1244,36 +1233,32 @@ files = [
[[package]]
name = "qrcode"
-version = "7.4.2"
+version = "8.0"
description = "QR Code image generator"
optional = false
-python-versions = ">=3.7"
+python-versions = "<4.0,>=3.9"
files = [
- {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"},
- {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"},
+ {file = "qrcode-8.0-py3-none-any.whl", hash = "sha256:9fc05f03305ad27a709eb742cf3097fa19e6f6f93bb9e2f039c0979190f6f1b1"},
+ {file = "qrcode-8.0.tar.gz", hash = "sha256:025ce2b150f7fe4296d116ee9bad455a6643ab4f6e7dce541613a4758cbce347"},
]
[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-pypng = "*"
-typing-extensions = "*"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
-all = ["pillow (>=9.1.0)", "pytest", "pytest-cov", "tox", "zest.releaser[recommended]"]
-dev = ["pytest", "pytest-cov", "tox"]
-maintainer = ["zest.releaser[recommended]"]
+all = ["pillow (>=9.1.0)", "pypng"]
pil = ["pillow (>=9.1.0)"]
-test = ["coverage", "pytest"]
+png = ["pypng"]
[[package]]
name = "reportlab"
-version = "4.2.2"
+version = "4.2.5"
description = "The Reportlab Toolkit"
optional = false
python-versions = "<4,>=3.7"
files = [
- {file = "reportlab-4.2.2-py3-none-any.whl", hash = "sha256:927616931637e2f13e2ee3b3b6316d7a07803170e258621cff7d138bde17fbb5"},
- {file = "reportlab-4.2.2.tar.gz", hash = "sha256:765eecbdd68491c56947e29c38b8b69b834ee5dbbdd2fb7409f08ebdebf04428"},
+ {file = "reportlab-4.2.5-py3-none-any.whl", hash = "sha256:eb2745525a982d9880babb991619e97ac3f661fae30571b7d50387026ca765ee"},
+ {file = "reportlab-4.2.5.tar.gz", hash = "sha256:5cf35b8fd609b68080ac7bbb0ae1e376104f7d5f7b2d3914c7adc63f2593941f"},
]
[package.dependencies]
@@ -1393,13 +1378,13 @@ files = [
[[package]]
name = "tzdata"
-version = "2024.1"
+version = "2024.2"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
- {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
- {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
+ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
]
[[package]]
From b1f25566980ca32a5a7afdb60e05452d7f75f585 Mon Sep 17 00:00:00 2001
From: OSINT-TECHNOLOGIES
<77023667+OSINT-TECHNOLOGIES@users.noreply.github.com>
Date: Tue, 1 Oct 2024 21:13:30 +0300
Subject: [PATCH 72/72] Backed up README.md
---
README.md | 12 ++++--------
1 file changed, 4 insertions(+), 8 deletions(-)
diff --git a/README.md b/README.md
index 8490c51..19a2c92 100644
--- a/README.md
+++ b/README.md
@@ -9,14 +9,13 @@
-
+
+
-
-
+
-
> DPULSE was created as a research tool, and it is not intended for criminal activities. Use DPULSE only on allowed domains and for legal purposes!
> You can visit [DPULSE wiki](https://github.com/OSINT-TECHNOLOGIES/dpulse/wiki/DPULSE-WIKI) in order to get more technical information about this project
@@ -142,7 +141,7 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
# Tasks to complete before new release
-- [x] Rework Google Dorking module in separate mode
+- [ ] Rework Google Dorking module in separate mode
- [ ] Rework Google Dorks list into separate databases with different pre-configured dorks for various purposes
- [ ] Allow user to create their own dorks DB
- [ ] Add separate API search mode with different free APIs
@@ -191,9 +190,6 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
### [by C.I.T Security](https://t.me/citsecurity/8578)
-### [by Adityaa_oky](https://t.me/adityaa_oky/960)
-
-### [by Реальний OSINT](https://t.me/realOSINT/462)