Skip to content

Commit 9010620

Browse files
Merge pull request #90 from OSINT-TECHNOLOGIES/rolling
Stabilized v1.1.2
2 parents 8ac0538 + da06773 commit 9010620

16 files changed

+436
-329
lines changed

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,8 +143,8 @@ If you have problems with starting installer.sh, you should try to use `dos2unix
143143

144144
# Tasks to complete before new release
145145
- [x] Rework Google Dorking module in separate mode
146-
- [ ] Rework Google Dorks list into separate databases with different pre-configured dorks for various purposes
147-
- [ ] Allow user to create their own dorks DB
146+
- [x] Rework Google Dorks list into separate databases with different pre-configured dorks for various purposes
147+
- [x] Allow user to create their own dorks DB
148148
- [ ] Add separate API search mode with different free APIs
149149

150150
# DPULSE mentions in social medias

datagather_modules/crawl_processor.py

Lines changed: 23 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -113,25 +113,27 @@ def sm_gather(url):
113113
'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': []}
114114

115115
for link in links:
116-
if 'facebook.com' in link:
116+
parsed_url = urlparse(link)
117+
hostname = parsed_url.hostname
118+
if hostname and hostname.endswith('facebook.com'):
117119
categorized_links['Facebook'].append(urllib.parse.unquote(link))
118-
elif 'twitter.com' in link:
120+
elif hostname and hostname.endswith('twitter.com'):
119121
categorized_links['Twitter'].append(urllib.parse.unquote(link))
120-
elif 'instagram.com' in link:
122+
elif hostname and hostname.endswith('instagram.com'):
121123
categorized_links['Instagram'].append(urllib.parse.unquote(link))
122-
elif 't.me' in link:
124+
elif hostname and hostname.endswith('t.me'):
123125
categorized_links['Telegram'].append(urllib.parse.unquote(link))
124-
elif 'tiktok.com' in link:
126+
elif hostname and hostname.endswith('tiktok.com'):
125127
categorized_links['TikTok'].append(urllib.parse.unquote(link))
126-
elif 'linkedin.com' in link:
128+
elif hostname and hostname.endswith('linkedin.com'):
127129
categorized_links['LinkedIn'].append(urllib.parse.unquote(link))
128-
elif 'vk.com' in link:
130+
elif hostname and hostname.endswith('vk.com'):
129131
categorized_links['VKontakte'].append(urllib.parse.unquote(link))
130-
elif 'youtube.com' in link:
132+
elif hostname and hostname.endswith('youtube.com'):
131133
categorized_links['YouTube'].append(urllib.parse.unquote(link))
132-
elif 'wechat.com' in link:
134+
elif hostname and hostname.endswith('wechat.com'):
133135
categorized_links['WeChat'].append(urllib.parse.unquote(link))
134-
elif 'ok.ru' in link:
136+
elif hostname and hostname.endswith('ok.ru'):
135137
categorized_links['Odnoklassniki'].append(urllib.parse.unquote(link))
136138

137139
if not categorized_links['Odnoklassniki']:
@@ -211,25 +213,26 @@ def domains_reverse_research(subdomains, report_file_type):
211213

212214
for inner_list in subdomain_socials_grouped:
213215
for link in inner_list:
214-
if 'facebook.com' in link:
216+
hostname = urlparse(link).hostname
217+
if hostname and hostname.endswith('facebook.com'):
215218
sd_socials['Facebook'].append(urllib.parse.unquote(link))
216-
elif 'twitter.com' in link:
219+
elif hostname and hostname.endswith('twitter.com'):
217220
sd_socials['Twitter'].append(urllib.parse.unquote(link))
218-
elif 'instagram.com' in link:
221+
elif hostname and hostname.endswith('instagram.com'):
219222
sd_socials['Instagram'].append(urllib.parse.unquote(link))
220-
elif 't.me' in link:
223+
elif hostname and hostname.endswith('t.me'):
221224
sd_socials['Telegram'].append(urllib.parse.unquote(link))
222-
elif 'tiktok.com' in link:
225+
elif hostname and hostname.endswith('tiktok.com'):
223226
sd_socials['TikTok'].append(urllib.parse.unquote(link))
224-
elif 'linkedin.com' in link:
227+
elif hostname and hostname.endswith('linkedin.com'):
225228
sd_socials['LinkedIn'].append(urllib.parse.unquote(link))
226-
elif 'vk.com' in link:
229+
elif hostname and hostname.endswith('vk.com'):
227230
sd_socials['VKontakte'].append(urllib.parse.unquote(link))
228-
elif 'youtube.com' in link:
231+
elif hostname and hostname.endswith('youtube.com'):
229232
sd_socials['YouTube'].append(urllib.parse.unquote(link))
230-
elif 'wechat.com' in link:
233+
elif hostname and hostname.endswith('wechat.com'):
231234
sd_socials['WeChat'].append(urllib.parse.unquote(link))
232-
elif 'ok.ru' in link:
235+
elif hostname and hostname.endswith('ok.ru'):
233236
sd_socials['Odnoklassniki'].append(urllib.parse.unquote(link))
234237

235238
sd_socials = {k: list(set(v)) for k, v in sd_socials.items()}

datagather_modules/data_assembler.py

Lines changed: 34 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -22,36 +22,46 @@
2222
sys.exit()
2323

2424
def establishing_dork_db_connection(dorking_flag):
25-
if dorking_flag == 'basic':
26-
dorking_db_path = 'dorking//basic_dorking.db'
27-
table = 'basic_dorks'
28-
elif dorking_flag == 'iot':
29-
dorking_db_path = 'dorking//iot_dorking.db'
30-
table = 'iot_dorks'
31-
elif dorking_flag == 'files':
32-
dorking_db_path = 'dorking//files_dorking.db'
33-
table = 'files_dorks'
25+
dorking_db_paths = {
26+
'basic': 'dorking//basic_dorking.db',
27+
'iot': 'dorking//iot_dorking.db',
28+
'files': 'dorking//files_dorking.db',
29+
'admins': 'dorking//adminpanels_dorking.db',
30+
'web': 'dorking//webstructure_dorking.db',
31+
}
32+
dorking_tables = {
33+
'basic': 'basic_dorks',
34+
'iot': 'iot_dorks',
35+
'files': 'files_dorks',
36+
'admins': 'admins_dorks',
37+
'web': 'web_dorks',
38+
}
39+
if dorking_flag in dorking_db_paths:
40+
dorking_db_path = dorking_db_paths[dorking_flag]
41+
table = dorking_tables[dorking_flag]
42+
elif dorking_flag.startswith('custom'):
43+
lst = dorking_flag.split('+')
44+
dorking_db_name = lst[1]
45+
dorking_db_path = 'dorking//' + dorking_db_name
46+
table = 'dorks'
47+
else:
48+
raise ValueError(f"Invalid dorking flag: {dorking_flag}")
3449
return dorking_db_path, table
3550

3651
class DataProcessing():
3752
def report_preprocessing(self, short_domain, report_file_type):
3853
report_ctime = datetime.now().strftime('%d-%m-%Y, %H:%M:%S')
3954
files_ctime = datetime.now().strftime('(%d-%m-%Y, %Hh%Mm%Ss)')
4055
files_body = short_domain.replace(".", "") + '_' + files_ctime
41-
if report_file_type == 'pdf':
42-
casename = files_body + '.pdf'
43-
elif report_file_type == 'xlsx':
44-
casename = files_body + '.xlsx'
45-
elif report_file_type == 'html':
46-
casename = files_body + '.html'
56+
casename = f"{files_body}.{report_file_type}"
4757
foldername = files_body
4858
db_casename = short_domain.replace(".", "")
4959
now = datetime.now()
5060
db_creation_date = str(now.year) + str(now.month) + str(now.day)
51-
report_folder = "report_{}".format(foldername)
52-
robots_filepath = report_folder + '//01-robots.txt'
53-
sitemap_filepath = report_folder + '//02-sitemap.txt'
54-
sitemap_links_filepath = report_folder + '//03-sitemap_links.txt'
61+
report_folder = f"report_{foldername}"
62+
robots_filepath = os.path.join(report_folder, '01-robots.txt')
63+
sitemap_filepath = os.path.join(report_folder, '02-sitemap.txt')
64+
sitemap_links_filepath = os.path.join(report_folder, '03-sitemap_links.txt')
5565
os.makedirs(report_folder, exist_ok=True)
5666
return casename, db_casename, db_creation_date, robots_filepath, sitemap_filepath, sitemap_links_filepath, report_file_type, report_folder, files_ctime, report_ctime
5767

@@ -129,20 +139,9 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
129139
pass
130140

131141
if dorking_flag == 'none':
132-
pass
133142
dorking_status = 'Google Dorking mode was not selected for this scan'
134143
dorking_file_path = 'Google Dorking mode was not selected for this scan'
135-
elif dorking_flag == 'basic':
136-
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
137-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
138-
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
139-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
140-
elif dorking_flag == 'iot':
141-
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
142-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
143-
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
144-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
145-
elif dorking_flag == 'files':
144+
else:
146145
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
147146
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
148147
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
@@ -180,23 +179,12 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
180179
pass
181180

182181
if dorking_flag == 'none':
183-
pass
184182
dorking_status = 'Google Dorking mode was not selected for this scan'
185-
dorking_results = 'Google Dorking mode was not selected for this scan'
186-
elif dorking_flag == 'basic':
187-
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
188-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
189-
dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
190-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
191-
elif dorking_flag == 'iot':
192-
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
193-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
194-
dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
195-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
196-
elif dorking_flag == 'files':
183+
dorking_file_path = 'Google Dorking mode was not selected for this scan'
184+
else:
197185
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
198186
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
199-
dorking_status, dorking_results = dp.transfer_results_to_xlsx(table, dp.get_dorking_query(short_domain, dorking_db_path, table))
187+
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
200188
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
201189

202190
data_array = [ip, res, mails, subdomains, subdomains_amount, social_medias, subdomain_mails, sd_socials,
@@ -234,20 +222,9 @@ def data_gathering(self, short_domain, url, report_file_type, pagesearch_flag, k
234222
pass
235223

236224
if dorking_flag == 'none':
237-
pass
238225
dorking_status = 'Google Dorking mode was not selected for this scan'
239226
dorking_file_path = 'Google Dorking mode was not selected for this scan'
240-
elif dorking_flag == 'basic':
241-
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
242-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
243-
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
244-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
245-
elif dorking_flag == 'iot':
246-
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
247-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
248-
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))
249-
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN END: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
250-
elif dorking_flag == 'files':
227+
else:
251228
dorking_db_path, table = establishing_dork_db_connection(dorking_flag.lower())
252229
print(Fore.LIGHTMAGENTA_EX + f"\n[EXTENDED SCAN START: {dorking_flag.upper()} DORKING]\n" + Style.RESET_ALL)
253230
dorking_status, dorking_file_path = dp.save_results_to_txt(report_folder, table, dp.get_dorking_query(short_domain, dorking_db_path, table))

datagather_modules/networking_processor.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ def get_ssl_certificate(short_domain, port=443):
4040
try:
4141
logging.info('SSL CERTIFICATE GATHERING: OK')
4242
context = ssl.create_default_context()
43+
context.minimum_version = ssl.TLSVersion.TLSv1_2
4344
conn = socket.create_connection((short_domain, port))
4445
sock = context.wrap_socket(conn, server_hostname=short_domain)
4546
cert = sock.getpeercert()

dorking/adminpanels_dorking.db

28 KB
Binary file not shown.

dorking/basic_dorking.db

0 Bytes
Binary file not shown.

dorking/db_creator.py

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
import sqlite3
2+
from colorama import Fore
3+
import os
4+
5+
def manage_dorks(db_name):
6+
db_prep_string = str(db_name) + '.db'
7+
if os.path.exists('dorking//' + db_prep_string):
8+
print(Fore.RED + f"Sorry, but {db_prep_string} database is already exists. Choose other name for your custom DB")
9+
pass
10+
else:
11+
conn = sqlite3.connect('dorking//' + str(db_prep_string))
12+
cursor = conn.cursor()
13+
14+
cursor.execute('''
15+
CREATE TABLE IF NOT EXISTS dorks (
16+
dork_id INTEGER PRIMARY KEY,
17+
dork TEXT NOT NULL
18+
)
19+
''')
20+
conn.commit()
21+
22+
def add_dork(dork_id, dork):
23+
try:
24+
cursor.execute('INSERT INTO dorks (dork_id, dork) VALUES (?, ?)', (dork_id, dork))
25+
conn.commit()
26+
print(Fore.GREEN + "Successfully added new dork")
27+
except sqlite3.IntegrityError:
28+
print(Fore.RED + "Attention, dork_id variable must be unique")
29+
30+
while True:
31+
dork_id = input(Fore.YELLOW + "Enter dork_id (or 'q' to quit this mode and save changes) >> ")
32+
if dork_id.lower() == 'q':
33+
break
34+
dork = input(Fore.YELLOW + "Enter new dork >> ")
35+
add_dork(int(dork_id), dork)
36+
conn.close()

0 commit comments

Comments (0)