# GitHub Repository: Lucksi/Mr.Holmes
# Path: blob/master/Core/Searcher.py
# ORIGINAL CREATOR: Luca Garofalo (Lucksi)
# AUTHOR: Luca Garofalo (Lucksi)
# Copyright (C) 2021-2024 Lucksi <[email protected]>
# License: GNU General Public License v3.0

import os
import urllib.request
import json
from Core.Support import Font
from Core.Support import Creds
from Core.Support import FileTransfer
from Core.Support import Proxies
from Core.Support import Requests_Search
from Core.Support.Username import Scraper
from Core.Support import Clear
from Core.Support import Dorks
from Core.Support import Logs
from Core.Support import Banner_Selector as banner
from Core.Support import Language
from Core.Support import Notification
from Core.Support import Recap
from Core.Support import DateFormat
from datetime import datetime
from Core.Support import Encoding
from Core.Support import Site_Counter as CO
from time import sleep

filename = Language.Translation.Get_Language()

class MrHolmes:

    @staticmethod
    def Scraping(report, username, http_proxy, InstagramParams, PostLocations, PostGpsCoordinates, TwitterParams):
        # Scrape every supported profile for the username and append the results to the report.
        os.chdir("GUI/Reports/Usernames/{}".format(username))
        if os.path.isdir("Profile_pics"):
            pass
        else:
            os.mkdir("Profile_pics")
        os.chdir("../../../../")
        #http_proxy = None
        try:
            Scraper.info.Instagram(report, username, http_proxy, InstagramParams,
                                   PostLocations, PostGpsCoordinates, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")
        try:
            Scraper.info.Twitter(report, username, http_proxy, TwitterParams,
                                 "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")
        try:
            Scraper.info.TikTok(report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

        try:
            Scraper.info.Github(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

        try:
            Scraper.info.GitLab(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

        try:
            Scraper.info.Ngl(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")
        try:
            Scraper.info.Tellonym(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

        try:
            Scraper.info.Gravatar(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

        try:
            Scraper.info.Joinroll(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

        try:
            Scraper.info.Chess(
                report, username, http_proxy, "Usernames", username)
        except Exception as e:
            print(Font.Color.RED + "[!]" + Font.Color.WHITE + "Something went wrong")

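    # Controll() walks a username site list (JSON), skips any site whose excluded
    # characters appear in the username, and hands every remaining candidate to
    # Requests_Search.Search.search(); when a request fails it retries once
    # without the proxy and then restores the proxy for the next site.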
    @staticmethod
    def Controll(username, nomefile, identity, report, subject, successfull, ScraperSites, Writable, http_proxy2, successfullName, http_proxy, choice, Tags, MostTags):
        f = open(nomefile)
        print(Font.Color.GREEN + "\n[+]" + Font.Color.WHITE +
              Language.Translation.Translate_Language(filename, "Default", "Proxy", "None").format(http_proxy2))
        if identity != "None":
            print(Font.Color.GREEN + "[+]" + Font.Color.WHITE + identity)
        else:
            pass
        json_file = "GUI/Reports/Usernames/{}/{}.json".format(
            username, username)
        json_file2 = "GUI/Reports/Usernames/{}/{}.json".format(
            username, "Name")
        data = json.loads(f.read())
        f.close()
        for sites in data:
            for data1 in sites:
                site1 = sites[data1]["user"].replace("{}", username)
                site2 = sites[data1]["user2"].replace("{}", username)
                name = sites[data1]["name"]
                main = sites[data1]["main"]
                error = sites[data1]["Error"]
                exception_char = sites[data1]["exception"]
                is_scrapable = sites[data1]["Scrapable"]
                Tag = sites[data1]["Tag"]
                print(Font.Color.GREEN +
                      "\n[+]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Attempt", "None").format(name))
                # Skip the site if the username contains a character it does not allow.
                alert = "CORRECT"
                for errors in exception_char:
                    if errors in username:
                        alert = "NOT-CORRECT"
                        break
                if alert == "NOT-CORRECT":
                    print(
                        Font.Color.YELLOW2 + "[U]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Username", "Default", "Not_Valid"))
                else:
                    try:
                        Requests_Search.Search.search(error, report, site1, site2, http_proxy, sites, data1, username,
                                                      subject, successfull, name, successfullName, is_scrapable, ScraperSites, Writable, main, json_file, json_file2, Tag, Tags, MostTags)
                    except Exception as e:
                        print(
                            Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
                        http_proxy = None
                        try:
                            Requests_Search.Search.search(error, report, site1, site2, http_proxy, sites, data1, username,
                                                          subject, successfull, name, successfullName, is_scrapable, ScraperSites, Writable, main, json_file, json_file2, Tag, Tags, MostTags)
                        except Exception as e:
                            print(
                                Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Site_Error", "None"))
                        # Restore the proxy settings before moving on to the next site.
                        if choice == 1:
                            http_proxy = Proxies.proxy.final_proxis
                            http_proxy2 = Proxies.proxy.choice3
                            source = "http://ip-api.com/json/" + http_proxy2
                            access = urllib.request.urlopen(source)
                            try:
                                content = access.read()
                                final = json.loads(content)
                                identity = Language.Translation.Translate_Language(
                                    filename, "Default", "ProxyLoc", "None").format(final["regionName"], final["country"])
                            except Exception as e:
                                print("SOMETHING WENT WRONG SORRY")
                                http_proxy = None
                                http_proxy2 = str(http_proxy)
                                identity = "None"

                        else:
                            http_proxy = None
                            http_proxy2 = str(http_proxy)
                            identity = "None"

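    # The site-list JSON consumed by Controll() is a list of one-key objects whose
    # values provide the fields read above ("user", "user2", "name", "main",
    # "Error", "exception", "Scrapable", "Tag"). A purely illustrative entry,
    # not copied from the real Site_lists/Username/site_list.json:
    #
    # [
    #     {
    #         "Example": {
    #             "user": "https://example.com/{}",
    #             "user2": "https://example.com/users/{}",
    #             "name": "Example",
    #             "main": "https://example.com",
    #             "Error": "Page not found",
    #             "exception": [" ", "#"],
    #             "Scrapable": "False",
    #             "Tag": "None"
    #         }
    #     }
    # ]
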
    @staticmethod
    def Banner(Mode):
        Clear.Screen.Clear()
        Folder = "Banners/Username"
        banner.Random.Get_Banner(Folder, Mode)

    @staticmethod
    def Google_dork(username):
        report = "GUI/Reports/Usernames/Dorks/{}_Dorks.txt".format(username)
        nomefile = "Site_lists/Username/Google_dorks.txt"
        Type = "GOOGLE"
        if os.path.isfile(report):
            os.remove(report)
            print(Font.Color.BLUE + "\n[I]" + Font.Color.WHITE +
                  Language.Translation.Translate_Language(filename, "Dorks", "Remove", "None").format(username))
        else:
            pass
        Dorks.Search.dork(username, report, nomefile, Type)

    @staticmethod
    def Yandex_dork(username):
        report = "GUI/Reports/Usernames/Dorks/{}_Dorks.txt".format(username)
        nomefile = "Site_lists/Username/Yandex_dorks.txt"
        Type = "YANDEX"
        Dorks.Search.dork(username, report, nomefile, Type)

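    # search() drives a username investigation end to end: it prepares the report
    # folder, optionally routes requests through a proxy, checks the username
    # against the site lists (or scrapes the known profiles directly), and then
    # offers recaps, Google/Yandex dorks, notifications and file transfer.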
    @staticmethod
    def search(username, Mode):
        successfull = []
        successfullName = []
        ScraperSites = []
        Tags = []
        MostTags = []
        Writable = True
        MrHolmes.Banner(Mode)
        print(Font.Color.BLUE + "\n[I]" + Font.Color.WHITE + "INFO:" + "[{}]".format(Font.Color.GREEN + Language.Translation.Translate_Language(filename, "Username", "Default", "Explanation") + Font.Color.WHITE))
        subject = "USERNAME"
        found = 0
        Percent = 0
        nomefile = "Site_lists/Username/site_list.json"
        folder = "GUI/Reports/Usernames/" + username + "/"
        report = folder + username + ".txt"
        report2 = folder + username + ".mh"
        Recap1 = folder + "Recap.txt"
        Recap2 = folder + "Recap.mh"
        InstagramParams = []
        TwitterParams = []
        PostLocations = []
        PostGpsCoordinates = []
        # Remove the reports of a previous run for this username, or create its folder.
        if os.path.exists(report):
            os.remove(report)
            if os.path.exists(folder + "Name.json"):
                os.remove(folder + "Name.json")
            if os.path.exists(report.replace(".txt", ".json")):
                os.remove(report.replace(".txt", ".json"))
            if os.path.exists(Recap1):
                os.remove(Recap1)
            elif os.path.exists(Recap2):
                os.remove(Recap2)
            print(Font.Color.BLUE + "\n[I]" + Font.Color.WHITE +
                  Language.Translation.Translate_Language(filename, "Default", "Delete", "None").format(username))
        elif os.path.exists(report2):
            os.remove(report2)
            if os.path.exists(folder + "Name.json"):
                os.remove(folder + "Name.json")
            if os.path.exists(report2.replace(".mh", ".json")):
                os.remove(report2.replace(".mh", ".json"))
            if os.path.exists(Recap1):
                os.remove(Recap1)
            elif os.path.exists(Recap2):
                os.remove(Recap2)
            print(Font.Color.BLUE + "\n[I]" + Font.Color.WHITE +
                  Language.Translation.Translate_Language(filename, "Default", "Delete", "None").format(username))
        else:
            os.mkdir(folder)
        now = datetime.now()
        dataformat = DateFormat.Get.Format()
        dt_string = now.strftime(dataformat)
        Date = "Date: " + str(dt_string)
        choice = int(input(
            Font.Color.BLUE + "\n[+]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "choice", "None") + Font.Color.GREEN + "[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
        if choice == 1:
            http_proxy = Proxies.proxy.final_proxis
            http_proxy2 = Proxies.proxy.choice3
            # ip-api.com returns a JSON object whose "regionName" and "country"
            # fields describe where the selected proxy exits.
            source = "http://ip-api.com/json/" + http_proxy2
            access = urllib.request.urlopen(source)
            try:
                content = access.read()
                final = json.loads(content)
                identity = Language.Translation.Translate_Language(
                    filename, "Default", "ProxyLoc", "None").format(final["regionName"], final["country"])
            except Exception as e:
                print("SOMETHING WENT WRONG SORRY")
                http_proxy = None
                http_proxy2 = str(http_proxy)
                identity = "None"

        else:
            http_proxy = None
            http_proxy2 = str(http_proxy)
            identity = "None"
        if os.path.isfile(report):
            os.remove(report)
            print(Font.Color.BLUE + "\n[I]" + Font.Color.WHITE +
                  Language.Translation.Translate_Language(filename, "Default", "Delete", "None").format(username))
        folder = "Username"
        Logs.Log.Checker(username, folder)
        f = open(report, "a")
        f.write(Language.Translation.Translate_Language(
            filename, "Report", "Default", "Date").format(Date) + "\r\n")
        f.write(Language.Translation.Translate_Language(
            filename, "Report", "Username", "Found"))
        f.close()
        opt = int(input(Font.Color.BLUE + "\n[+]" + Font.Color.GREEN + "[INSERT AN OPTION]:" + Font.Color.WHITE + "\n(1)USERNAME-RESEARCH (SEARCH USERNAME ON DIFFERENT WEBSITES)\n(2)PROFILE-SCRAPING (SCRAPE USERNAME PROFILE DIRECTLY)" + Font.Color.GREEN + "\n\n[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
        if opt == 1:
            i1 = CO.Counter.Site(nomefile)
            MrHolmes.Controll(username, nomefile, identity, report, subject,
                              successfull, ScraperSites, Writable, http_proxy2, successfullName, http_proxy, choice, Tags, MostTags)
            Nsfw = int(input(Font.Color.BLUE + "\n[?]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Username", "Default", "Nsfw") +
                             Font.Color.GREEN + "[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
            if Nsfw == 1:
                nomefile = "Site_lists/Username/NSFW_site_list.json"
                i2 = CO.Counter.Site(nomefile)
                MrHolmes.Controll(username, nomefile, identity, report, subject,
                                  successfull, ScraperSites, Writable, http_proxy2, successfullName, http_proxy, choice, Tags, MostTags)
                Count = i1 + i2
            else:
                Count = i1
            print(Font.Color.GREEN + "\n[+]" + Font.Color.WHITE +
                  Language.Translation.Translate_Language(filename, "Default", "TotFound", "None").format(subject, username))
            sleep(3)
            if len(successfull):
                for names in successfull:
                    print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE + names)
                    found = found + 1
                if len(ScraperSites):
                    os.chdir("GUI/Reports/Usernames/{}".format(username))
                    if os.path.isdir("Profile_pics"):
                        pass
                    else:
                        os.mkdir("Profile_pics")
                    os.chdir("../../../../")
                    choice = int(input(Font.Color.BLUE + "\n[?]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Username", "Default", "Scraper") +
                                       Font.Color.GREEN + "[*MR.HOLMES*]" + Font.Color.WHITE + "-->"))
                    if choice == 1:
                        ScrapeOp = "Positive"
                        choice = int(input(
                            Font.Color.BLUE + "\n[?]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "choice", "None") + Font.Color.GREEN + "[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
                        if choice == 1:
                            http_proxy = Proxies.proxy.final_proxis
                            http_proxy2 = Proxies.proxy.choice3
                            source = "http://ip-api.com/json/" + http_proxy2
                            access = urllib.request.urlopen(source)
                            content = access.read()
                            final = json.loads(content)
                            identity = Language.Translation.Translate_Language(
                                filename, "Default", "ProxyLoc", "None").format(final["regionName"], final["country"])

                        else:
                            http_proxy = None
                            http_proxy2 = str(http_proxy)
                            identity = "None"
                        print(Font.Color.GREEN + "\n[+]" + Font.Color.WHITE +
                              Language.Translation.Translate_Language(filename, "Default", "Proxy", "None").format(http_proxy2))
                        if identity != "None":
                            print(Font.Color.GREEN +
                                  "[+]" + Font.Color.WHITE + identity)
                        else:
                            pass

if "Instagram" in ScraperSites:
341
try:
342
Scraper.info.Instagram(
343
report, username, http_proxy, InstagramParams, PostLocations, PostGpsCoordinates, "Usernames", username)
344
except ConnectionError:
345
print(
346
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
347
http_proxy = None
348
Scraper.info.Instagram(
349
report, username, http_proxy, InstagramParams, PostLocations, PostGpsCoordinates, "Usernames", username)
350
except Exception as e:
351
pass
352
else:
353
pass
354
355
if "TikTok" in ScraperSites:
356
try:
357
Scraper.info.TikTok(
358
report, username, http_proxy, "Usernames", username)
359
except ConnectionError:
360
print(
361
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
362
http_proxy = None
363
Scraper.info.TikTok(
364
report, username, http_proxy, "Usernames", username)
365
except Exception as e:
366
pass
367
else:
368
pass
369
370
if "Disqus" in ScraperSites:
371
try:
372
Scraper.info.Disqus(
373
report, username, http_proxy, "Usernames", username)
374
except ConnectionError:
375
print(
376
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
377
http_proxy = None
378
Scraper.info.Disqus(
379
report, username, http_proxy, "Usernames", username)
380
except Exception as e:
381
pass
382
else:
383
pass
384
385
if "Imgur" in ScraperSites:
386
try:
387
Scraper.info.Imgur(
388
report, username, http_proxy, "Usernames", username)
389
except Exception as e:
390
print(
391
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
392
http_proxy = None
393
Scraper.info.Imgur(
394
report, username, http_proxy, "Usernames", username)
395
else:
396
pass
397
398
if "Pr0gramm" in ScraperSites:
399
try:
400
Scraper.info.Pr0gramm(
401
report, username, http_proxy, "Usernames", username)
402
except Exception as e:
403
print(
404
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
405
http_proxy = None
406
Scraper.info.Pr0gramm(
407
report, username, http_proxy, "Usernames", username)
408
else:
409
pass
410
411
if "BinarySearch" in ScraperSites:
412
try:
413
Scraper.info.Binarysearch(
414
report, username, http_proxy, "Usernames", username)
415
except Exception as e:
416
print(
417
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
418
http_proxy = None
419
Scraper.info.Binarysearch(
420
report, username, http_proxy, "Usernames", username)
421
else:
422
pass
423
424
if "MixCloud" in ScraperSites:
425
try:
426
Scraper.info.MixCloud(
427
report, username, http_proxy, "Usernames", username)
428
except Exception as e:
429
print(
430
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
431
http_proxy = None
432
Scraper.info.MixCloud(
433
report, username, http_proxy, "Usernames", username)
434
else:
435
pass
436
437
if "Twitter" in ScraperSites:
438
try:
439
Scraper.info.Twitter(
440
report, username, http_proxy, TwitterParams, "Usernames", username)
441
except ConnectionError:
442
print(
443
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
444
http_proxy = None
445
Scraper.info.Twitter(
446
report, username, http_proxy, TwitterParams, "Usernames", username)
447
except Exception as e:
448
print(str(e))
449
pass
450
else:
451
pass
452
453
if "DockerHub" in ScraperSites:
454
try:
455
Scraper.info.Dockerhub(
456
report, username, http_proxy, "Usernames", username)
457
except Exception as e:
458
print(
459
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
460
http_proxy = None
461
Scraper.info.Dockerhub(
462
report, username, http_proxy, "Usernames", username)
463
else:
464
pass
465
466
if "Kik" in ScraperSites:
467
try:
468
Scraper.info.Kik(report, username,
469
http_proxy, "Usernames", username)
470
except Exception as e:
471
print(
472
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
473
http_proxy = None
474
Scraper.info.Kik(report, username,
475
http_proxy, "Usernames", username)
476
else:
477
pass
478
479
if "GitLab" in ScraperSites:
480
try:
481
Scraper.info.GitLab(
482
report, username, http_proxy, "Usernames", username)
483
except Exception as e:
484
print(
485
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
486
http_proxy = None
487
Scraper.info.GitLab(
488
report, username, http_proxy, "Usernames", username)
489
else:
490
pass
491
492
if "Wattpad" in ScraperSites:
493
try:
494
Scraper.info.Wattpad(
495
report, username, http_proxy, "Usernames", username)
496
except Exception as e:
497
print(
498
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
499
http_proxy = None
500
Scraper.info.Wattpad(
501
report, username, http_proxy, "Usernames", username)
502
else:
503
pass
504
505
if "GitHub" in ScraperSites:
506
try:
507
Scraper.info.Github(
508
report, username, http_proxy, "Usernames", username)
509
except Exception as e:
510
print(
511
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
512
http_proxy = None
513
Scraper.info.Github(
514
report, username, http_proxy, "Usernames", username)
515
else:
516
pass
517
518
if "Gravatar" in ScraperSites:
519
try:
520
Scraper.info.Gravatar(
521
report, username, http_proxy, "Usernames", username)
522
except Exception as e:
523
print(
524
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
525
http_proxy = None
526
Scraper.info.Gravatar(
527
report, username, http_proxy, "Usernames", username)
528
else:
529
pass
530
531
if "Chess.com" in ScraperSites:
532
try:
533
Scraper.info.Chess(
534
report, username, http_proxy, "Usernames", username)
535
except Exception as e:
536
print(
537
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
538
http_proxy = None
539
Scraper.info.Chess(
540
report, username, http_proxy, "Usernames", username)
541
542
if "Minecraft" in ScraperSites:
543
try:
544
Scraper.info.Minecraft(
545
report, username, http_proxy, "Usernames", username)
546
except Exception as e:
547
print(
548
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
549
http_proxy = None
550
Scraper.info.Minecraft(
551
report, username, http_proxy, "Usernames", username)
552
else:
553
pass
554
555
if "JoinRoll" in ScraperSites:
556
try:
557
Scraper.info.Joinroll(
558
report, username, http_proxy, "Usernames", username)
559
except Exception as e:
560
print(
561
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
562
http_proxy = None
563
Scraper.info.Joinroll(
564
report, username, http_proxy, "Usernames", username)
565
566
if "Ngl.link" in ScraperSites:
567
try:
568
Scraper.info.Ngl(
569
report, username, http_proxy, "Usernames", username)
570
except Exception as e:
571
print(
572
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
573
http_proxy = None
574
Scraper.info.Ngl(
575
report, username, http_proxy, "Usernames", username)
576
else:
577
pass
578
579
if "Tellonym" in ScraperSites:
580
try:
581
Scraper.info.Tellonym(
582
report, username, http_proxy, "Usernames", username)
583
except Exception as e:
584
print(
585
Font.Color.BLUE + "\n[N]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error1", "None"))
586
http_proxy = None
587
Scraper.info.Tellonym(
588
report, username, http_proxy, "Usernames", username)
589
else:
590
pass
591
else:
592
ScrapeOp = "Negative"
593
else:
594
print(Font.Color.RED + "\n[!]" + Font.Color.WHITE +
595
Language.Translation.Translate_Language(filename, "Username", "Default", "NoScrape"))
596
ScrapeOp = "Negative"
597
else:
598
print(Font.Color.RED + "[!]" + Font.Color.WHITE +
599
Language.Translation.Translate_Language(filename, "Username", "Default", "NoFound").format(username))
600
ScrapeOp = "Negative"
601
        else:
            MrHolmes.Scraping(report, username, http_proxy, InstagramParams, PostLocations, PostGpsCoordinates, TwitterParams)
        if PostGpsCoordinates == [] and PostLocations == []:
            pass
        else:
            n = 0
            print(Font.Color.GREEN +
                  "\n[+]" + Font.Color.WHITE + "GETTING LATEST POST GEOLOCATION")
            f = open(report, "a")
            f.write("\nGETTING LATEST POST GEOLOCATION:\n")
            for Locations in PostGpsCoordinates:
                print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE + Locations)
                f.write(Locations + "\n")
                n = n + 1
            print(Font.Color.GREEN +
                  "\n[+]" + Font.Color.WHITE + "GETTING LATEST PLACE VISITED")
            f = open(report, "a")
            f.write("\nGETTING LATEST PLACE VISITED:\n")
            for Locations in PostLocations:
                print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE + Locations)
                f.write(Locations + "\n")
            f.close()

        count = 1
        Recaps = int(input(Font.Color.BLUE + "\n[?]" + Font.Color.WHITE + Language.Translation.Translate_Language(
            filename, "Default", "Hypo", "None") + Font.Color.GREEN + "[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
        if Recaps == 1:
            if opt == 1:
                Percent = found / Count * 100
                Recap.Stats.Printer(username, found, Count, Percent, subject,
                                    Tags, InstagramParams, TwitterParams, ScraperSites, ScrapeOp, MostTags)
            else:
                if len(InstagramParams):
                    print(Font.Color.GREEN +
                          "\n[+]" + Font.Color.WHITE + "INSTAGRAM HYPOTHESIS")
                    Recap.Stats.Hypotesys(InstagramParams, username, Recap1)
                if len(TwitterParams):
                    print(Font.Color.GREEN + "\n[+]" +
                          Font.Color.WHITE + "TWITTER HYPOTHESIS")
                    Recap.Stats.Hypotesys(TwitterParams, username, Recap1)
                report = "GUI/Reports/Usernames/{}/Recap.txt".format(username)
                if len(PostLocations):
                    Recap.Stats.Places(PostLocations, report, InstagramParams, username, MostTags)
                if len(MostTags):
                    Hobby2 = MostTags
                else:
                    if len(Tags):
                        Hobby2 = Tags
                    else:
                        Hobby2 = "False"
                if Hobby2 != "False":
                    print(Font.Color.GREEN +
                          "\n[+]" + Font.Color.WHITE + "GETTING POSSIBLE HOBBIES/INTERESTS:")
                    f = open(report, "a")
                    f.write("\nGETTING POSSIBLE HOBBIES/INTERESTS:\n")
                    sleep(3)
                    for PossibleHobby in Hobby2:
                        print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE + PossibleHobby)
                        f.write(PossibleHobby + "\n")
                    f.close()

                    Encoding.Encoder.Encode(report)
                else:
                    pass
        if count == 1:
            choice = int(input(
                Font.Color.BLUE + "\n[?]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Dorks", "None") + Font.Color.GREEN + "[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
            if choice == 1:
                MrHolmes.Google_dork(username)
                MrHolmes.Yandex_dork(username)
        print(Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Report", "None") +
              report)
        report = "GUI/Reports/Usernames/{}/{}.txt".format(username, username)
        f = open(report, "a")
        f.write(Language.Translation.Translate_Language(
            filename, "Report", "Default", "By"))
        f.close()
        Notification.Notifier.Start(Mode)
        Creds.Sender.mail(report, username)
        choice = int(input(
            Font.Color.BLUE + "\n[?]" + Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Transfer", "Question", "None") + Font.Color.GREEN + "[#MR.HOLMES#]" + Font.Color.WHITE + "-->"))
        report = "GUI/Reports/Usernames/{}/{}.txt".format(username, username)
        if choice == 1:
            FileTransfer.Transfer.File(report, username, ".txt")
        Encoding.Encoder.Encode(report)
        print(Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Report", "None") +
              report)
        inp = input(Language.Translation.Translate_Language(
            filename, "Default", "Continue", "None"))
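
# Illustrative usage only, not part of the original module: search() is the
# entry point exposed here and is normally invoked by the project's own menu
# scripts. The username and Mode values below are placeholder assumptions.
#
# if __name__ == "__main__":
#     MrHolmes.search("example_user", "1")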