GitHub Repository: Lucksi/Mr.Holmes
Path: blob/master/Core/Support/Requests_Search.py
# ORIGINAL CREATOR: Luca Garofalo (Lucksi)
# AUTHOR: Luca Garofalo (Lucksi)
# Copyright (C) 2021-2023 Lucksi <[email protected]>
# License: GNU General Public License v3.0

import requests
import json
from Core.Support import Font
from Core.Support import Language
from Core.Support import Headers
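
# The active interface language is resolved once at import time; every call to
# Translate_Language() below reads its strings from this language file.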
filename = Language.Translation.Get_Language()


class Search:

    @staticmethod
    def search(error, report, site1, site2, http_proxy, sites, data1, username, subject, successfull, name, successfullName, is_scrapable, ScraperSites, Writable, main, json_file, json_file2, Tag, Tags, MostTags):
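        # Probe a single candidate site for the target username / phone number.
        # site2 is the URL that is actually requested, site1 the link shown to the
        # user; hits are printed, appended to the report file and collected in the
        # successfull / successfullName / ScraperSites lists passed in by the caller.
        # Tags listed in Unique always count toward the "most relevant" tag list
        # (MostTags) when they appear on a matched site.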
        Unique = ["Chess", "Books", "Pokemon", "Lol/League of Legends", "Minecraft", "Roblox", "Modelling",
                  "Anime", "Shopping", "Writing", "Stories", "OSU", "ThemeForest", "Meme", "Python", "Ruby",
                  "Npm", "Health", "Map", "File-Sharing", "Colors", "Crypto", "Speedrun", "Steam", "BitCoin",
                  "Playstation", "Gallery", "Chess.com", "Badge"]
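        # Twitter is the only site that needs its own header set; every other site
        # is requested with the generic browser-like headers.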
        headers = Headers.Get.classic()
        if name == "Twitter":
            headers = Headers.Get.Twitter()
        searcher = requests.get(
            url=site2, headers=headers, proxies=http_proxy, timeout=10, allow_redirects=True)
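        # Confirmed hits are appended to the plain-text report file.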
        f = open(report, "a")
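        # The error argument selects how a missing profile is detected: by the HTTP
        # status code, by a known error message in the response body, or by the
        # final URL the request was redirected to.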
        if error == "Status-Code":
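            # 200 -> profile exists, 404/204 -> profile does not exist,
            # anything else is reported as a connection error.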
            if searcher.status_code == 200:
                print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE +
                      Language.Translation.Translate_Language(filename, "Default", "Found", "None").format(subject, username))
                print(Font.Color.YELLOW +
                      "[v]" + Font.Color.WHITE + "LINK: {}".format(site1))
                if Writable == True:
                    f.write(site1 + "\r\n")
                print(Font.Color.BLUE +
                      "[I]" + Font.Color.WHITE + "TAGS:[{}]".format(Font.Color.GREEN + ",".join(Tag) + Font.Color.WHITE))
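                # Tag bookkeeping is skipped for phone-number lookups; for username
                # lookups every tag of the matched site is counted once in Tags, and
                # tags that recur (or are listed in Unique) are promoted to MostTags.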
                if subject != "PHONE-NUMBER":
                    for tags in Tag:
                        if tags in Unique:
                            MostTags.append(tags)
                        if tags in Tags:
                            if tags not in MostTags:
                                MostTags.append(tags)
                        else:
                            Tags.append(tags)
                else:
                    f.write("{}:{}\r\n".format(name, main))
                successfull.append(site1)
                successfullName.append(name)
                if is_scrapable == "True":
                    ScraperSites.append(name)
            elif searcher.status_code == 404 or searcher.status_code == 204:
                print(Font.Color.RED + "[!]" + Font.Color.WHITE +
                      Language.Translation.Translate_Language(filename, "Default", "NotFound", "None").format(subject, username))
            else:
                # status_code is an int, so convert it before string concatenation
                print(Font.Color.BLUE + "[N]" +
                      Font.Color.WHITE + Language.Translation.Translate_Language(filename, "Default", "Connection_Error2", "None") + str(searcher.status_code))
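        # "Message" mode: the response body, rather than the status code, is checked
        # against the site-specific "not found" text taken from the sites database.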
        elif error == "Message":
            text = sites[data1]["text"]
            if text in searcher.text:
                print(Font.Color.RED + "[!]" + Font.Color.WHITE +
                      Language.Translation.Translate_Language(filename, "Default", "NotFound", "None").format(subject, username))
            else:
                print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE +
                      Language.Translation.Translate_Language(filename, "Default", "Found", "None").format(subject, username))
                print(Font.Color.YELLOW +
                      "[v]" + Font.Color.WHITE + "LINK: {}".format(site1))
                if Writable == True:
                    f.write(site1 + "\r\n")
                print(Font.Color.BLUE +
                      "[I]" + Font.Color.WHITE + "TAGS:[{}]".format(Font.Color.GREEN + ",".join(Tag) + Font.Color.WHITE))
                if subject != "PHONE-NUMBER":
                    for tags in Tag:
                        if tags in Unique:
                            MostTags.append(tags)
                        if tags in Tags:
                            if tags not in MostTags:
                                MostTags.append(tags)
                        else:
                            Tags.append(tags)
                else:
                    f.write("{}:{}\r\n".format(name, main))
                successfull.append(site1)
                successfullName.append(name)
                if is_scrapable == "True":
                    ScraperSites.append(name)

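        # "Response-Url" mode: a missing profile redirects to a known URL stored in
        # the sites database, so a hit is any final URL other than that one.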
        elif error == "Response-Url":
            response = sites[data1]["response"]
            if searcher.url == response:
                print(Font.Color.RED + "[!]" + Font.Color.WHITE +
                      Language.Translation.Translate_Language(filename, "Default", "NotFound", "None").format(subject, username))
            else:
                print(Font.Color.YELLOW + "[v]" + Font.Color.WHITE +
                      Language.Translation.Translate_Language(filename, "Default", "Found", "None").format(subject, username))
                print(Font.Color.YELLOW +
                      "[v]" + Font.Color.WHITE + "LINK: {}".format(site1))
                if Writable == True:
                    f.write(site1 + "\r\n")
                print(Font.Color.BLUE +
                      "[I]" + Font.Color.WHITE + "TAGS:[{}]".format(Font.Color.GREEN + ",".join(Tag) + Font.Color.WHITE))
                if subject != "PHONE-NUMBER":
                    for tags in Tag:
                        if tags in Unique:
                            MostTags.append(tags)
                        if tags in Tags:
                            if tags not in MostTags:
                                MostTags.append(tags)
                        else:
                            Tags.append(tags)
                else:
                    f.write("{}:{}\r\n".format(name, main))
                successfull.append(site1)
                successfullName.append(name)
                if is_scrapable == "True":
                    ScraperSites.append(name)

        # Close the report file before the handle name is re-used for the JSON output.
        f.close()
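        # Reset both JSON output files to empty skeletons and then refill them from
        # the results collected above.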
        d = open(json_file2, "w")
        d.write('''{
    "Names":[

    ]
}''')
        d.close()

        f = open(json_file, "w")
        f.write('''{
    "List":[

    ]
}''')
        f.close()
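        # Append every successful site name to the "Names" array; seeking back to the
        # start before dumping is safe here because the file only ever grows.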
        for element in successfullName:
            data = {
                "name": "{}".format(element)
            }
            with open(json_file2, 'r+') as file2:
                file_data2 = json.load(file2)
                file_data2["Names"].append(data)
                file2.seek(0)
                json.dump(file_data2, file2, indent=4)

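        # Same pattern for the matched URLs, stored under "List" in the second file.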
        for element in successfull:
            data = {
                "site": "{}".format(element)
            }
            with open(json_file, 'r+') as file:
                file_data = json.load(file)
                file_data["List"].append(data)
                file.seek(0)
                json.dump(file_data, file, indent=4)
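

# A minimal usage sketch (not part of the original module): in Mr.Holmes the caller
# builds these arguments from its sites database; every value below is an
# illustrative placeholder and the file paths are hypothetical.
if __name__ == "__main__":
    demo_sites = {
        "Example": {
            "text": "Page not found",
            "response": "https://example.com/404",
        }
    }
    Search.search(
        error="Status-Code",               # or "Message" / "Response-Url"
        report="demo_report.txt",
        site1="https://example.com/john",  # link shown to the user
        site2="https://example.com/john",  # URL actually requested
        http_proxy={},                     # requests-style proxies dict
        sites=demo_sites,
        data1="Example",
        username="john",
        subject="USERNAME",
        successfull=[],
        name="Example",
        successfullName=[],
        is_scrapable="False",
        ScraperSites=[],
        Writable=False,
        main="",
        json_file="demo_list.json",
        json_file2="demo_names.json",
        Tag=["Example"],
        Tags=[],
        MostTags=[],
    )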