scan.py

import json
import os
import sys
from urllib.parse import urlparse, urlsplit, parse_qs
from colorama import Fore
import argparse
from collections import Counter

# Tune the scan by editing these two lists: extensions treated as ordinary,
# and keywords worth flagging when they appear in a URL's path or parameters.
common_extensions = ['', '.js', '.png', '.gif', '.jpeg', '.jpg']
interesting_words = ['sql', 'api', 'swagger', 'tomcat', 'edit', 'upload', 'admin',
                     'password', 'uname', 'git', 'pass', 'username', 'credentials',
                     'user', 'debug', 'servlet', 'key', 'token']


def alert(name, alert_type, url):
    # Build a single alert line; highlight the matched term unless color is disabled.
    global NOCOLOR
    if NOCOLOR:
        message = f"[+] {alert_type}\t| {name}\t - " + url
    else:
        url = url.replace(name, Fore.RED + name.lower() + Fore.GREEN)
        message = Fore.YELLOW + f"[+] {alert_type}\t|" + Fore.BLUE + f" {name}\t" + Fore.GREEN + " - " + url
    return message


def load_json_from_file(file_name):
    # Read the Urlscan.io search results previously saved by fetch.py.
    file_path = os.path.join('./', file_name)
    with open(file_path, 'r') as file:
        data = json.load(file)
    return data


def run_scan(file_name):
    # Flag URLs whose query parameters, path, or file extension look interesting.
    json_data = load_json_from_file(file_name)
    for result in json_data:
        page = result.get('page', {})
        if 'ip' in page:
            url = page.get('url')
            lower_url = url.lower()
            split_url = urlsplit(lower_url)
            file_extension = os.path.splitext(split_url.path)[1]
            query_params = parse_qs(split_url.query)
            path = split_url.path
            for word in interesting_words:
                if word in query_params:
                    print(alert(word, "Parameter", url))
                if word in path:
                    print(alert(word, "Path", url))
            if file_extension not in common_extensions:
                print(alert(file_extension, "Uncommon", url))
    return None


def get_urls(file_name):
    urlexport = set()
    json_data = load_json_from_file(file_name)
    for result in json_data:
        page = result.get('page', {})
        if 'ip' in page:
            url = page.get('url')
            urlexport.add(url)
    return urlexport


def count_items(urls, attribute):
    if attribute == "domain":
        items = [urlparse(url).netloc for url in urls]
    elif attribute == "extension":
        items = [os.path.splitext(urlparse(url).path)[1] for url in urls]
    else:
        raise ValueError("Unknown attribute: {}".format(attribute))
    return Counter(items)


def most_common_attributes(urls):
    # Summarise the most frequently seen domains and file extensions.
    domain_counts = count_items(urls, "domain").most_common()
    extension_counts = count_items(urls, "extension").most_common()
    result = []
    for domain, count in domain_counts:
        result.append(f"[Domain] {domain} seen {count} times")
    result.append("\n")  # blank separator, since the joined string is printed as-is
    for ext, count in extension_counts:
        result.append(f"[File] '{ext}' seen {count} times")
    return "\n".join(result)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Urlscan.io JSON parser - Moopinger')
    parser.add_argument('-f', '--file', type=str,
                        help='JSON file generated by fetch.py - contains the Urlscan.io JSON search results')
    parser.add_argument('-m', '--mode', help='Either "scan", "list" or "info"')
    parser.add_argument('--no-color', action='store_true', help='Suppress color codes')
    args = parser.parse_args()

    NOCOLOR = args.no_color
    if os.name == 'nt':
        # ANSI color codes are not initialised on Windows here, so force plain output.
        NOCOLOR = True

    if args.mode is None or args.file is None:
        parser.print_help()
        sys.exit(1)

    file_name = args.file
    mode = args.mode.lower()
    if mode == "scan":
        run_scan(file_name)
    elif mode == "list":
        for url in get_urls(file_name):
            print(url)
    elif mode == "info":
        print(most_common_attributes(get_urls(file_name)))
    else:
        print(f"Unknown mode: {mode}")