@@ -10,27 +10,27 @@ import (
 )

 //ManageRequests handles the request workers
-func ManageRequests() {
+func (gState *State) ManageRequests() {
 	//manages net request workers
 	for {
 		page := <-gState.Chans.pagesChan
 		if gState.Blacklist[page.URL] {
 			gState.wg.Done()
-			PrintOutput(fmt.Sprintf("Not testing blacklisted URL: %s", page.URL), Info, 0)
+			gState.PrintOutput(fmt.Sprintf("Not testing blacklisted URL: %s", page.URL), Info, 0)
 			continue
 		}
 		for _, method := range gState.Methods {
 			if page.Result == nil && !gState.Cfg.NoBase {
 				gState.Chans.workersChan <- struct{}{}
 				gState.wg.Add(1)
-				go testURL(method, page.URL, gState.Client)
+				go gState.testURL(method, page.URL, gState.Client)
 			}
 			if gState.Cfg.Wordlist != "" && string(page.URL[len(page.URL)-1]) == "/" { //if we are testing a directory

 				//check for wildcard response

 				// maxDirs <- struct{}{}
-				dirBust(page)
+				gState.dirBust(page)
 			}
 		}
 		gState.wg.Done()
@@ -39,16 +39,19 @@ func ManageRequests() {
 }

 //ManageNewURLs will take in any URL, and decide if it should be added to the queue for bustin', or if we discovered something new
-func ManageNewURLs() {
+func (gState *State) ManageNewURLs() {
 	//decides on whether to add to the directory list, or add to file output
 	for {
-		candidate := <-gState.Chans.newPagesChan
+		candidate, more := <-gState.Chans.newPagesChan
+		if !more {
+			return
+		}
 		//check the candidate is an actual URL
 		u, err := url.Parse(strings.TrimSpace(candidate.URL))

 		if err != nil {
 			gState.wg.Done()
-			PrintOutput(err.Error(), Error, 0)
+			gState.PrintOutput(err.Error(), Error, 0)
 			continue //probably a better way of doing this
 		}

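The comma-ok receive added in this hunk is what lets ManageNewURLs return once newPagesChan is closed, instead of blocking forever on an empty channel. A minimal, self-contained sketch of that shutdown pattern (the names below are illustrative, not taken from this repository):

```go
package main

import "fmt"

// worker drains its input channel and exits cleanly once the channel
// is closed, mirroring the comma-ok receive added above.
func worker(pages <-chan string, done chan<- struct{}) {
	for {
		candidate, more := <-pages
		if !more {
			done <- struct{}{}
			return
		}
		fmt.Println("processing:", candidate)
	}
}

func main() {
	pages := make(chan string, 2)
	done := make(chan struct{})
	go worker(pages, done)

	pages <- "http://example.com/admin/"
	pages <- "http://example.com/login"
	close(pages) // signals the worker to stop
	<-done
}
```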
@@ -68,7 +71,7 @@ func ManageNewURLs() {
 		gState.CMut.Unlock()
 		gState.wg.Add(1)
 		gState.Chans.pagesChan <- SpiderPage{URL: actualURL, Reference: candidate.Reference, Result: candidate.Result}
-		PrintOutput("URL Added: "+actualURL, Debug, 3)
+		gState.PrintOutput("URL Added: "+actualURL, Debug, 3)

 		//also add any directories in the supplied path to the 'to be hacked' queue
 		path := ""
@@ -106,7 +109,7 @@ func ManageNewURLs() {
 	}
 }

-func testURL(method string, urlString string, client *http.Client) {
+func (gState *State) testURL(method string, urlString string, client *http.Client) {
 	defer func() {
 		gState.wg.Done()
 		atomic.AddUint64(gState.TotalTested, 1)
@@ -115,7 +118,7 @@ func testURL(method string, urlString string, client *http.Client) {
 	case gState.Chans.testChan <- method + ":" + urlString:
 	default: //this is to prevent blocking, it doesn't _really_ matter if it doesn't get written to output
 	}
-	headResp, content, good := evaluateURL(method, urlString, client)
+	headResp, content, good := gState.evaluateURL(method, urlString, client)

 	if !good && !gState.Cfg.ShowAll {
 		return
@@ -133,15 +136,15 @@ func testURL(method string, urlString string, client *http.Client) {
 	if !gState.Cfg.NoSpider && good && !gState.Cfg.NoRecursion {
 		urls, err := getUrls(content)
 		if err != nil {
-			PrintOutput(err.Error(), Error, 0)
+			gState.PrintOutput(err.Error(), Error, 0)
 		}
 		for _, x := range urls { //add any found pages into the pool
 			//add all the directories
 			newPage := SpiderPage{}
 			newPage.URL = x
 			newPage.Reference = headResp.Request.URL

-			PrintOutput(
+			gState.PrintOutput(
 				fmt.Sprintf("Found URL on page: %s", x),
 				Debug, 3,
 			)
@@ -152,23 +155,23 @@ func testURL(method string, urlString string, client *http.Client) {
 	}
 }

-func dirBust(page SpiderPage) {
+func (gState *State) dirBust(page SpiderPage) {
 	//ugh
 	u, err := url.Parse(page.URL)
 	if err != nil {
-		PrintOutput("This should never occur, url parse error on parsed url?"+err.Error(), Error, 0)
+		gState.PrintOutput("This should never occur, url parse error on parsed url?"+err.Error(), Error, 0)
 		return
 	}
 	//check to make sure we aren't dirbusting a wildcardyboi (NOTE!!! USES FIRST SPECIFIED METHOD TO DO SOFT 404!)
 	if !gState.Cfg.NoWildcardChecks {
 		gState.Chans.workersChan <- struct{}{}
-		h, _, res := evaluateURL(gState.Methods[0], page.URL+RandString(), gState.Client)
+		h, _, res := gState.evaluateURL(gState.Methods[0], page.URL+RandString(), gState.Client)
 		//fmt.Println(page.URL, h, res)
 		if res { //true response indicates a good response for a guid path, unlikely good
 			if detectSoft404(h, gState.Hosts.Get404(u.Host), gState.Cfg.Ratio404) {
 				//it's a soft404 probably, guess we can continue (this logic seems wrong??)
 			} else {
-				PrintOutput(
+				gState.PrintOutput(
 					fmt.Sprintf("Wildcard response detected, skipping dirbusting of %s", page.URL),
 					Info, 0)
 				return
@@ -177,7 +180,7 @@ func dirBust(page SpiderPage) {
 	}

 	if !gState.Cfg.NoStartStop {
-		PrintOutput(
+		gState.PrintOutput(
 			fmt.Sprintf("Dirbusting %s", page.URL),
 			Info, 0,
 		)
@@ -195,7 +198,7 @@ func dirBust(page SpiderPage) {
 		case <-gState.StopDir:
 			//<-maxDirs
 			if !gState.Cfg.NoStartStop {
-				PrintOutput(fmt.Sprintf("Finished dirbusting: %s", page.URL), Info, 0)
+				gState.PrintOutput(fmt.Sprintf("Finished dirbusting: %s", page.URL), Info, 0)
 			}
 			return
 		default:
@@ -209,7 +212,7 @@ func dirBust(page SpiderPage) {
 				}
 				gState.Chans.workersChan <- struct{}{}
 				gState.wg.Add(1)
-				go testURL(method, page.URL+word+"."+ext, gState.Client)
+				go gState.testURL(method, page.URL+word+"."+ext, gState.Client)
 				gState.Checked[method+page.URL+word+"."+ext] = true
 				gState.CMut.Unlock()
 			}
@@ -222,7 +225,7 @@ func dirBust(page SpiderPage) {
 			}
 			gState.Chans.workersChan <- struct{}{}
 			gState.wg.Add(1)
-			go testURL(method, page.URL+word+"/", gState.Client)
+			go gState.testURL(method, page.URL+word+"/", gState.Client)
 			gState.Checked[method+page.URL+word+"/"] = true
 			gState.CMut.Unlock()
 		}
@@ -233,7 +236,7 @@ func dirBust(page SpiderPage) {
 			}
 			gState.Chans.workersChan <- struct{}{}
 			gState.wg.Add(1)
-			go testURL(method, page.URL+word, gState.Client)
+			go gState.testURL(method, page.URL+word, gState.Client)
 			gState.Checked[method+page.URL+word] = true
 			gState.CMut.Unlock()
 		//if gState.Cfg.MaxDirs == 1 {
@@ -244,19 +247,19 @@ func dirBust(page SpiderPage) {
 	}
 	//<-maxDirs
 	if !gState.Cfg.NoStartStop {
-		PrintOutput(fmt.Sprintf("Finished dirbusting: %s", page.URL), Info, 0)
+		gState.PrintOutput(fmt.Sprintf("Finished dirbusting: %s", page.URL), Info, 0)
 	}
 }

 //StartBusting will add a supplied url to the queue to be tested
-func StartBusting(randURL string, u url.URL) {
+func (gState *State) StartBusting(randURL string, u url.URL) {
 	defer gState.wg.Done()
 	if !gState.Cfg.NoWildcardChecks {
-		resp, err := HTTPReq("GET", randURL, gState.Client)
+		resp, err := gState.HTTPReq("GET", randURL, gState.Client)
 		<-gState.Chans.workersChan
 		if err != nil {
 			if gState.Cfg.InputList != "" {
-				PrintOutput(
+				gState.PrintOutput(
 					err.Error(),
 					Error,
 					0,
@@ -266,7 +269,7 @@ func StartBusting(randURL string, u url.URL) {
 			panic("Canary Error, check url is correct: " + randURL + "\n" + err.Error())

 		}
-		PrintOutput(
+		gState.PrintOutput(
 			fmt.Sprintf("Canary sent: %s, Response: %v", randURL, resp.Status),
 			Debug, 2,
 		)
@@ -288,12 +291,12 @@ func StartBusting(randURL string, u url.URL) {
 			Reference: &u,
 		}
 		gState.Checked[u.String()+"/"] = true
-		PrintOutput("URL Added: "+u.String()+"/", Debug, 3)
+		gState.PrintOutput("URL Added: "+u.String()+"/", Debug, 3)
 	}
 	if ok := gState.Checked[x.URL]; !ok {
 		gState.wg.Add(1)
 		gState.Chans.pagesChan <- x
 		gState.Checked[x.URL] = true
-		PrintOutput("URL Added: "+x.URL, Debug, 3)
+		gState.PrintOutput("URL Added: "+x.URL, Debug, 3)
 	}
 }
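Taken together, these hunks turn the package-level worker functions into methods on *State, so each goroutine reads its configuration, channels, and bookkeeping from an explicit receiver rather than an implicit shared global. A toy sketch of that design choice, using an invented state type (not the project's actual State struct):

```go
package main

import "fmt"

// toyState stands in for the project's State type; the field here is
// invented purely for illustration.
type toyState struct {
	blacklist map[string]bool
}

// manageRequests is a method on the state it needs, rather than a free
// function reaching for a package-level variable.
func (s *toyState) manageRequests(pages []string) {
	for _, page := range pages {
		if s.blacklist[page] {
			fmt.Println("Not testing blacklisted URL:", page)
			continue
		}
		fmt.Println("testing:", page)
	}
}

func main() {
	s := &toyState{blacklist: map[string]bool{"http://example.com/skip": true}}
	s.manageRequests([]string{"http://example.com/", "http://example.com/skip"})
}
```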