-
Notifications
You must be signed in to change notification settings - Fork 55
/
utils.go
499 lines (453 loc) · 17.5 KB
/
utils.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
package main
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"strconv"
"strings"
"sync"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/fatih/color"
)
// formatURL normalizes a list of bucket URLs so every entry carries an
// explicit scheme and a trailing slash. Entries without "http://" or
// "https://" default to plain HTTP; the trailing slash makes later
// path concatenation safe.
func formatURL(urls []string) []string {
	normalized := make([]string, 0, len(urls))
	for _, raw := range urls {
		u := raw
		if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
			u = "http://" + u
		}
		if !strings.HasSuffix(u, "/") {
			u += "/"
		}
		normalized = append(normalized, u)
	}
	return normalized
}
// readFile loads candidate bucket URLs from fileName, one per line.
// Spaces are stripped from each line, and only lines matching the
// package-level urlValidation regex are appended to the global allURLs
// slice. Any open or scan failure aborts the program via log.Fatalln.
func readFile(fileName string) {
	handle, err := os.Open(fileName)
	if err != nil {
		log.Fatalln("[Error] Looks like the tool is facing some issue while loading the specified file. [", err.Error(), "]")
	}
	defer handle.Close()

	lines := bufio.NewScanner(handle)
	for lines.Scan() {
		candidate := strings.ReplaceAll(lines.Text(), " ", "")
		if urlValidation.MatchString(candidate) {
			allURLs = append(allURLs, candidate)
		}
	}
	if err := lines.Err(); err != nil {
		log.Fatalln("[Error] Looks like the tool is facing some issue while loading the specified file. [", err.Error(), "]")
	}
}
func listFilesOtherURLs(bucketURL string, fullScan bool) (otherbucketFiles [][]string, otherbucketSizes [][]string, err error) {
// Make an HTTP GET request to the provided URL
client := &http.Client{
CheckRedirect: func(req *http.Request, via []*http.Request) error {
// You can customize redirect handling here if needed.
return nil
},
}
resp, err := client.Get(bucketURL)
if err != nil {
return nil, nil, err
}
defer resp.Body.Close()
// Check response status code for errors
if resp.StatusCode != http.StatusOK {
return nil, nil, fmt.Errorf("failed to retrieve data from %s. Status code: %d", bucketURL, resp.StatusCode)
}
// Read response body
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, nil, err
}
// Initialize slices to store bucketFiles and bucketSizes
var allFiles [][]string
var allFileSizes [][]string
if fullScan {
fmt.Println("Listing files using Full Mode... [Other URLs]")
//CHECKS FOR PLATFORMS
awsHeader := resp.Header.Get("X-Amz-Bucket-Region")
//CHECK IF THE SET BUCKET IS AN AWS-POWERED BUCKET
if awsHeader != "" {
fmt.Println("\nAWS S3 Bucket detected!")
if awsCreds == "AccessKey:SecretKey" || !strings.Contains(awsCreds, ":") {
fmt.Println("Invalid S3 credentials provided! Either use the correct credentials or re-run the scan without the full mode. [Other URLs]")
fmt.Println("Switching back to scrape mode... [Other URLs]")
allFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
allFileSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
} else {
awsBucketNameRes := awsBucketNameRe.FindAllStringSubmatch(string(body), -1)
if awsBucketNameRes != nil {
awsBucketName := awsBucketNameRes[0][1]
awsKeys := strings.Split(awsCreds, ":")
// Initialize a new AWS session
sess, err := session.NewSession(&aws.Config{
Region: aws.String(awsHeader), // Provide the appropriate AWS region
Credentials: credentials.NewStaticCredentials(awsKeys[0], awsKeys[1], ""),
})
if err != nil {
fmt.Println("Failed to create session [Other URLs]:", err)
return nil, nil, err
}
// Create a new S3 service client
svc := s3.New(sess)
// Retrieve the list of objects in the bucket
params := &s3.ListObjectsInput{
Bucket: aws.String(awsBucketName),
MaxKeys: aws.Int64(1000),
}
err = svc.ListObjectsPages(params, func(page *s3.ListObjectsOutput, lastPage bool) bool {
for _, obj := range page.Contents {
// Perform your desired operations with each object here
filePath := *obj.Key
fileSize := fmt.Sprintf("%d", *obj.Size)
// Append the values to the respective 2D arrays
allFiles = append(allFiles, []string{"", filePath})
allFileSizes = append(allFileSizes, []string{"", fileSize})
}
return !lastPage
})
if err != nil {
if _, ok := err.(awserr.Error); ok {
bucketlootOutput.Errors = append(bucketlootOutput.Errors, string(err.Error()))
fmt.Println("Tool encountered an issue while scanning the bucket in Full Mode! Retrying with scrape mode... [Other URLs]")
allFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
allFileSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
} else {
bucketlootOutput.Errors = append(bucketlootOutput.Errors, string(err.Error()))
fmt.Println("Tool encountered an issue while scanning the bucket in Full Mode! Retrying with scrape mode... [Other URLs]")
allFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
allFileSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
}
} else {
fmt.Println("Tool encountered an issue while scanning the bucket in Full Mode! Retrying with scrape mode... [Other URLs]")
allFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
allFileSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
}
} else {
fmt.Println("Unknown platform! Switching back to scrape mode... [Other URLs]")
allFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
allFileSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
} else {
// Parse HTML to extract S3 object keys
allFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
allFileSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
return allFiles, allFileSizes, nil
}
func listS3BucketFiles(bucketURLs []string) {
var wg sync.WaitGroup
var scannable []string
var notScannable []string
var listURL fileListEntry
var totalFiles = 0
var totalIntFiles = 0
var bucketFiles [][]string
var bucketSizes [][]string
for _, bucketURL := range bucketURLs {
var allFiles []string
var intFiles []string
wg.Add(1)
go func(bucketURL string) {
defer wg.Done()
// Make HTTP request to S3 bucket URL
client := &http.Client{
CheckRedirect: func(req *http.Request, via []*http.Request) error {
// You can customize redirect handling here if needed.
return nil
},
}
resp, err := client.Get(bucketURL)
if err != nil {
notScannable = append(notScannable, bucketURL)
bucketlootOutput.Errors = append(bucketlootOutput.Errors, bucketURL+" encountered an error during the GET request: "+err.Error())
return
}
defer resp.Body.Close()
// Check response status code for errors
if resp.StatusCode != http.StatusOK {
notScannable = append(notScannable, bucketURL)
return
}
// Read response body
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
notScannable = append(notScannable, bucketURL)
return
}
// CHECK IF FULLSCAN FLAG IS TRUE, IF YES TRY THE AWS MODULE FIRST, ELSE USE GET REQUEST DATA
if *fullScan {
fmt.Println("Listing files using Full Mode...")
//CHECKS FOR PLATFORMS
awsHeader := resp.Header.Get("X-Amz-Bucket-Region")
//CHECK IF THE SET BUCKET IS AN AWS-POWERED BUCKET
if awsHeader != "" {
fmt.Println("\nAWS S3 Bucket detected!")
if awsCreds == "AccessKey:SecretKey" || !strings.Contains(awsCreds, ":") {
fmt.Println("Invalid S3 credentials provided! Either use the correct credentials or re-run the scan without the full mode.")
fmt.Println("Switching back to scrape mode...")
bucketFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
bucketSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
} else {
awsBucketNameRes := awsBucketNameRe.FindAllStringSubmatch(string(body), -1)
if awsBucketNameRes != nil {
awsBucketName := awsBucketNameRes[0][1]
awsKeys := strings.Split(awsCreds, ":")
// Initialize a new AWS session
sess, err := session.NewSession(&aws.Config{
Region: aws.String(awsHeader), // Provide the appropriate AWS region
Credentials: credentials.NewStaticCredentials(awsKeys[0], awsKeys[1], ""),
})
if err != nil {
fmt.Println("Failed to create session:", err)
return
}
// Create a new S3 service client
svc := s3.New(sess)
// Retrieve the list of objects in the bucket
params := &s3.ListObjectsInput{
Bucket: aws.String(awsBucketName),
MaxKeys: aws.Int64(1000),
}
err = svc.ListObjectsPages(params, func(page *s3.ListObjectsOutput, lastPage bool) bool {
for _, obj := range page.Contents {
// Perform your desired operations with each object here
filePath := *obj.Key
fileSize := fmt.Sprintf("%d", *obj.Size)
// Append the values to the respective 2D arrays
bucketFiles = append(bucketFiles, []string{"", filePath})
bucketSizes = append(bucketSizes, []string{"", fileSize})
}
return !lastPage
})
if err != nil {
if _, ok := err.(awserr.Error); ok {
bucketlootOutput.Errors = append(bucketlootOutput.Errors, string(err.Error()))
fmt.Println("Tool encountered an issue while scanning the bucket in Full Mode! Retrying with scrape mode...")
bucketFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
bucketSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
} else {
bucketlootOutput.Errors = append(bucketlootOutput.Errors, string(err.Error()))
fmt.Println("Tool encountered an issue while scanning the bucket in Full Mode! Retrying with scrape mode...")
bucketFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
bucketSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
}
} else {
fmt.Println("Tool encountered an issue while scanning the bucket in Full Mode! Retrying with scrape mode...")
bucketFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
bucketSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
}
} else {
fmt.Println("Unknown platform! Switching back to scrape mode...")
bucketFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
bucketSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
} else {
// Parse HTML to extract S3 object keys
bucketFiles = bucketFileRE.FindAllStringSubmatch(string(body), -1)
bucketSizes = bucketSizeRE.FindAllStringSubmatch(string(body), -1)
}
for i := 0; i < len(bucketFiles) && i < len(bucketSizes); i++ {
bucketFile := bucketFiles[i]
bucketFileSize := bucketSizes[i]
isBlacklisted = 0
for _, blacklistExtension := range blacklistExtensions {
if strings.HasSuffix(strings.ToLower(bucketFile[1]), blacklistExtension) {
isBlacklisted = 1
break
}
}
if isBlacklisted == 0 {
if maxFileSize != "" {
buckfileSize, err := strconv.ParseInt(bucketFileSize[1], 10, 64)
if err == nil {
maxbucketfilesize, err := strconv.ParseInt(maxFileSize, 10, 64)
if err == nil {
if buckfileSize <= maxbucketfilesize {
intFiles = append(intFiles, bucketURL+bucketFile[1])
totalIntFiles += 1
}
}
}
} else {
intFiles = append(intFiles, bucketURL+bucketFile[1])
totalIntFiles += 1
}
}
allFiles = append(allFiles, bucketURL+bucketFile[1])
totalFiles += 1
}
if len(allFiles) > 0 {
scannable = append(scannable, bucketURL)
listURL = fileListEntry{URL: bucketURL, AllFiles: allFiles, IntFiles: intFiles}
urlsFileList = append(urlsFileList, listURL)
iniFileListData.ScanData = append(iniFileListData.ScanData, listURL)
} else {
if *digMode {
if !strings.HasPrefix(string(body), "<?xml") { // IF THE STRING IS NOT A BUCKET
log.Println(bucketURL, "doesn't seems to be a storage bucket! Trying to extract URLs if any from the response. [Dig Mode]")
diggedURLs = uniqueStrings(urlsRE.FindAllString(string(body), -1))
if len(diggedURLs) > 0 {
log.Println("Found", len(diggedURLs), "URLs in", bucketURL)
for _, otherURL := range diggedURLs { // ITERATE OVER ALL THE URLS DISCOVERED
otherURL += "/"
otherbucketFiles, otherBucketSizes, err := listFilesOtherURLs(otherURL, *fullScan)
if err == nil {
if len(otherbucketFiles) > 0 { //ACTION TO PERFORM IF THE TOOL DISCOVERS FILES FROM THE BUCKET EXTRACTED
fmt.Printf("Discovered %v : %s\n", color.MagentaString("storage bucket with files"), otherURL)
for i := 0; i < len(otherbucketFiles) && i < len(otherBucketSizes); i++ { // ITERATE OVER ALL THE BUCKET FILES DISCOVERED
othbucketFile := otherbucketFiles[i]
othbucketFileSize := otherBucketSizes[i]
isBlacklisted = 0
for _, blacklistExtension := range blacklistExtensions {
if strings.HasSuffix(strings.ToLower(othbucketFile[1]), blacklistExtension) {
isBlacklisted = 1
break
}
}
if isBlacklisted == 0 {
if maxFileSize != "" {
buckfileSize, err := strconv.ParseInt(othbucketFileSize[1], 10, 64)
if err == nil {
maxbucketfilesize, err := strconv.ParseInt(maxFileSize, 10, 64)
if err == nil {
if buckfileSize <= maxbucketfilesize {
intFiles = append(intFiles, bucketURL+othbucketFile[1])
totalIntFiles += 1
}
}
}
} else {
intFiles = append(intFiles, bucketURL+othbucketFile[1])
totalIntFiles += 1
}
}
allFiles = append(allFiles, otherURL+othbucketFile[1])
totalFiles += 1
} //FINISH ITERATING OVER ALL THE FILES DISCOVERED
//////////////////////////////EDIT HERE///////////////////
scannable = append(scannable, otherURL)
listURL = fileListEntry{URL: otherURL, AllFiles: allFiles, IntFiles: intFiles}
urlsFileList = append(urlsFileList, listURL)
iniFileListData.ScanData = append(iniFileListData.ScanData, listURL)
} else { // TOOL DOESN'T CATCHES ANY FILES FROM THE BUCKET DISCOVERED
unscannable = append(unscannable, bucketURL)
}
} else { // IF THERE WAS AN ERROR MAKING THE REQUEST
unscannable = append(unscannable, bucketURL)
}
}
} else { // IF NO URLS WERE DISCOVERED
unscannable = append(unscannable, bucketURL)
}
} else {
unscannable = append(unscannable, bucketURL)
}
if len(unscannable) > 0 {
notScannable = append(notScannable, uniqueStrings(unscannable)...)
}
} else { // WHEN DIGMODE IS SET TO FALSE
notScannable = append(notScannable, bucketURL)
}
}
}(bucketURL)
}
wg.Wait()
iniFileListData.Scannable = append(iniFileListData.Scannable, uniqueStrings(scannable)...)
iniFileListData.NotScannable = append(iniFileListData.NotScannable, uniqueStrings(notScannable)...)
iniFileListData.TotalFiles = totalFiles
iniFileListData.TotalIntFiles = totalIntFiles
}
// toJSON pretty-prints the global bucketlootOutput report to stdout and,
// when the -save flag (saveOutput) is set, also writes the same JSON to
// that file. Errors are reported on stdout rather than returned because
// this runs at the very end of a scan.
func toJSON() {
	jsonData, err := json.MarshalIndent(bucketlootOutput, "", " ")
	if err != nil {
		fmt.Println("Error marshaling JSON:", err)
		return
	}
	fmt.Println("\n" + string(jsonData))
	if saveOutput == "" {
		return
	}
	// os.WriteFile replaces the old Create/Write/defer-Close sequence and
	// cannot leak the handle on error paths. 0666 matches os.Create's mode.
	if err := os.WriteFile(saveOutput, jsonData, 0666); err != nil {
		fmt.Println("Error writing to file:", err)
		return
	}
	fmt.Println("Data successfully saved to", saveOutput)
}
func readCredsFile() {
file, err := ioutil.ReadFile("credentials.json")
if err != nil {
log.Fatalln("Error reading credentials.json file, Exiting! \n", err)
return
}
var data platformCreds
err = json.Unmarshal(file, &data)
if err != nil {
fmt.Println("Error decoding JSON:", err)
return
}
for _, entry := range data {
if entry.Platform == "AWS" {
awsCreds = entry.Credentials
}
}
}
func loadNotifyConfig() error {
fileContent, err := ioutil.ReadFile("notifyConfig.json")
if err != nil {
return err
}
err = json.Unmarshal(fileContent, &platforms)
if err != nil {
return err
}
return nil
}
// notifyDiscord posts message to a Discord webhook.
//
// The payload is built with json.Marshal so quotes, backslashes, and
// newlines inside message cannot corrupt the JSON document (the previous
// fmt.Sprintf template produced invalid payloads for such input). Discord
// replies "204 No Content" on a successful webhook execution, so both 200
// and 204 are accepted; any other status is returned as an error.
func notifyDiscord(webhookURL, message string) error {
	payload, err := json.Marshal(map[string]string{"content": message})
	if err != nil {
		return err
	}
	resp, err := http.Post(webhookURL, "application/json", bytes.NewReader(payload))
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent {
		return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	return nil
}
// notifySlack posts message to a Slack incoming webhook.
//
// The payload is built with json.Marshal so special characters in message
// cannot corrupt the JSON document (the previous fmt.Sprintf template broke
// on embedded quotes and newlines). The stray debug Println of the status
// code was removed. Slack incoming webhooks answer 200 OK on success.
func notifySlack(webhookURL, message string) error {
	payload, err := json.Marshal(map[string]string{"text": message})
	if err != nil {
		return err
	}
	resp, err := http.Post(webhookURL, "application/json", bytes.NewReader(payload))
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	return nil
}