#!/bin/bash
pid=$$
# Delete the line below if you installed Crimson from source:
PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin:/root/go/bin:"
#
### CREATED BY KARMAZ
#
#
#
### FUNCTIONS:
#
# 1. FULL RANGE PORT SCANNING && NSE ON OPENED PORTS
# 2. VULNERABILITY SCANNING
# 3. DOMAIN CRAWLING
# 4. DIRECTORY BRUTEFORCING
# 5. GATHERING SOURCE CODE OF SCRAPED / BRUTEFORCED URLS
# 6. EXTRACTING NEW PATHS, API KEYS, ENDPOINTS FROM GATHERED SOURCE CODE
# 7. MERGING PATHS WITH DOMAIN AND PROBING FOR NEW ENDPOINTS
# 8. PROXYING LIVE RESULTS TO BURP SUITE
# 9. PREPARING params.txt && dirs.txt FOR EXPLOIT MODULE
# 10. CHECK WAF && POTENTIAL BACKUP FILES && CMS
# 11. TESTING HOP-BY-HOP DELETION
#
### LISTS:
#
# 1) recon.txt - FILE WITH RECON OUTPUT
# 2) urls.txt - FILE WITH GATHERED URLS
# 3) status_params.txt - STATUS CODES OF urls.txt
# 4) ffuf.txt - DIR BRUTEFORCING OUTPUT
# 5) status_dir.txt - STATUS CODE OF ffuf.txt
# 9) exp/params.txt - FILE PREPARED FOR crimson_exploit WITH PARAMS
# 10) exp/dirs.txt - FILE PREPARED FOR crimson_exploit WITH DIRECTORIES
# 11) backups.txt - POTENTIAL BACKUP FILES
# 12) arjun.txt - FILE WITH BRUTEFORCED PARAMETERS
# 13) nmap.txt - FILE WITH TCP/UDP PORT SCANNING OUTPUT
# 15) exp/nmap.gnmap - FILE WITH TCP/UDP PORT SCANNING OUTPUT IN GREPABLE FORMAT
#
### WORKFLOW
#
# 0. Start Burp - optional step
# - Create new project - www.example.tld
# - Turn off interception
# - Make active scan for proxied urls only in scope
# 1. Start the script
# - If you didn't choose the -a flag, go to ~/bounty/tested.domain.tld/temp and manually remove false-positive entries from ferox.txt
# 2. Check the output listed above (LISTS)
# 3. Manually browse the application, click on all functionalities
# 4. Copy whole target scope from Burp after manually browsing the target
# 5. Paste it to exp/all.txt and run crimson_exploit
#
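### EXAMPLE USAGE (hypothetical target and cookie - adjust to your own scope):
# ./crimson_target -D "www.example.tld" -c "Cookie: session=abc123;" -p -u -a -y -b
#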
###
### Catch crash in trap and save the function name in anchor.log
### NOTE: SIGKILL cannot be trapped, so only INT, HUP and TERM are handled here
trap 'echo $anchor > anchor.log && exit 1' SIGINT SIGHUP SIGTERM
anchor_check() {
### 0.RESUME LOGIC - ON A FRESH RUN EXECUTE EVERYTHING; AFTER A CRASH SKIP FUNCTIONS UNTIL THE ONE SAVED IN anchor.log
anchor="$1"
[ ! -f "$HOME/bounty/$DOMAIN/$domain/anchor.log" ] && return 0
if grep -q "$1" "$HOME/bounty/$DOMAIN/$domain/anchor.log"; then
rm "$HOME/bounty/$DOMAIN/$domain/anchor.log"; return 0
else
return 1
fi
}
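### Illustrative resume flow: if a run dies inside nuclei_scan_domain, the trap
### writes "nuclei_scan_domain" to anchor.log; on the next run every earlier
### function returns early until that name matches, then execution continues.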
clear_log() {
rm files.txt >/dev/null 2>&1
rm status_ffuf.txt >/dev/null 2>&1
rm status_new_endpoints.txt >/dev/null 2>&1
### REMOVE EMPTY FILES AND DIRECTORIES
find . -type d -empty -print -delete -o -type f -empty -print -delete >/dev/null 2>&1
rm -rf nuclei-templates >/dev/null 2>&1
### REMOVING LOG IF PROGRAM EXIT NORMALLY
if [ -f "$HOME/bounty/$DOMAIN/$domain/anchor.log" ]; then
rm "$HOME/bounty/$DOMAIN/$domain/anchor.log"
fi
}
### ---
### ALL FUNCTIONS:
tcp_scanning() {
### 1.RUSTSCAN
anchor_check "${FUNCNAME[0]}" || return 0
echo "[TCP SCANNING] ---------------------------------------------------------" | tee -a recon.txt
echo TARGET: "$TARGET" | tee -a recon.txt
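# Everything after "--" is handed to nmap, which rescans only the ports RustScan found open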
rustscan -a "$TARGET" -- -n -A -Pn --append-output -T4 -oG exp/nmap.gnmap -oN nmap.txt # --scan-order "Random"
cat nmap.txt >> recon.txt
rm nmap.txt
}
udp_scanning() {
### 2.NMAP
anchor_check "${FUNCNAME[0]}" || return 0
echo "[UDP SCANNING] ---------------------------------------------------------" | tee -a recon.txt
echo TARGET: "$TARGET" | tee -a recon.txt
# UDP scanning needs raw sockets, so fall back to sudo if the unprivileged scan fails
nmap -sU "$domain" -T4 -A --resolve-all -Pn --append-output -oG exp/udpnmap.gnmap -oN udpnmap.txt || sudo nmap -sU "$domain" --resolve-all -T4 -A -Pn --append-output -oG exp/udpnmap.gnmap -oN udpnmap.txt
cat udpnmap.txt >> recon.txt
}
get_the_sitemap() {
### 3.SITEMAP-URLS.SH
anchor_check "${FUNCNAME[0]}" || return 0
echo "[SITEMAP 80] ----------------------------------------------------------" | tee -a recon.txt
"$HOME"/tools/CRIMSON/scripts/sitemap-urls.sh http://"$domain"/sitemap.xml >> recon.txt
echo >> recon.txt
echo "[SITEMAP 443] ---------------------------------------------------------" | tee -a recon.txt
"$HOME"/tools/CRIMSON/scripts/sitemap-urls.sh https://"$domain"/sitemap.xml >> recon.txt
echo >> recon.txt
}
get_the_robots(){
### 4.CURL
anchor_check "${FUNCNAME[0]}" || return 0
echo "[ROBOTS 80] -----------------------------------------------------------" | tee -a recon.txt
curl -k --max-time 30 http://"$domain"/robots.txt >> recon.txt
echo >> recon.txt
echo "[ROBOTS 443] ----------------------------------------------------------" | tee -a recon.txt
curl -k --max-time 30 https://"$domain"/robots.txt >> recon.txt
echo >> recon.txt
}
check_waf() {
### 5.WAFW00F
anchor_check "${FUNCNAME[0]}" || return 0
echo "[WAFW00F] -------------------------------------------------------------" | tee -a recon.txt
wafw00f "$domain" | tail -n +16 | tee -a recon.txt
}
whatweb_enum() {
### 6.WHATWEB
anchor_check "${FUNCNAME[0]}" || return 0
echo "[WHATWEB 80] ---------------------------------------------------------" | tee -a recon.txt
whatweb -a 3 "http://$domain" -H "$cookie" | tee -a recon.txt
echo "[WHATWEB 443] ---------------------------------------------------------" | tee -a recon.txt
whatweb -a 3 "https://$domain" -H "$cookie" | tee -a recon.txt
}
cmseek_enum() {
### 7.CMSEEK
anchor_check "${FUNCNAME[0]}" || return 0
echo "[CMSEEK] ---------------------------------------------------------" | tee -a recon.txt
python3 "$HOME"/tools/CMSeeK/cmseek.py -u "$domain" --follow-redirect -r --batch
cat "$HOME"/tools/CMSeeK/Result/"$domain"/cms.json | jq . >> recon.txt
rm -rf "$HOME"/tools/CMSeeK/Result/"$domain"/
}
nuclei_scan_domain(){
### 8.NUCLEI
anchor_check "${FUNCNAME[0]}" || return 0
git clone https://github.com/projectdiscovery/nuclei-templates.git > /dev/null
rm -rf nuclei-templates/token-spray
echo "[NUCLEI 80] ---------------------------------------------------------" | tee -a recon.txt
nuclei -c 200 -H "User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100" -u http://"$domain" -t nuclei-templates/ -stats -silent | tee -a recon.txt
echo "[NUCLEI 443] ---------------------------------------------------------" | tee -a recon.txt
nuclei -c 200 -H "User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100" -u https://"$domain" -t nuclei-templates/ -stats -silent | tee -a recon.txt
}
nikto_enum_443() {
### 9.NIKTO
anchor_check "${FUNCNAME[0]}" || return 0
echo "[NIKTO 443] ---------------------------------------------------------" | tee -a recon.txt
"$HOME"/tools/nikto/program/nikto.pl -host https://"$domain" -useragent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36${cr}${nl}$cookie" --maxtime 1500 | tee -a recon.txt
}
nikto_enum_80() {
### 10.NIKTO
anchor_check "${FUNCNAME[0]}" || return 0
echo "[NIKTO 80] ----------------------------------------------------------" | tee -a recon.txt
"$HOME"/tools/nikto/program/nikto.pl -host http://"$domain" -useragent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36${cr}${nl}$cookie" --maxtime 1500 | tee -a recon.txt
}
gospider_enum() {
### 11.GOSPIDER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [1]"
gospider -q -r -w -a --sitemap --depth 3 --robots -t 150 -c 10 -s https://"$domain" -H "$cookie" >> urls.txt
}
paramspider_enum() {
### 12.PARAMSPIDER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [2]"
python3 "$HOME"/tools/ParamSpider/paramspider.py -d "$domain" --output ./paramspider.txt --level high > /dev/null 2>&1
cat paramspider.txt | grep http | sort -u | grep "$domain" >> urls.txt
cat ../paramspider.txt | grep http | sort -u | grep "$domain" | anew urls.txt > /dev/null
}
gau_enum() {
### 13.GAU
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [3]"
timeout 20m gau "$domain" >> urls.txt
}
waybackurls_enum() {
### 14.WAYBACKURLS
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [4]"
timeout 20m waybackurls "$domain" >> urls.txt
}
hakrawler_enum() {
### 15.HAKRAWLER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [5]"
echo http://"$domain" | hakrawler -insecure -subs -u -h "$cookie" >> urls.txt
echo https://"$domain" | hakrawler -insecure -subs -u -h "$cookie" >> urls.txt
}
galer_enum_and_merging() {
### 16.GALER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m SPIDER [6]"
galer -u http://"$domain" -s >> urls.txt
galer -u https://"$domain" -s >> urls.txt
### MERGE SPIDERS AND DELETE DUPLICATES >> urls.txt
echo -e "\033[0;31m [+]\033[0m MERGING SPIDERS RESULTS"
cat ../urls.txt | grep "$domain" | anew urls.txt 2>/dev/null
cat urls.txt | qsreplace -a > temp1.txt
mv temp1.txt urls.txt
### GET NEW ENDPOINTS FROM SPIDERS AND ADD THEM TO THE WORDLIST FOR DIRECTORY BRUTEFORCING > custom_dir.txt
echo -e "\033[0;31m [+]\033[0m GATHERING NEW PATHS FROM SPIDERS RESULTS"
cat urls.txt | unfurl paths > temp1.txt
sort -u "$HOME"/tools/CRIMSON/words/dir > "$HOME"/tools/CRIMSON/words/custom_dir.txt
sort -u temp1.txt | anew "$HOME"/tools/CRIMSON/words/custom_dir.txt > /dev/null
rm temp1.txt
}
ffuf_bruteforce() {
### 17.FFUF
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m STARTING DIRECTORY BRUTEFORCING"
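# -mc all keeps every status code and -fc 400 drops only HTTP 400 responses;
# the clever_ffuf.py helper below then prunes the remaining trash responses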
if [ $https_on == 1 ]
then
ffuf -noninteractive -H "User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100" -w "$HOME"/tools/CRIMSON/words/custom_dir.txt -t 200 -u https://"$domain"/FUZZ -mc all -fc 400 -H "$cookie" -o ffuf.json > /dev/null
else
ffuf -noninteractive -H "User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100" -w "$HOME"/tools/CRIMSON/words/custom_dir.txt -t 200 -u http://"$domain"/FUZZ -mc all -fc 400 -H "$cookie" -o ffuf.json > /dev/null
fi
cat ffuf.json | jq -c '.results[] | {url:.url,status: .status}' > status_ffuf.txt
rm ffuf.json
### REMOVE TRASH RESPONSES FROM PREVIOUS SCAN > ffuf.txt
echo -e "\033[0;31m [+]\033[0m REMOVING TRASH RESPONSES FROM status_ffuf.txt"
python3 "$HOME"/tools/CRIMSON/scripts/clever_ffuf.py
sort -u temp_ffuf.txt > ffuf.txt
rm temp_ffuf.txt
}
ferox_bruteforce() {
### 18.FEROX
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m STARTING DIRECTORY BRUTEFORCING"
if [ $https_on == 1 ]
then
feroxbuster --extract-links --no-recursion --redirects --wordlist "$HOME"/tools/CRIMSON/words/custom_dir.txt -o temp/ferox.txt -u "https://$domain" -H "$cookie" -k
else
feroxbuster --extract-links --no-recursion --redirects --wordlist "$HOME"/tools/CRIMSON/words/custom_dir.txt -o temp/ferox.txt -u "http://$domain" -H "$cookie"
fi
### MANUAL CHECK - DELETE TRASH FROM FEROXBUSTER OUTPUT
echo -e "\033[0;31m [+]\033[0m CHECK $HOME/bounty/$DOMAIN/$domain/temp/ferox.txt"
echo -e "\033[0;31m [+][+]\033[0m REMOVE FALSE-POSITIVES MANUALLY"
echo -e "\033[0;31m [+][+][+]\033[0m RE-RUN CRIMSON_TARGET"
echo "ferox_manual" > "$HOME/bounty/$DOMAIN/$domain/anchor.log"
exit 1
}
ferox_manual() {
# 19.CONTINUE THE EXECUTION...
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m FEROXBUSTER SUCCESSFULLY EDITED - CONTINUING THE SCRIPT EXECUTION"
### CONVERT THE OUTPUT TO PROPER FORMAT AND DELETE DUPLICATES
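# cut strips feroxbuster's status/size columns by cutting at the first "h" (of
# http), sed restores that "h", and qsreplace deduplicates URLs that differ
# only in their query-string values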
cat temp/ferox.txt | cut -s -d "h" -f 2-1000 | sed "s/^/h/" | qsreplace -a > ffuf.txt
}
code_gatherer() {
# 20.CURL
anchor_check "${FUNCNAME[0]}" || return 0
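# Each response (headers + body) is saved under all_source_code/ with the URL
# as the filename (scheme stripped, "/" -> "_"); URLs whose sanitized name
# would exceed 255 bytes are truncated to their first 220 characters first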
while IFS= read -r url
do echo -e "\033[0;31m [++]\033[0m GATHERING SOURCE CODE OF: $url"
if (( $(echo -n "$url" | sed "s/^https:\/\///" | sed "s/^http:\/\///" | sed "s/\//_/g" |wc -c) < 255 ))
then
curl -k -s -D - "$url" -H "$cookie" -A "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36" > all_source_code/$(echo "$url" | sed "s/^https:\/\///" | sed "s/^http:\/\///" | sed "s/\//_/g")
else
small_url=${url:0:220}
curl -k -s -D - "$url" -H "$cookie" -A "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36" > all_source_code/$(echo "$small_url" | sed "s/^https:\/\///" | sed "s/^http:\/\///" | sed "s/\//_/g")
fi
done < ffuf.txt
}
gather_js_links() {
# 21.GETJS
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m GATHERING .js LINKS"
cat urls.txt ffuf.txt | getJS --insecure --complete --nocolors -H "$cookie" | grep "^http" | grep "$DOMAIN" | sed "s/\?.*//" | anew exp/jsfiles.txt
### GET LINKS TO JS FILES FROM urls.txt > exp/jsfiles.txt
cat urls.txt | grep "\.js$" | grep "^http" | grep "$DOMAIN" | sed "s/\?.*//" | anew exp/jsfiles.txt > /dev/null
}
check_js_live() {
# 22.HTTPX
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING FOR LIVE .js LINKS"
httpx -silent -threads 200 -H "User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100" -l exp/jsfiles.txt -H "$cookie" >> exp/temp_jsfiles.txt
mv exp/temp_jsfiles.txt exp/jsfiles.txt
}
gather_js_code(){
# 23.CURL
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m GATHERING SOURCE CODE FROM LIVE .js LINKS"
while IFS= read -r url
do echo -e "\033[0;31m [++]\033[0m GATHERING SOURCE CODE OF: $url"
if (( $(echo -n "$url" | sed "s/^https:\/\///" | sed "s/^http:\/\///" | sed "s/\//_/g" |wc -c) < 255 ))
then
curl -k -s "$url" -H "$cookie" -A "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36" | js-beautify > all_source_code/$(echo "$url" | sed "s/^https:\/\///" | sed "s/^http:\/\///" | sed "s/\//_/g") 2>/dev/null
else
small_url=${url:0:220}
curl -k -s "$url" -H "$cookie" -A "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36" | js-beautify > all_source_code/$(echo "$small_url" | sed "s/^https:\/\///" | sed "s/^http:\/\///" | sed "s/\//_/g") 2>/dev/null
fi
done < exp/jsfiles.txt
}
zile_enum() {
# 24.ZILE
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m GATHERING API KEYS && NEW PATHS"
cd all_source_code || exit
python3 "$HOME"/tools/CRIMSON/scripts/zile/zile.py --file >> ../temp/temp_zile.txt
cd ..
awk '!seen[$0]++' temp/temp_zile.txt | grep -v "[+]" > zile.txt 2>/dev/null
### GET ENDPOINTS FROM ZILE > extracted.txt
cat zile.txt | cut -s -d " " -f 2 | sed "s/^..//g" | sed "s/..$//g" | sed "s/\"//g" | unfurl path | sed "s/'$//" > temp/temp_zile_endpoints.txt
awk '!seen[$0]++' temp/temp_zile_endpoints.txt >> extracted.txt
}
mutate_urls_check_duplicates() {
# 25.UNIQ
anchor_check "${FUNCNAME[0]}" || return 0
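# Prefix every extracted path with http(s)://$domain, then drop candidates
# that already appear in ffuf.txt (sort | uniq -d finds the overlap) so only
# genuinely new endpoints get probed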
while IFS= read -r line; do echo http://"$domain""$line" >> temp/temp_new.txt; done < extracted.txt
while IFS= read -r line; do echo https://"$domain""$line" >> temp/temp_new.txt; done < extracted.txt
rm extracted.txt
awk '!seen[$0]++' temp/temp_new.txt > temp/temp_new_endpoints.txt
sort ffuf.txt temp/temp_new_endpoints.txt | uniq -d > temp/temp_duplicates.txt
grep -v -x -f temp/temp_duplicates.txt temp/temp_new_endpoints.txt > new_endpoints.txt
}
check_new_endpoints_status() {
# 26.WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING STATUS CODE OF NEW PATHS"
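# wfuzz doubles as a mass status checker here: the first wordlist supplies
# full URLs, the "blank" wordlist (presumably a single empty line) makes FUZ2Z
# expand to nothing, -L follows redirects and -Z ignores connection errors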
wfuzz -t 100 -f status_new_endpoints.txt,raw -L -Z -z file,new_endpoints.txt -z file,"$HOME"/tools/CRIMSON/words/blank -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
rm new_endpoints.txt
### REMOVE 400 && 404 RESPONSES
cat status_new_endpoints.txt | grep "http" | grep -E "C=400 |C=404 " -v | grep -v "Pycurl" | cut -s -d "\"" -f2 > filtered_new_endpoints.txt
### MERGE ffuf.txt WITH NEW ENDPOINTS >> ffuf.txt
echo -e "\033[0;31m [+]\033[0m ADDING LIVE PATHS TO ffuf.txt"
cat filtered_new_endpoints.txt | anew ffuf.txt > /dev/null
rm filtered_new_endpoints.txt
### ADD http://$domain AND https://$domain EVEN IF THEY ARE 404/400/X status code >> ffuf.txt
echo -e "http://$domain\nhttps://$domain" | anew ffuf.txt > /dev/null
}
proxy_directories_to_burp() {
# 27.WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m PROXYING ALL DIRECTORIES && FILES TO BURP SUITE"
wfuzz -t 100 -f status_dir.txt,raw -L -Z -z file,ffuf.txt -z file,"$HOME"/tools/CRIMSON/words/blank -p host.docker.internal:8080 -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
proxy_urls_to_burp() {
# 28.WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m PROXYING urls.txt TO BURP"
cat urls.txt | grep "?" > temp/temp_params.txt
wfuzz -f status_params.txt,raw -L -Z -z file,temp/temp_params.txt -z file,"$HOME"/tools/CRIMSON/words/blank -p host.docker.internal:8080 -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
check_directories_status(){
# 29.WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING STATUS CODE OF (ffuf.txt) BRUTEFORCED DIRECTORIES"
wfuzz -t 100 -f status_dir.txt,raw -L -Z -z file,ffuf.txt -z file,"$HOME"/tools/CRIMSON/words/blank -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
check_urls_status(){
# 30.WFUZZ
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING STATUS CODE OF (urls.txt) CRAWLED QUERIES"
cat urls.txt | grep "?" > temp/temp_params.txt
wfuzz -t 100 -f status_params.txt,raw -L -Z -z file,temp/temp_params.txt -z file,"$HOME"/tools/CRIMSON/words/blank -H "$cookie" FUZZFUZ2Z > /dev/null 2>&1
}
prepare_params_and_files() {
# 31.QSREPLACE
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m PREPARING FILES FOR crimson_exploit MODULE"
#cat status_params.txt | grep -v "C=404 " | grep http | grep -v "Pycurl" | cut -s -d "\"" -f2 | sort -u | qsreplace -a > exp/params.txt
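# unfurl keys extracts the unique query-parameter names from every crawled URL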
cat urls.txt | unfurl keys | sort -u > exp/params.txt
cat status_dir.txt | grep -v "C=400\|C=429\|C=404" | grep http | cut -s -d "\"" -f2 | grep -v -e "Pycurl\|\.woff\|\.svg\|\.png\|\.gif\|\.jpg\|\.png\|\.css\|\.mp3\|\.mp4\|\.js" | sed "s/\/$//g" | sort -u > files.txt
cat files.txt > exp/dirs.txt
}
check_for_backups() {
# 32.CRIMSON_BACKUPER
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING EXISTENCE OF BACKUPS OF BRUTEFORCED FILES"
#python2 "$HOME"/tools/CRIMSON/scripts/crimson_backuper.py -w files.txt -e "$HOME"/tools/CRIMSON/words/BCK_EXT -c "$cookie" -o backups.txt
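# With two wordlists ffuf defaults to clusterbomb mode: every discovered file
# URL (FUZZ1) is paired with every backup extension (FUZZ2), e.g.
# /login.php + .bak -> /login.php.bak; -mc 200 keeps only live hits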
ffuf -noninteractive -H 'User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100' -t 300 -mc 200 -u FUZZ1FUZZ2 -w files.txt:FUZZ1 -w "$HOME"/tools/CRIMSON/words/BCK_EXT:FUZZ2 -s > backups.txt
}
check_for_cors() {
# 33.CORSME
anchor_check "${FUNCNAME[0]}" || return 0
echo "[CORS] ---------------------------------------------------------" | tee -a recon.txt
cat ffuf.txt | CorsMe -t 50 -header "$cookie" -output corsme.txt > /dev/null 2>&1
cat corsme.txt >> recon.txt
rm corsme.txt
rm error_requests.txt
}
dig_for_secrets() {
# 34.DETECT-SECRETS
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m CHECKING SECRETS IN GATHERED SOURCE CODE"
grep -EHirn "accesskey|admin|aes|api_key|apikey|checkClientTrusted|crypt|password|pinning|secret|SHA256|SharedPreferences|superuser|token|X509TrustManager|google_api|google_captcha|google_oauth|amazon_aws_access_key_id|amazon_mws_auth_toke|amazon_aws_url|facebook_access_token|authorization_basic|authorization_bearer|authorization_api|mailgun_api_key|twilio_api_key|twilio_account_sid|twilio_app_sid|paypal_braintree_access_token|square_oauth_secret|square_access_token|stripe_standard_api|stripe_restricted_api|github_access_token" all_source_code/ --color=always > apikeys.txt
detect-secrets scan all_source_code/ --all-files > detect-secrets.txt
}
arjun_enum() {
# 35.ARJUN
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m STARTING ARJUN ON files.txt - IT WILL TAKE A WHILE..."
arjun -i files.txt -oT arjun.txt -q --headers "$cookie"
}
vhost_brute_http() {
# 36.FFUF
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m BRUTEFORCING VIRTUAL HOSTNAMES "
cat "$HOME"/tools/CRIMSON/words/vhosts.txt | sed "s/TARGET_DOMAIN/$domain/" > vhosts.txt
cat ../subdomains.txt >> vhosts.txt
echo "[VHOSTS 80] ---------------------------------------------------------" | tee -a recon.txt
glength=$(curl -s -H "Host: $domain" http://"$domain" | wc -c)
blength=$(curl -s -H "Host: badhost" http://"$domain" | wc -c)
ffuf -noninteractive -H 'User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100' -t 200 -w vhosts.txt -u http://"$domain" -H "Host: FUZZ" -fs "$glength","$blength" -s | tee -a recon.txt
}
https_vhost_brute() {
# 37.FFUF
anchor_check "${FUNCNAME[0]}" || return 0
echo "[VHOSTS 443] ---------------------------------------------------------" | tee -a recon.txt
glength=$(curl -k -s -H "Host: $domain" https://"$domain" | wc -c)
blength=$(curl -k -s -H "Host: badhost" https://"$domain" | wc -c)
ffuf -noninteractive -H 'User-Agent: Mozilla/5.0 Windows NT 10.0 Win64 AppleWebKit/537.36 Chrome/69.0.3497.100' -t 200 -w vhosts.txt -u https://"$domain" -H "Host: FUZZ" -fs "$glength","$blength" -s | tee -a recon.txt
}
https_hop_by_hop() {
# 38.hop-by-hop/hbh-header-abuse-test.py
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m TESTING HOP-BY-HOP DELETION"
echo "[HBH 443] ---------------------------------------------------------" | tee -a recon.txt
cat "$HOME"/tools/CRIMSON/words/exp/hbh-headers | while IFS= read -r HEADER; do python2 "$HOME"/tools/hop-by-hop/hbh-header-abuse-test.py -u "https://$domain" -x "$HEADER"; sleep 1; done | tee -a recon.txt
}
hop_by_hop_http() {
# 39.hop-by-hop/hbh-header-abuse-test.py
anchor_check "${FUNCNAME[0]}" || return 0
echo -e "\033[0;31m [+]\033[0m TESTING HOP-BY-HOP DELETION"
echo "[HBH 80] ---------------------------------------------------------" | tee -a recon.txt
cat "$HOME"/tools/CRIMSON/words/exp/hbh-headers | while IFS= read -r HEADER; do python2 "$HOME"/tools/hop-by-hop/hbh-header-abuse-test.py -u "http://$domain" -x "$HEADER"; sleep 1; done | tee -a recon.txt
}
to_discord() {
notify -bulk recon.txt
}
#---
### MAIN()
echo -e "\033[0;31m
██████╗██████╗ ██╗███╗ ███╗███████╗ ██████╗ ███╗ ██╗ ████████╗ █████╗ ██████╗ ██████╗ ███████╗████████╗
██╔════╝██╔══██╗██║████╗ ████║██╔════╝██╔═══██╗████╗ ██║ ╚══██╔══╝██╔══██╗██╔══██╗██╔════╝ ██╔════╝╚══██╔══╝
██║ ██████╔╝██║██╔████╔██║███████╗██║ ██║██╔██╗ ██║ ██║ ███████║██████╔╝██║ ███╗█████╗ ██║
██║ ██╔══██╗██║██║╚██╔╝██║╚════██║██║ ██║██║╚██╗██║ ██║ ██╔══██║██╔══██╗██║ ██║██╔══╝ ██║
╚██████╗██║ ██║██║██║ ╚═╝ ██║███████║╚██████╔╝██║ ╚████║███████╗██║ ██║ ██║██║ ██║╚██████╔╝███████╗ ██║
╚═════╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝ ╚═════╝ ╚══════╝ ╚═╝
\033[0m"
rust_on=0
fully_automated=0
proxy_on=0
udp_on=0
arjun_on=0
vhost_on=0
while getopts "c:vD:puayb" OPTION; do
case $OPTION in
c)
cookie=$OPTARG
;;
v)
vhost_on=1
;;
D)
domain=$OPTARG
;;
p)
rust_on=1
;;
u)
udp_on=1
;;
a)
fully_automated=1
;;
y)
proxy_on=1
;;
b)
arjun_on=1
;;
*)
echo "Incorrect options provided"
exit 1
;;
esac
done
if [ -z "$domain" ]
then
echo "./crimson_target -D \"example.domain.com\"
# Optional flags are shown below:
-c \"Cookie: auth1=123;\"
-v # Virtual host discovery
-p # TCP (1-65535) ports scanning
-u # UDP (nmap default) ports scanning
-a # Without this flag, you have to manually check for false-positives after bruteforcing
-y # Proxy urls.txt and ffuf.txt to Burp (host.docker.internal:8080)
-b # Parameter bruteforcing with Arjun"
exit 1
else
### PREPARE DIRECTORIES AND VARIABLES
echo -e "\033[0;31m [+]\033[0m PREPARING DIRECTORIES AND VARIABLES"
export domain
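# tldextract splits the FQDN into "sub domain tld"; fields 2-3 rebuild the
# registered domain, e.g. www.example.com -> example.com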
DOMAIN=$(tldextract "$domain" | cut -s -d " " -f 2-3 | sed "s/\ /\./")
export DOMAIN
TARGET=$(dig +short "$domain" | tr "\n" ",")
export TARGET
mkdir "$HOME"/bounty/"$DOMAIN"/"$domain"/temp -p 2>/dev/null
mkdir "$HOME"/bounty/"$DOMAIN"/"$domain"/exp 2>/dev/null
mkdir "$HOME"/bounty/"$DOMAIN"/"$domain"/all_source_code 2>/dev/null
cd "$HOME"/bounty/"$DOMAIN"/"$domain" || exit
if [ -z "$cookie" ]
then
export cookie="Cookie: a=1;";
else
export cookie=$cookie;
fi
### CHECK IF THERE IS SSL
if wget --tries=3 --spider --no-check-certificate https://"$domain" 2>/dev/null; then
https_on=1
else
https_on=0
fi
echo "----------------------------------------------------------------" >> recon.txt
echo "SCANNING START: $(date +'[%m-%d %H:%M:%S]')" >> recon.txt
echo "----------------------------------------------------------------" >> recon.txt
### --- PORT SCANNING SECTION --- ### -p
if [ $rust_on == 1 ]
then
### RESOLVE IP AND SCAN OPENED TCP PORTS > recon.txt
tcp_scanning
fi
if [ $udp_on == 1 ]
then
### RESOLVE IP AND SCAN OPENED UDP PORTS > recon.txt
udp_scanning
fi
### GET THE CONTENT OF SITEMAP IF EXISTS >> recon.txt
get_the_sitemap
### GET THE CONTENT OF ROBOTS IF EXISTS >> recon.txt
get_the_robots
### CHECKING WAF >> recon.txt
check_waf
### IDENTIFY TECHNOLOGY >> recon.txt
whatweb_enum
### CMS SCAN >> recon.txt
cmseek_enum
### DOWNLOAD COMMUNITY-CURATED TEMPLATES && RUN THEM AGAINST THE DOMAIN
nuclei_scan_domain
### PREPARE COOKIE HEADER FOR NIKTO ENUM
nl=$'\n'
cr=$'\r'
### PERFORM NIKTO VULNERABILITY SCAN ON BOTH 443 / 80 PORT
nikto_enum_443
nikto_enum_80
### SPIDER 1 > urls.txt
gospider_enum
### SPIDER 2 >> urls.txt
paramspider_enum
### SPIDER 3 >> urls.txt
gau_enum
### SPIDER 4 >> urls.txt
waybackurls_enum
### SPIDER 5 >> urls.txt
hakrawler_enum
### SPIDER 6 >> urls.txt
galer_enum_and_merging
### --- AUTOMATED SECTION --- ### -a
if [ $fully_automated == 1 ]
then
### DIRECTORY BRUTEFORCING USING FFUF > status_ffuf.txt
ffuf_bruteforce
else
### DIRECTORY BRUTEFORCING - THE FEROXBUSTER PATH IS CURRENTLY DISABLED, SO FFUF IS USED HERE AS WELL (output is called ffuf.txt for compatibility reasons)
ffuf_bruteforce
#ferox_bruteforce
#ferox_manual
fi
### GATHER ALL SOURCE CODE FROM ffuf.txt AND STORE IT IN DIRECTORY > all_source_code/
code_gatherer
### GET LINKS TO JS FILES AND PREPARE IT FOR EXPLOIT MODULE > exp/jsfiles.txt
gather_js_links
### CHECK FOR LIVE JS LINKS
check_js_live
### GATHER SOURCE CODE FROM JS LINKS AND STORE IT IN >> all_source_code/
gather_js_code
### DIG API KEYS / ENDPOINTS ETC. > zile.txt
zile_enum
### MUTATE URLS && CHECK IF THERE ARE NO DUPLICATES WITH ffuf.txt > new_endpoints.txt
mutate_urls_check_duplicates
### CHECK STATUS OF NEW URLS > status_new_endpoints.txt
check_new_endpoints_status
### --- PROXY SECTION --- ### -y
if [ $proxy_on == 1 ]
then
### PROXY ALL BRUTEFORCED FILES AND DIRECTORIES TO BURP > status_dir.txt
proxy_directories_to_burp
### CHECK STATUS OF URLS WITH QUERIES && PROXY TO BURP > status_params.txt
proxy_urls_to_burp
else
### IF THERE IS NO PROXY FLAG - JUST CHECK THE STATUS CODE AND SAVE THE RESULTS FOR FURTHER PROCESSING
check_directories_status
check_urls_status
fi
### EXTRACT UNIQUE QUERIES > exp/params.txt && PREPARE FILES WORDLIST FOR BACKUPER AND ARJUN && DIRECTORIES FOR EXPLOIT MODULE (filtering static content) > files.txt && exp/dirs.txt
prepare_params_and_files
### CHECK FOR BACKUP FILES > backups.txt
check_for_backups
### CORS MISCONFIGURATION SCAN >> recon.txt
check_for_cors
### DIG SECRETS FROM all_source_code/ SAVE COLORED OUTPUT > apikeys.txt
dig_for_secrets
if [ $arjun_on == 1 ]
then
### GET PARAMETER NAMES BY BRUTEFORCING THEM > exp/arjun.txt
arjun_enum
fi
### --- VHOST SECTION --- ### -v
if [ $vhost_on == 1 ]
then
### VHOST BRUTEFORCING >> recon.txt
vhost_brute_http
https_vhost_brute
fi
### CLEAR LOGS && TRASH
clear_log
### MARK THE END
echo "----------------------------------------------------------------"
echo "SCANNING END: $(date +'[%m-%d %H:%M:%S]')" >> recon.txt
echo "----------------------------------------------------------------"
### PRINT THE RESULTS
echo -e "\033[0;31m [+]\033[0m 1. recon.txt :"
cat recon.txt
#echo -e "\033[0;31m [+]\033[0m 2. zile.txt :"
# cat zile.txt | cut -s -d " " -f 2 | sed "s/^..//g" | sed "s/..$//g" | sort -u | sed "s/^.//" | sed "s/.$//"
echo -e "\033[0;31m [+]\033[0m 3. CHECK :"
echo " - status_dir.txt"
echo " - status_params.txt"
echo " - arjun.txt"
echo " - backups.txt"
echo " - apikeys.txt"
echo " - detect-secrets.txt"
echo " - use pywhat && ciphey for 'strange' findings"
echo " - robots.txt and sitemap.xml at : https://httpstatus.io/"
echo " - Use trufflehog if there is a git repository"
fi