From f93a27fd6f5f174b49da105bf7bc1b526884a974 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Wed, 5 Apr 2023 11:19:44 +0200
Subject: [PATCH 01/20] fix files, filenames and others
---
README.md | 10 +-
cmd/cariddi/main.go | 4 +-
pkg/crawler/colly.go | 54 ++++-
pkg/crawler/scan.go | 194 +++++++++++++++++
pkg/crawler/utils.go | 235 +++------------------
pkg/input/flags.go | 4 +-
pkg/output/{beautify.go => banner.go} | 4 +-
pkg/output/examples.go | 12 +-
pkg/output/help.go | 2 +-
pkg/output/{json.go => jsonl.go} | 26 +--
pkg/output/{json_test.go => jsonl_test.go} | 0
11 files changed, 301 insertions(+), 244 deletions(-)
create mode 100644 pkg/crawler/scan.go
rename pkg/output/{beautify.go => banner.go} (96%)
rename pkg/output/{json.go => jsonl.go} (85%)
rename pkg/output/{json_test.go => jsonl_test.go} (100%)
diff --git a/README.md b/README.md
index 1df7c54..005347a 100644
--- a/README.md
+++ b/README.md
@@ -107,9 +107,7 @@ You need [Go](https://golang.org/).
Get Started 🎉
----------
-`cariddi -h` prints the help in the command line.
-
-*Note*: Don't rely on the CLI output, use always `-ot/-oh` to save the output.
+`cariddi -h` prints the help.
```
Usage of cariddi:
@@ -177,7 +175,7 @@ Examples 💡
- `cat urls | cariddi -d 2` (2 seconds between a page crawled and another)
- `cat urls | cariddi -c 200` (Set the concurrency level to 200)
- `cat urls | cariddi -e` (Hunt for juicy endpoints)
- - `cat urls | cariddi -plain` (Print only useful things)
+ - `cat urls | cariddi -plain` (Print only results)
- `cat urls | cariddi -ot target_name` (Results in txt file)
- `cat urls | cariddi -oh target_name` (Results in html file)
- `cat urls | cariddi -ext 2` (Hunt for juicy (level 2 out of 7) files)
@@ -189,7 +187,7 @@ Examples 💡
- `cat urls | cariddi -t 5` (Set the timeout for the requests)
- `cat urls | cariddi -intensive` (Crawl searching also subdomains, same as `*.target.com`)
- `cat urls | cariddi -rua` (Use a random browser user agent on every request)
- - `cat urls | cariddi -proxy http://127.0.0.1:8080` (Set a Proxy (http and socks5 supported))
+ - `cat urls | cariddi -proxy http://127.0.0.1:8080` (Set a Proxy, http and socks5 supported)
- `cat urls | cariddi -headers "Cookie: auth=admin;type=2;; X-Custom: customHeader"`
- `cat urls | cariddi -headersfile headers.txt` (Read from an external file custom headers)
- `cat urls | cariddi -err` (Hunt for errors in websites)
@@ -222,7 +220,7 @@ If there aren't errors, go ahead :)
**Help me building this!**
-Special thanks to: [go-colly](http://go-colly.org/), [zricethezav](https://github.com/zricethezav/gitleaks/blob/master/config/default.go), [projectdiscovery](https://github.com/projectdiscovery/nuclei-templates/tree/master/file/keys), [tomnomnom](https://github.com/tomnomnom/gf/tree/master/examples) and [RegexPassive](https://github.com/hahwul/RegexPassive).
+Special thanks to: [go-colly](http://go-colly.org/), [zricethezav](https://github.com/zricethezav/gitleaks/blob/master/config/default.go), [projectdiscovery](https://github.com/projectdiscovery/nuclei-templates/tree/master/file/keys), [tomnomnom](https://github.com/tomnomnom/gf/tree/master/examples), [RegexPassive](https://github.com/hahwul/RegexPassive) and the contributors.
**To do:**
diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go
index 1555076..23f9c31 100644
--- a/cmd/cariddi/main.go
+++ b/cmd/cariddi/main.go
@@ -44,7 +44,7 @@ func main() {
// Print version and exit.
if flags.Version {
- output.Beautify()
+ output.Banner()
os.Exit(0)
}
@@ -62,7 +62,7 @@ func main() {
// If it's possible print the cariddi banner.
if !flags.Plain {
- output.Beautify()
+ output.Banner()
}
// Setup the config according to the flags that were
diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go
index 0d3f472..a21b920 100644
--- a/pkg/crawler/colly.go
+++ b/pkg/crawler/colly.go
@@ -35,6 +35,7 @@ import (
"net/http"
"os"
"os/signal"
+ "strings"
"time"
fileUtils "github.com/edoardottt/cariddi/internal/file"
@@ -88,6 +89,18 @@ type Scan struct {
EndpointsSlice []string
}
+type Event struct {
+ ProtocolTemp string
+ TargetTemp string
+ Target string
+ Intensive bool
+ Ignore bool
+ Debug bool
+ JSON bool
+ IgnoreSlice []string
+ URLs *[]string
+}
+
// New is the actual crawler engine.
// It controls all the behaviours of a scan
// (event handlers, secrets, errors, extensions and endpoints scanning).
@@ -151,7 +164,7 @@ func New(scan *Scan) *Results {
Intensive: scan.Intensive,
Ignore: ignoreBool,
Debug: scan.Debug,
- JSON: scan.JSON,
+ JSON: scan.JSON,
IgnoreSlice: ignoreSlice,
URLs: &results.URLs,
}
@@ -363,7 +376,7 @@ func CreateColly(delayTime int, concurrency int, cache bool, timeout int,
func registerHTMLEvents(c *colly.Collector, event *Event) {
// On every request that Colly is making, print the URL it's currently visiting
c.OnRequest(func(e *colly.Request) {
- if (!event.JSON){
+ if !event.JSON {
fmt.Println(e.URL.String())
}
})
@@ -435,3 +448,40 @@ func registerXMLEvents(c *colly.Collector, event *Event) {
visitXMLLink(e.Text, event, e, c)
})
}
+
+// visitHTMLLink checks if the collector should visit a link or not.
+func visitHTMLLink(link string, event *Event, e *colly.HTMLElement, c *colly.Collector) {
+ if len(link) != 0 && !strings.HasPrefix(link, "data:image") {
+ absoluteURL := urlUtils.AbsoluteURL(event.ProtocolTemp, event.TargetTemp, e.Request.AbsoluteURL(link))
+ // Visit link found on page
+ // Only those links are visited which are in AllowedDomains
+ visitLink(event, c, absoluteURL)
+ }
+}
+
+// visitXMLLink checks if the collector should visit a link or not.
+func visitXMLLink(link string, event *Event, e *colly.XMLElement, c *colly.Collector) {
+ if len(link) != 0 && !strings.HasPrefix(link, "data:image") {
+ absoluteURL := urlUtils.AbsoluteURL(event.ProtocolTemp, event.TargetTemp, e.Request.AbsoluteURL(link))
+ // Visit link found on page
+ // Only those links are visited which are in AllowedDomains
+ visitLink(event, c, absoluteURL)
+ }
+}
+
+// visitLink is a protocol agnostic wrapper to visit a link.
+func visitLink(event *Event, c *colly.Collector, absoluteURL string) {
+ if (!event.Intensive && urlUtils.SameDomain(event.ProtocolTemp+"://"+event.Target, absoluteURL)) ||
+ (event.Intensive && intensiveOk(event.TargetTemp, absoluteURL, event.Debug)) {
+ if !event.Ignore || (event.Ignore && !IgnoreMatch(absoluteURL, &event.IgnoreSlice)) {
+ err := c.Visit(absoluteURL)
+ if !errors.Is(err, colly.ErrAlreadyVisited) {
+ *event.URLs = append(*event.URLs, absoluteURL)
+
+ if err != nil && event.Debug {
+ log.Println(err)
+ }
+ }
+ }
+ }
+}
diff --git a/pkg/crawler/scan.go b/pkg/crawler/scan.go
new file mode 100644
index 0000000..8947c98
--- /dev/null
+++ b/pkg/crawler/scan.go
@@ -0,0 +1,194 @@
+/*
+==========
+Cariddi
+==========
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see http://www.gnu.org/licenses/.
+
+ @Repository: https://github.com/edoardottt/cariddi
+
+ @Author: edoardottt, https://www.edoardoottavianelli.it
+
+ @License: https://github.com/edoardottt/cariddi/blob/main/LICENSE
+
+*/
+
+package crawler
+
+import (
+ "regexp"
+ "strings"
+
+ urlUtils "github.com/edoardottt/cariddi/internal/url"
+ "github.com/edoardottt/cariddi/pkg/scanner"
+)
+
+// huntSecrets hunts for secrets.
+func huntSecrets(target, body string, secretsFile *[]string) []scanner.SecretMatched {
+ secrets := SecretsMatch(target, body, secretsFile)
+ return secrets
+}
+
+// SecretsMatch checks if a body matches some secrets.
+func SecretsMatch(url, body string, secretsFile *[]string) []scanner.SecretMatched {
+ var secrets []scanner.SecretMatched
+
+ if len(*secretsFile) == 0 {
+ for _, secret := range scanner.GetSecretRegexes() {
+ if matched, err := regexp.Match(secret.Regex, []byte(body)); err == nil && matched {
+ re := regexp.MustCompile(secret.Regex)
+ matches := re.FindAllStringSubmatch(body, -1)
+
+ // Avoiding false positives
+ var isFalsePositive = false
+
+ for _, match := range matches {
+ for _, falsePositive := range secret.FalsePositives {
+ if strings.Contains(strings.ToLower(match[0]), falsePositive) {
+ isFalsePositive = true
+ break
+ }
+ }
+
+ if !isFalsePositive {
+ secretFound := scanner.SecretMatched{Secret: secret, URL: url, Match: match[0]}
+ secrets = append(secrets, secretFound)
+ }
+ }
+ }
+ }
+ } else {
+ for _, secret := range *secretsFile {
+ if matched, err := regexp.Match(secret, []byte(body)); err == nil && matched {
+ re := regexp.MustCompile(secret)
+ matches := re.FindAllStringSubmatch(body, -1)
+ for _, match := range matches {
+ secretScanned := scanner.Secret{Name: "CustomFromFile", Description: "", Regex: secret, Poc: ""}
+ secretFound := scanner.SecretMatched{Secret: secretScanned, URL: url, Match: match[0]}
+ secrets = append(secrets, secretFound)
+ }
+ }
+ }
+ }
+
+ return scanner.RemoveDuplicateSecrets(secrets)
+}
+
+// huntEndpoints hunts for juicy endpoints.
+func huntEndpoints(target string, endpointsFile *[]string) []scanner.EndpointMatched {
+ endpoints := EndpointsMatch(target, endpointsFile)
+ return endpoints
+}
+
+// EndpointsMatch checks if an endpoint matches a juicy parameter.
+func EndpointsMatch(target string, endpointsFile *[]string) []scanner.EndpointMatched {
+ endpoints := []scanner.EndpointMatched{}
+ matched := []scanner.Parameter{}
+ parameters := urlUtils.RetrieveParameters(target)
+
+ if len(*endpointsFile) == 0 {
+ for _, parameter := range scanner.GetJuicyParameters() {
+ for _, param := range parameters {
+ if strings.ToLower(param) == parameter.Parameter {
+ matched = append(matched, parameter)
+ }
+ }
+ }
+ endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target})
+ } else {
+ for _, parameter := range *endpointsFile {
+ for _, param := range parameters {
+ if param == parameter {
+ matched = append(matched, scanner.Parameter{Parameter: parameter, Attacks: []string{}})
+ }
+ }
+ }
+ endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target})
+ }
+
+ return endpoints
+}
+
+// huntExtensions hunts for extensions.
+func huntExtensions(target string, severity int) scanner.FileTypeMatched {
+ extension := scanner.FileTypeMatched{}
+ copyTarget := target
+
+ for _, ext := range scanner.GetExtensions() {
+ if ext.Severity <= severity {
+ firstIndex := strings.Index(target, "?")
+ if firstIndex > -1 {
+ target = target[:firstIndex]
+ }
+
+ if strings.ToLower(target[len(target)-len("."+ext.Extension):]) == "."+ext.Extension {
+ extension = scanner.FileTypeMatched{Filetype: ext, URL: copyTarget}
+ }
+ }
+ }
+
+ return extension
+}
+
+// huntErrors hunts for errors.
+func huntErrors(target, body string) []scanner.ErrorMatched {
+ errorsSlice := ErrorsMatch(target, body)
+ return errorsSlice
+}
+
+// ErrorsMatch checks the patterns for errors.
+func ErrorsMatch(url, body string) []scanner.ErrorMatched {
+ errors := []scanner.ErrorMatched{}
+
+ for _, errorItem := range scanner.GetErrorRegexes() {
+ for _, errorRegex := range errorItem.Regex {
+ if matched, err := regexp.Match(errorRegex, []byte(body)); err == nil && matched {
+ re := regexp.MustCompile(errorRegex)
+ matches := re.FindAllStringSubmatch(body, -1)
+
+ for _, match := range matches {
+ errorFound := scanner.ErrorMatched{Error: errorItem, URL: url, Match: match[0]}
+ errors = append(errors, errorFound)
+ }
+ }
+ }
+ }
+
+ return scanner.RemoveDuplicateErrors(errors)
+}
+
+// huntInfos hunts for infos.
+func huntInfos(target, body string) []scanner.InfoMatched {
+ infosSlice := InfoMatch(target, body)
+ return infosSlice
+}
+
+// InfoMatch checks the patterns for infos.
+func InfoMatch(url, body string) []scanner.InfoMatched {
+ infos := []scanner.InfoMatched{}
+
+ for _, infoItem := range scanner.GetInfoRegexes() {
+ if matched, err := regexp.Match(infoItem.Regex, []byte(body)); err == nil && matched {
+ re := regexp.MustCompile(infoItem.Regex)
+ matches := re.FindAllStringSubmatch(body, -1)
+
+ for _, match := range matches {
+ infoFound := scanner.InfoMatched{Info: infoItem, URL: url, Match: match[0]}
+ infos = append(infos, infoFound)
+ }
+ }
+ }
+
+ return scanner.RemoveDuplicateInfos(infos)
+}
diff --git a/pkg/crawler/utils.go b/pkg/crawler/utils.go
index 3b79522..994c392 100644
--- a/pkg/crawler/utils.go
+++ b/pkg/crawler/utils.go
@@ -1,224 +1,37 @@
-package crawler
-
-import (
- "errors"
- "fmt"
- "log"
- "regexp"
- "strings"
-
- urlUtils "github.com/edoardottt/cariddi/internal/url"
- "github.com/edoardottt/cariddi/pkg/scanner"
- "github.com/gocolly/colly"
-)
-
-type Event struct {
- ProtocolTemp string
- TargetTemp string
- Target string
- Intensive bool
- Ignore bool
- Debug bool
- JSON bool
- IgnoreSlice []string
- URLs *[]string
-}
-
-// visitHTMLLink checks if the collector should visit a link or not.
-func visitHTMLLink(link string, event *Event, e *colly.HTMLElement, c *colly.Collector) {
- if len(link) != 0 && !strings.HasPrefix(link, "data:image") {
- absoluteURL := urlUtils.AbsoluteURL(event.ProtocolTemp, event.TargetTemp, e.Request.AbsoluteURL(link))
- // Visit link found on page
- // Only those links are visited which are in AllowedDomains
- visitLink(event, c, absoluteURL)
- }
-}
-
-// visitXMLLink checks if the collector should visit a link or not.
-func visitXMLLink(link string, event *Event, e *colly.XMLElement, c *colly.Collector) {
- if len(link) != 0 && !strings.HasPrefix(link, "data:image") {
- absoluteURL := urlUtils.AbsoluteURL(event.ProtocolTemp, event.TargetTemp, e.Request.AbsoluteURL(link))
- // Visit link found on page
- // Only those links are visited which are in AllowedDomains
- visitLink(event, c, absoluteURL)
- }
-}
+/*
+==========
+Cariddi
+==========
-// visitLink is a protocol agnostic wrapper to visit a link.
-func visitLink(event *Event, c *colly.Collector, absoluteURL string) {
- if (!event.Intensive && urlUtils.SameDomain(event.ProtocolTemp+"://"+event.Target, absoluteURL)) ||
- (event.Intensive && intensiveOk(event.TargetTemp, absoluteURL, event.Debug)) {
- if !event.Ignore || (event.Ignore && !IgnoreMatch(absoluteURL, &event.IgnoreSlice)) {
- err := c.Visit(absoluteURL)
- if !errors.Is(err, colly.ErrAlreadyVisited) {
- *event.URLs = append(*event.URLs, absoluteURL)
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
- if err != nil && event.Debug {
- log.Println(err)
- }
- }
- }
- }
-}
-
-// huntSecrets hunts for secrets.
-func huntSecrets(target, body string, secretsFile *[]string) []scanner.SecretMatched {
- secrets := SecretsMatch(target, body, secretsFile)
- return secrets
-}
-
-// SecretsMatch checks if a body matches some secrets.
-func SecretsMatch(url, body string, secretsFile *[]string) []scanner.SecretMatched {
- var secrets []scanner.SecretMatched
-
- if len(*secretsFile) == 0 {
- for _, secret := range scanner.GetSecretRegexes() {
- if matched, err := regexp.Match(secret.Regex, []byte(body)); err == nil && matched {
- re := regexp.MustCompile(secret.Regex)
- matches := re.FindAllStringSubmatch(body, -1)
-
- // Avoiding false positives
- var isFalsePositive = false
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
- for _, match := range matches {
- for _, falsePositive := range secret.FalsePositives {
- if strings.Contains(strings.ToLower(match[0]), falsePositive) {
- isFalsePositive = true
- break
- }
- }
+You should have received a copy of the GNU General Public License
+along with this program. If not, see http://www.gnu.org/licenses/.
- if !isFalsePositive {
- secretFound := scanner.SecretMatched{Secret: secret, URL: url, Match: match[0]}
- secrets = append(secrets, secretFound)
- }
- }
- }
- }
- } else {
- for _, secret := range *secretsFile {
- if matched, err := regexp.Match(secret, []byte(body)); err == nil && matched {
- re := regexp.MustCompile(secret)
- matches := re.FindAllStringSubmatch(body, -1)
- for _, match := range matches {
- secretScanned := scanner.Secret{Name: "CustomFromFile", Description: "", Regex: secret, Poc: ""}
- secretFound := scanner.SecretMatched{Secret: secretScanned, URL: url, Match: match[0]}
- secrets = append(secrets, secretFound)
- }
- }
- }
- }
+ @Repository: https://github.com/edoardottt/cariddi
- return scanner.RemoveDuplicateSecrets(secrets)
-}
+ @Author: edoardottt, https://www.edoardoottavianelli.it
-// huntEndpoints hunts for juicy endpoints.
-func huntEndpoints(target string, endpointsFile *[]string) []scanner.EndpointMatched {
- endpoints := EndpointsMatch(target, endpointsFile)
- return endpoints
-}
+ @License: https://github.com/edoardottt/cariddi/blob/main/LICENSE
-// EndpointsMatch check if an endpoint matches a juicy parameter.
-func EndpointsMatch(target string, endpointsFile *[]string) []scanner.EndpointMatched {
- endpoints := []scanner.EndpointMatched{}
- matched := []scanner.Parameter{}
- parameters := urlUtils.RetrieveParameters(target)
+*/
- if len(*endpointsFile) == 0 {
- for _, parameter := range scanner.GetJuicyParameters() {
- for _, param := range parameters {
- if strings.ToLower(param) == parameter.Parameter {
- matched = append(matched, parameter)
- }
- }
- }
- endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target})
- } else {
- for _, parameter := range *endpointsFile {
- for _, param := range parameters {
- if param == parameter {
- matched = append(matched, scanner.Parameter{Parameter: parameter, Attacks: []string{}})
- }
- }
- }
- endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target})
- }
-
- return endpoints
-}
-
-// huntExtensions hunts for extensions.
-func huntExtensions(target string, severity int) scanner.FileTypeMatched {
- extension := scanner.FileTypeMatched{}
- copyTarget := target
-
- for _, ext := range scanner.GetExtensions() {
- if ext.Severity <= severity {
- firstIndex := strings.Index(target, "?")
- if firstIndex > -1 {
- target = target[:firstIndex]
- }
-
- if strings.ToLower(target[len(target)-len("."+ext.Extension):]) == "."+ext.Extension {
- extension = scanner.FileTypeMatched{Filetype: ext, URL: copyTarget}
- }
- }
- }
-
- return extension
-}
-
-// huntErrors hunts for errors.
-func huntErrors(target, body string) []scanner.ErrorMatched {
- errorsSlice := ErrorsMatch(target, body)
- return errorsSlice
-}
-
-// ErrorsMatch checks the patterns for errors.
-func ErrorsMatch(url, body string) []scanner.ErrorMatched {
- errors := []scanner.ErrorMatched{}
-
- for _, errorItem := range scanner.GetErrorRegexes() {
- for _, errorRegex := range errorItem.Regex {
- if matched, err := regexp.Match(errorRegex, []byte(body)); err == nil && matched {
- re := regexp.MustCompile(errorRegex)
- matches := re.FindAllStringSubmatch(body, -1)
-
- for _, match := range matches {
- errorFound := scanner.ErrorMatched{Error: errorItem, URL: url, Match: match[0]}
- errors = append(errors, errorFound)
- }
- }
- }
- }
-
- return scanner.RemoveDuplicateErrors(errors)
-}
-
-// huntInfos hunts for infos.
-func huntInfos(target, body string) []scanner.InfoMatched {
- infosSlice := InfoMatch(target, body)
- return infosSlice
-}
-
-// InfoMatch checks the patterns for infos.
-func InfoMatch(url, body string) []scanner.InfoMatched {
- infos := []scanner.InfoMatched{}
-
- for _, infoItem := range scanner.GetInfoRegexes() {
- if matched, err := regexp.Match(infoItem.Regex, []byte(body)); err == nil && matched {
- re := regexp.MustCompile(infoItem.Regex)
- matches := re.FindAllStringSubmatch(body, -1)
+package crawler
- for _, match := range matches {
- infoFound := scanner.InfoMatched{Info: infoItem, URL: url, Match: match[0]}
- infos = append(infos, infoFound)
- }
- }
- }
+import (
+ "fmt"
+ "strings"
- return scanner.RemoveDuplicateInfos(infos)
-}
+ urlUtils "github.com/edoardottt/cariddi/internal/url"
+)
// RetrieveBody retrieves the body (in the response) of a url.
func RetrieveBody(target *string) string {
diff --git a/pkg/input/flags.go b/pkg/input/flags.go
index e6fef19..c3fcfb0 100644
--- a/pkg/input/flags.go
+++ b/pkg/input/flags.go
@@ -76,7 +76,7 @@ func ScanFlag() Input {
concurrencyPtr := flag.Int("c", DefaultConcurrency, "Concurrency level.")
helpPtr := flag.Bool("h", false, "Print the help.")
examplesPtr := flag.Bool("examples", false, "Print the examples.")
- plainPtr := flag.Bool("plain", false, "Print only the results.")
+ plainPtr := flag.Bool("plain", false, "Print only results.")
JSONPtr := flag.Bool("json", false, "Print the output as JSON in stdout.")
outputHTMLPtr := flag.String("oh", "", "Write the output into an HTML file.")
outputTXTPtr := flag.String("ot", "", "Write the output into a TXT file.")
@@ -86,7 +86,7 @@ func ScanFlag() Input {
timeoutPtr := flag.Int("t", TimeoutRequest, "Set timeout for the requests.")
intensivePtr := flag.Bool("intensive", false, "Crawl searching for resources matching 2nd level domain.")
ruaPtr := flag.Bool("rua", false, "Use a random browser user agent on every request.")
- proxyPtr := flag.String("proxy", "", "Set a Proxy to be used (http and socks5 supported).")
+ proxyPtr := flag.String("proxy", "", "Set a Proxy, http and socks5 supported.")
secretsPtr := flag.Bool("s", false, "Hunt for secrets.")
secretsFilePtr := flag.String("sf", "", "Use an external file (txt, one per line)"+
diff --git a/pkg/output/beautify.go b/pkg/output/banner.go
similarity index 96%
rename from pkg/output/beautify.go
rename to pkg/output/banner.go
index 45009a0..ae5e4d0 100644
--- a/pkg/output/beautify.go
+++ b/pkg/output/banner.go
@@ -44,8 +44,8 @@ const (
\___\__,_|_| |_|\__,_|\__,_|_| `
)
-// Beautify prints the banner + version.
-func Beautify() {
+// Banner prints the banner + version.
+func Banner() {
links := " > github.com/edoardottt/cariddi\n > edoardoottavianelli.it\n"
sepLine := "========================================\n"
diff --git a/pkg/output/examples.go b/pkg/output/examples.go
index fcbf6ca..279013e 100644
--- a/pkg/output/examples.go
+++ b/pkg/output/examples.go
@@ -30,7 +30,7 @@ import "fmt"
// PrintExamples prints some examples.
func PrintExamples() {
- Beautify()
+ Banner()
fmt.Println(`
cariddi -version (Print the version)
@@ -38,15 +38,15 @@ func PrintExamples() {
cariddi -examples (Print the examples)
- cat urls | cariddi -e (Hunt for secrets)
+ cat urls | cariddi -s (Hunt for secrets)
cat urls | cariddi -d 2 (2 seconds between a page crawled and another)
cat urls | cariddi -c 200 (Set the concurrency level to 200)
- cat urls | cariddi -s (Hunt for juicy endpoints)
+ cat urls | cariddi -e (Hunt for juicy endpoints)
- cat urls | cariddi -plain (Print only useful things)
+ cat urls | cariddi -plain (Print only results)
cat urls | cariddi -ot target_name (Results in txt file)
@@ -84,5 +84,7 @@ func PrintExamples() {
cat urls | cariddi -ua "Custom User Agent"
- cat urls | cariddi -insecure`)
+ cat urls | cariddi -json
+
+ cat urls | cariddi -json | jq .`)
}
diff --git a/pkg/output/help.go b/pkg/output/help.go
index 6877ddc..80b0a5b 100644
--- a/pkg/output/help.go
+++ b/pkg/output/help.go
@@ -30,7 +30,7 @@ import "fmt"
// PrintHelp prints the help.
func PrintHelp() {
- Beautify()
+ Banner()
fmt.Println(`Usage of cariddi:
-c int
Concurrency level. (default 20)
diff --git a/pkg/output/json.go b/pkg/output/jsonl.go
similarity index 85%
rename from pkg/output/json.go
rename to pkg/output/jsonl.go
index 7b874b7..7acdcc3 100644
--- a/pkg/output/json.go
+++ b/pkg/output/jsonl.go
@@ -36,13 +36,13 @@ import (
)
type JSONData struct {
- URL string `json:"url"`
- Method string `json:"method"`
- StatusCode int `json:"status_code"`
- Words int `json:"words"`
- Lines int `json:"lines"`
- ContentType string `json:"content_type,omitempty"`
- ContentLength int `json:"content_length,omitempty"`
+ URL string `json:"url"`
+ Method string `json:"method"`
+ StatusCode int `json:"status_code"`
+ Words int `json:"words"`
+ Lines int `json:"lines"`
+ ContentType string `json:"content_type,omitempty"`
+ ContentLength int `json:"content_length,omitempty"`
Matches *MatcherResults `json:"matches,omitempty"`
// Host string `json:"host"` # TODO: Available in Colly 2.x
}
@@ -141,19 +141,19 @@ func GetJSONString(
// Set empty data if no matches to bridge the omitempty gap for empty structs
var (
- isFileTypeNill = false
+ isFileTypeNill = false
isParametersEmpty = len(parameters) == 0
- isErrorsEmpty = len(errorList) == 0
- isInfoEmpty = len(infoList) == 0
- isSecretsEmpty = len(secretList) == 0
+ isErrorsEmpty = len(errorList) == 0
+ isInfoEmpty = len(infoList) == 0
+ isSecretsEmpty = len(secretList) == 0
)
- if (*filetype == scanner.FileType{}){
+ if (*filetype == scanner.FileType{}) {
matcherResults.FileType = nil
isFileTypeNill = true
}
- if (isFileTypeNill && isParametersEmpty && isErrorsEmpty && isInfoEmpty && isSecretsEmpty){
+ if isFileTypeNill && isParametersEmpty && isErrorsEmpty && isInfoEmpty && isSecretsEmpty {
resp.Matches = nil
}
diff --git a/pkg/output/json_test.go b/pkg/output/jsonl_test.go
similarity index 100%
rename from pkg/output/json_test.go
rename to pkg/output/jsonl_test.go
From 14b4efd2849debb8a142185551b2e3aa6b408c68 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Wed, 5 Apr 2023 11:22:55 +0200
Subject: [PATCH 02/20] update
---
.golangci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.golangci.yml b/.golangci.yml
index 2693711..6c36914 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -59,5 +59,5 @@ issues:
exclude-rules:
- path: crawler/colly.go
text: "should replace loop with `(FinalInfos|FinalErrors|FinalSecrets)"
- - path: pkg/output/beautify.go
+ - path: pkg/output/banner.go
text: "should be written without leading space as"
\ No newline at end of file
From eb49f7bcee1fec6a0f269c76bae17515daf2ab5b Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Wed, 5 Apr 2023 11:33:44 +0200
Subject: [PATCH 03/20] update
---
README.md | 11 +----------
1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/README.md b/README.md
index 005347a..e487455 100644
--- a/README.md
+++ b/README.md
@@ -33,10 +33,6 @@
-
-
-
-
@@ -220,13 +216,8 @@ If there aren't errors, go ahead :)
**Help me building this!**
-Special thanks to: [go-colly](http://go-colly.org/), [zricethezav](https://github.com/zricethezav/gitleaks/blob/master/config/default.go), [projectdiscovery](https://github.com/projectdiscovery/nuclei-templates/tree/master/file/keys), [tomnomnom](https://github.com/tomnomnom/gf/tree/master/examples), [RegexPassive](https://github.com/hahwul/RegexPassive) and the contributors.
+Special thanks to: [go-colly](http://go-colly.org/), [zricethezav](https://github.com/zricethezav/gitleaks/blob/master/config/default.go), [projectdiscovery](https://github.com/projectdiscovery/nuclei-templates/tree/master/file/keys), [tomnomnom](https://github.com/tomnomnom/gf/tree/master/examples), [RegexPassive](https://github.com/hahwul/RegexPassive) and [all the contributors](https://github.com/edoardottt/cariddi/wiki/Contributors).
-**To do:**
-
- - [ ] Add more Tests
-
- - [ ] Tor support
License 📝
-------
From c931485b081b2b39a3934083dbeb8a42cd4ee05e Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Wed, 5 Apr 2023 11:43:03 +0200
Subject: [PATCH 04/20] update
---
Makefile | 8 ++++----
go.mod | 4 ++--
go.sum | 7 ++++---
make.bat | 13 ++++++-------
4 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/Makefile b/Makefile
index b06dbcd..f1b8a6e 100644
--- a/Makefile
+++ b/Makefile
@@ -1,8 +1,8 @@
REPO=github.com/edoardottt/cariddi
-fmt:
- @gofmt -s ./*
- @echo "Done."
+tidy:
+ @go get -u ./...
+ @go mod tidy -v
remod:
@rm -rf go.*
@@ -11,7 +11,7 @@ remod:
@echo "Done."
update:
- @go get -u
+ @go get -u ./...
@go mod tidy -v
@make unlinux
@git pull
diff --git a/go.mod b/go.mod
index 373ee40..e90408c 100644
--- a/go.mod
+++ b/go.mod
@@ -18,11 +18,11 @@ require (
github.com/golang/protobuf v1.5.3 // indirect
github.com/kennygrant/sanitize v1.2.4 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
- github.com/mattn/go-isatty v0.0.17 // indirect
+ github.com/mattn/go-isatty v0.0.18 // indirect
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
github.com/temoto/robotstxt v1.1.2 // indirect
golang.org/x/net v0.8.0 // indirect
- golang.org/x/sys v0.6.0 // indirect
+ golang.org/x/sys v0.7.0 // indirect
golang.org/x/text v0.8.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/protobuf v1.30.0 // indirect
diff --git a/go.sum b/go.sum
index 1710407..ca017b9 100644
--- a/go.sum
+++ b/go.sum
@@ -30,8 +30,8 @@ github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
-github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
-github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-isatty v0.0.18 h1:DOKFKCQ7FNG2L1rbrmstDN4QVRdS89Nkh85u68Uwp98=
+github.com/mattn/go-isatty v0.0.18/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA=
@@ -65,8 +65,9 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
+golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
diff --git a/make.bat b/make.bat
index 95d5ec8..2c4bc27 100644
--- a/make.bat
+++ b/make.bat
@@ -17,8 +17,8 @@ IF "%ARG%"=="update" (
GOTO Done
)
-IF "%ARG%"=="fmt" (
- CALL :Fmt
+IF "%ARG%"=="tidy" (
+ CALL :Tidy
GOTO Done
)
@@ -50,10 +50,9 @@ go test -v ./...
echo Done
EXIT /B 0
-:Fmt
-set GO111MODULE=on
-echo Formatting ...
-go fmt ./...
+:Tidy
+go get -u ./...
+go mod tidy -v
echo Done.
EXIT /B 0
@@ -64,7 +63,7 @@ EXIT /B 0
:Update
set GO111MODULE=on
echo Updating ...
-go get -u
+go get -u ./...
go mod tidy -v
CALL :Unwindows
git pull
From 849405ffdde67d810dd677fb0ebc19d51442fe89 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:37:11 +0200
Subject: [PATCH 05/20] update golangcilint
---
.golangci.yml | 1 -
1 file changed, 1 deletion(-)
diff --git a/.golangci.yml b/.golangci.yml
index 6c36914..cfc0617 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -5,7 +5,6 @@ linters:
enable:
- asciicheck
- bodyclose
- - depguard
- dogsled
- dupl
- errcheck
From 8fffb90eed1760c21d8a0674b7560c69453479e9 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:37:50 +0200
Subject: [PATCH 06/20] update main
---
cmd/cariddi/main.go | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go
index 23f9c31..649995b 100644
--- a/cmd/cariddi/main.go
+++ b/cmd/cariddi/main.go
@@ -86,6 +86,7 @@ func main() {
InfoFlag: flags.Info,
Debug: flags.Debug,
UserAgent: flags.UserAgent,
+ StoreResp: flags.StoreResp,
}
// Read the targets from standard input.
@@ -126,6 +127,10 @@ func main() {
output.HeaderHTML("Results", ResultHTML)
}
+ if config.StoreResp {
+ fileUtils.CreateOutputFile("index", "responses", "txt")
+ }
+
// Read headers if needed
if flags.HeadersFile != "" || flags.Headers != "" {
var headersInput string
From 3b9baf38ad2f255fc8c083030a78029e621dc1c4 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:38:12 +0200
Subject: [PATCH 07/20] add CreateHostOutputFolder
---
internal/file/file.go | 17 +++++++++++++++--
1 file changed, 15 insertions(+), 2 deletions(-)
diff --git a/internal/file/file.go b/internal/file/file.go
index d2e0b4d..bd3b2fc 100644
--- a/internal/file/file.go
+++ b/internal/file/file.go
@@ -33,6 +33,7 @@ import (
"log"
"net/http"
"os"
+ "path/filepath"
"strings"
)
@@ -52,11 +53,23 @@ func CreateOutputFolder() {
}
}
+// CreateHostOutputFolder creates the host output folder
+// for the HTTP responses.
+// If it fails exits with an error message.
+func CreateHostOutputFolder(host string) {
+ // Create a folder/directory at a full qualified path
+ err := os.MkdirAll(filepath.Join("output-cariddi", host), Permission0755)
+ if err != nil {
+ fmt.Println("Can't create host output folder.")
+ os.Exit(1)
+ }
+}
+
// CreateOutputFile takes a target (of the attack), a subcommand
// (PORT-DNS-DIR-SUBDOMAIN-REPORT) and a format (json-html-txt).
// It creates the output folder if needed, then checks if the output file
-// already exists, if yes asks the user if scilla has to overwrite it;
-// if no scilla creates it.
+// already exists, if yes asks the user if cariddi has to overwrite it;
+// if no cariddi creates it.
// Whenever an instruction fails, it exits with an error message.
func CreateOutputFile(target string, subcommand string, format string) string {
target = ReplaceBadCharacterOutput(target)
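CreateHostOutputFolder relies on os.MkdirAll, which creates missing parent directories and succeeds silently when the path already exists, so it is safe to call once per stored response. A minimal sketch of that behaviour, with an example host:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Same shape as CreateHostOutputFolder("example.com"): parents are
	// created as needed and repeated calls are no-ops.
	dir := filepath.Join("output-cariddi", "example.com")
	for i := 0; i < 2; i++ {
		if err := os.MkdirAll(dir, 0755); err != nil {
			fmt.Println("Can't create host output folder.")
			os.Exit(1)
		}
	}
	fmt.Println("created", dir)
}
```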
From 85dae448f885a526b4a6f1215b917e7500096078 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:38:30 +0200
Subject: [PATCH 08/20] add -sr option
---
pkg/input/flags.go | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/pkg/input/flags.go b/pkg/input/flags.go
index c3fcfb0..be2fdca 100644
--- a/pkg/input/flags.go
+++ b/pkg/input/flags.go
@@ -65,6 +65,7 @@ type Input struct {
Info bool
Debug bool
UserAgent string
+ StoreResp bool
}
// ScanFlag defines all the options taken
@@ -111,6 +112,8 @@ func ScanFlag() Input {
userAgentPtr := flag.String("ua", "", "Use a custom User Agent.")
+ storeRespPtr := flag.Bool("sr", false, "Store HTTP responses.")
+
flag.Parse()
result := Input{
@@ -141,6 +144,7 @@ func ScanFlag() Input {
*infoPtr,
*debugPtr,
*userAgentPtr,
+ *storeRespPtr,
}
return result
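The new -sr switch follows the same pattern as the existing flags: a pointer registered before flag.Parse(), dereferenced afterwards into the Input struct. A reduced sketch of that flow:

```go
package main

import (
	"flag"
	"fmt"
)

// input mirrors the single field added to pkg/input.Input.
type input struct {
	StoreResp bool
}

func main() {
	storeRespPtr := flag.Bool("sr", false, "Store HTTP responses.")
	flag.Parse()

	result := input{StoreResp: *storeRespPtr}
	fmt.Println("store responses:", result.StoreResp) // true when run with -sr
}
```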
From 457df74a6cb72112ffedf612016da7f636515a16 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:38:46 +0200
Subject: [PATCH 09/20] add -sr option
---
pkg/output/examples.go | 16 ++++++++--------
pkg/output/help.go | 4 +++-
2 files changed, 11 insertions(+), 9 deletions(-)
diff --git a/pkg/output/examples.go b/pkg/output/examples.go
index 279013e..0adb87b 100644
--- a/pkg/output/examples.go
+++ b/pkg/output/examples.go
@@ -72,19 +72,19 @@ func PrintExamples() {
cat urls | cariddi -proxy http://127.0.0.1:8080 (Set a Proxy to be used (http and socks5 supported))
- cat urls | cariddi -headers "Cookie: auth=admin;type=2;; X-Custom: customHeader"
+ cat urls | cariddi -headers "Cookie: auth=admin;type=2;; X-Custom: customHeader" (Use custom headers)
- cat urls | cariddi -headersfile headers.txt
+ cat urls | cariddi -headersfile headers.txt (Read custom headers from an external file)
- cat urls | cariddi -err
+ cat urls | cariddi -err (Hunt for errors)
- cat urls | cariddi -info
+ cat urls | cariddi -info (Hunt for useful information)
- cat urls | cariddi -debug
+ cat urls | cariddi -debug (Print debug information)
- cat urls | cariddi -ua "Custom User Agent"
+ cat urls | cariddi -ua "Custom User Agent" (Use a custom User Agent)
- cat urls | cariddi -json
+ cat urls | cariddi -json (Print the output as JSON)
- cat urls | cariddi -json | jq .`)
+ cat urls | cariddi -sr (Store HTTP responses)`)
}
diff --git a/pkg/output/help.go b/pkg/output/help.go
index 80b0a5b..961d6d2 100644
--- a/pkg/output/help.go
+++ b/pkg/output/help.go
@@ -59,7 +59,7 @@ func PrintHelp() {
-i string
Ignore the URL containing at least one of the elements of this array.
-info
- Hunt for useful informations in websites.
+ Hunt for useful information in websites.
-intensive
Crawl searching for resources matching 2nd level domain.
-it string
@@ -77,6 +77,8 @@ func PrintHelp() {
-s Hunt for secrets.
-sf string
Use an external file (txt, one per line) to use custom regexes for secrets hunting.
+ -sr
+ Store HTTP responses.
-t int
Set timeout for the requests. (default 10)
-ua
From 4a056fc3ab880bff3719a7bcdef242ee3b76e219 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:39:08 +0200
Subject: [PATCH 10/20] add -sr option
---
pkg/output/output.go | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/pkg/output/output.go b/pkg/output/output.go
index c849bec..43fceee 100644
--- a/pkg/output/output.go
+++ b/pkg/output/output.go
@@ -36,6 +36,10 @@ import (
"github.com/edoardottt/cariddi/pkg/scanner"
)
+const (
+ CariddiOutputFolder = "output-cariddi"
+)
+
// PrintSimpleOutput prints line by line.
func PrintSimpleOutput(out []string) {
for _, elem := range out {
@@ -48,7 +52,7 @@ func PrintSimpleOutput(out []string) {
func TxtOutput(flags input.Input, finalResults []string, finalSecret []scanner.SecretMatched,
finalEndpoints []scanner.EndpointMatched, finalExtensions []scanner.FileTypeMatched,
finalErrors []scanner.ErrorMatched, finalInfos []scanner.InfoMatched) {
- exists, err := fileUtils.ElementExists("output-cariddi")
+ exists, err := fileUtils.ElementExists(CariddiOutputFolder)
if err != nil {
fmt.Println("Error while creating the output directory.")
os.Exit(1)
@@ -120,7 +124,7 @@ func TxtOutput(flags input.Input, finalResults []string, finalSecret []scanner.S
func HTMLOutput(flags input.Input, resultFilename string, finalResults []string, finalSecret []scanner.SecretMatched,
finalEndpoints []scanner.EndpointMatched, finalExtensions []scanner.FileTypeMatched,
finalErrors []scanner.ErrorMatched, finalInfos []scanner.InfoMatched) {
- exists, err := fileUtils.ElementExists("output-cariddi")
+ exists, err := fileUtils.ElementExists(CariddiOutputFolder)
if err != nil {
fmt.Println("Error while creating the output directory.")
From 3aa80149bbfd0fe4bcbd7c509d018c899c945116 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:39:17 +0200
Subject: [PATCH 11/20] add -sr option
---
pkg/crawler/colly.go | 73 ++++++++------------------------------------
1 file changed, 13 insertions(+), 60 deletions(-)
diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go
index a21b920..ae4d3f7 100644
--- a/pkg/crawler/colly.go
+++ b/pkg/crawler/colly.go
@@ -48,59 +48,6 @@ import (
"github.com/gocolly/colly/extensions"
)
-type Results struct {
- URLs []string
- Secrets []scanner.SecretMatched
- Endpoints []scanner.EndpointMatched
- Extensions []scanner.FileTypeMatched
- Errors []scanner.ErrorMatched
- Infos []scanner.InfoMatched
-}
-
-type Scan struct {
- // Flags
- Cache bool
- Debug bool
- EndpointsFlag bool
- ErrorsFlag bool
- InfoFlag bool
- Intensive bool
- Plain bool
- Rua bool
- SecretsFlag bool
- Ignore string
- IgnoreTxt string
- JSON bool
- HTML string
- Proxy string
- Target string
- Txt string
- UserAgent string
- FileType int
- Headers map[string]string
-
- // Settings
- Concurrency int
- Delay int
- Timeout int
-
- // Storage
- SecretsSlice []string
- EndpointsSlice []string
-}
-
-type Event struct {
- ProtocolTemp string
- TargetTemp string
- Target string
- Intensive bool
- Ignore bool
- Debug bool
- JSON bool
- IgnoreSlice []string
- URLs *[]string
-}
-
// New is the actual crawler engine.
// It controls all the behaviours of a scan
// (event handlers, secrets, errors, extensions and endpoints scanning).
@@ -182,6 +129,17 @@ func New(scan *Scan) *Results {
}
c.OnResponse(func(r *colly.Response) {
+ if !scan.JSON {
+ fmt.Println(r.Request.URL)
+ }
+
+ if scan.StoreResp {
+ err := output.StoreHTTPResponse(r)
+ if err != nil {
+ log.Println(err)
+ }
+ }
+
minBodyLength := 10
lengthOk := len(string(r.Body)) > minBodyLength
secrets := []scanner.SecretMatched{}
@@ -231,10 +189,12 @@ func New(scan *Scan) *Results {
infos = append(infos, infosSlice...)
}
}
+
if scan.JSON {
jsonOutput, err := output.GetJSONString(
r, secrets, parameters, filetype, errors, infos,
)
+
if err == nil {
fmt.Println(string(jsonOutput))
} else {
@@ -374,13 +334,6 @@ func CreateColly(delayTime int, concurrency int, cache bool, timeout int,
// registerHTMLEvents registers the associated functions for each
// HTML event triggering an action.
func registerHTMLEvents(c *colly.Collector, event *Event) {
- // On every request that Colly is making, print the URL it's currently visiting
- c.OnRequest(func(e *colly.Request) {
- if !event.JSON {
- fmt.Println(e.URL.String())
- }
- })
-
// On every a element which has href attribute call callback
c.OnHTML("a[href]", func(e *colly.HTMLElement) {
link := e.Attr("href")
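With patch 11 the URL echo moves from OnRequest to OnResponse, so a URL is printed only after its body has actually been fetched, and the same callback can hand the response to output.StoreHTTPResponse. A minimal colly sketch of the pattern (the target URL is an example):

```go
package main

import (
	"fmt"
	"log"

	"github.com/gocolly/colly"
)

func main() {
	storeResp := true // stands in for scan.StoreResp

	c := colly.NewCollector()
	c.OnResponse(func(r *colly.Response) {
		fmt.Println(r.Request.URL) // echoed only after a successful fetch
		if storeResp {
			// cariddi calls output.StoreHTTPResponse(r) here
			log.Printf("would store %d bytes for %s", len(r.Body), r.Request.URL)
		}
	})

	if err := c.Visit("https://example.com"); err != nil {
		log.Println(err)
	}
}
```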
From d3c11781c9e9aa8786f9e1c491478002a7d91d06 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:39:35 +0200
Subject: [PATCH 12/20] add responses
---
pkg/crawler/options.go | 83 ++++++++++++++++++
pkg/output/responses.go | 185 ++++++++++++++++++++++++++++++++++++++++
2 files changed, 268 insertions(+)
create mode 100644 pkg/crawler/options.go
create mode 100644 pkg/output/responses.go
diff --git a/pkg/crawler/options.go b/pkg/crawler/options.go
new file mode 100644
index 0000000..73ab694
--- /dev/null
+++ b/pkg/crawler/options.go
@@ -0,0 +1,83 @@
+/*
+==========
+Cariddi
+==========
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see http://www.gnu.org/licenses/.
+
+ @Repository: https://github.com/edoardottt/cariddi
+
+ @Author: edoardottt, https://www.edoardoottavianelli.it
+
+ @License: https://github.com/edoardottt/cariddi/blob/main/LICENSE
+
+*/
+
+package crawler
+
+import "github.com/edoardottt/cariddi/pkg/scanner"
+
+type Results struct {
+ URLs []string
+ Secrets []scanner.SecretMatched
+ Endpoints []scanner.EndpointMatched
+ Extensions []scanner.FileTypeMatched
+ Errors []scanner.ErrorMatched
+ Infos []scanner.InfoMatched
+}
+
+type Scan struct {
+ // Flags
+ Cache bool
+ Debug bool
+ EndpointsFlag bool
+ ErrorsFlag bool
+ InfoFlag bool
+ Intensive bool
+ Plain bool
+ Rua bool
+ SecretsFlag bool
+ Ignore string
+ IgnoreTxt string
+ JSON bool
+ HTML string
+ Proxy string
+ Target string
+ Txt string
+ UserAgent string
+ FileType int
+ Headers map[string]string
+ StoreResp bool
+
+ // Settings
+ Concurrency int
+ Delay int
+ Timeout int
+
+ // Storage
+ SecretsSlice []string
+ EndpointsSlice []string
+}
+
+type Event struct {
+ ProtocolTemp string
+ TargetTemp string
+ Target string
+ Intensive bool
+ Ignore bool
+ Debug bool
+ JSON bool
+ IgnoreSlice []string
+ URLs *[]string
+}
diff --git a/pkg/output/responses.go b/pkg/output/responses.go
new file mode 100644
index 0000000..d983e28
--- /dev/null
+++ b/pkg/output/responses.go
@@ -0,0 +1,185 @@
+/*
+==========
+Cariddi
+==========
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see http://www.gnu.org/licenses/.
+
+ @Repository: https://github.com/edoardottt/cariddi
+
+ @Author: edoardottt, https://www.edoardoottavianelli.it
+
+ @License: https://github.com/edoardottt/cariddi/blob/main/LICENSE
+
+*/
+
+package output
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "net/url"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/gocolly/colly"
+
+ fileUtils "github.com/edoardottt/cariddi/internal/file"
+)
+
+const (
+ index = "index.responses.txt"
+)
+
+var (
+ ErrHTTPResp = errors.New("cannot store HTTP response")
+)
+
+func getResponseHash(url string) string {
+ hash := sha1.Sum([]byte(url))
+ return hex.EncodeToString(hash[:])
+}
+
+// FormatResponse formats an HTTP response ready to be written in a file.
+func FormatResponse(resp *colly.Response) ([]byte, error) {
+ builder := &bytes.Buffer{}
+
+ builder.WriteString(resp.Request.URL.String())
+ builder.WriteString("\n\n\n")
+
+ builder.WriteString(resp.Request.Method)
+ builder.WriteString(" ")
+
+ path := resp.Request.URL.Path
+ if resp.Request.URL.Fragment != "" {
+ path = path + "#" + resp.Request.URL.Fragment
+ }
+
+ builder.WriteString(path)
+ builder.WriteString(" ")
+ builder.WriteString("HTTP/1.1")
+ builder.WriteString("\n")
+ builder.WriteString("Host: " + resp.Request.URL.Host)
+ builder.WriteRune('\n')
+
+ for k, v := range *resp.Request.Headers {
+ builder.WriteString(k + ": " + strings.Join(v, "; ") + "\n")
+ }
+
+ if resp.Request.Body != nil {
+ bodyResp, _ := io.ReadAll(resp.Request.Body)
+ if string(bodyResp) != "" {
+ builder.WriteString("\n")
+ builder.WriteString(string(bodyResp))
+ }
+ }
+
+ builder.WriteString("\n\n")
+ builder.WriteString("HTTP/1.1")
+ builder.WriteString(" ")
+ builder.WriteString(fmt.Sprint(resp.StatusCode))
+ builder.WriteString("\n")
+
+ for k, v := range *resp.Headers {
+ builder.WriteString(k + ": " + strings.Join(v, "; ") + "\n")
+ }
+
+ builder.WriteString("\n")
+
+ body, _ := io.ReadAll(bytes.NewReader(resp.Body))
+
+ builder.WriteString(string(body))
+
+ return builder.Bytes(), nil
+}
+
+func getResponseFileName(folder, url string) string {
+ file := getResponseHash(url) + ".txt"
+ return filepath.Join(folder, file)
+}
+
+// UpdateIndex updates the index file with the
+// correct information linking to HTTP responses files.
+// If it fails returns an error.
+func UpdateIndex(resp *colly.Response) error {
+ index, err := os.OpenFile(filepath.Join(CariddiOutputFolder, index), os.O_APPEND|os.O_WRONLY, fileUtils.Permission0644)
+ if err != nil {
+ return err
+ }
+
+ defer index.Close()
+
+ builder := &bytes.Buffer{}
+
+ builder.WriteString(getResponseFileName(filepath.Join(CariddiOutputFolder, resp.Request.URL.Host),
+ resp.Request.URL.String()))
+ builder.WriteRune(' ')
+ builder.WriteString(resp.Request.URL.String())
+ builder.WriteRune(' ')
+ builder.WriteString("(" + fmt.Sprint(resp.StatusCode) + ")")
+ builder.WriteRune('\n')
+
+ if _, writeErr := index.Write(builder.Bytes()); writeErr != nil {
+ return fmt.Errorf("%w: %s", writeErr, "could not update index")
+ }
+
+ return nil
+}
+
+// WriteHTTPResponse creates an HTTP response output file and
+// writes the HTTP response inside it.
+// If it fails returns an error.
+func WriteHTTPResponse(inputURL *url.URL, response []byte) error {
+ file := getResponseFileName(filepath.Join(CariddiOutputFolder, inputURL.Host), inputURL.String())
+
+ outFile, err := os.OpenFile(file, os.O_CREATE|os.O_WRONLY, fileUtils.Permission0644)
+ if err != nil {
+ return err
+ }
+
+ if _, writeErr := outFile.Write(response); writeErr != nil {
+ return ErrHTTPResp
+ }
+
+ return nil
+}
+
+// StoreHTTPResponse stores an HTTP response in a file.
+// If it fails returns an error.
+func StoreHTTPResponse(r *colly.Response) error {
+ fileUtils.CreateHostOutputFolder(r.Request.URL.Host)
+
+ err := UpdateIndex(r)
+ if err != nil {
+ log.Println(err)
+ }
+
+ response, err := FormatResponse(r)
+ if err != nil {
+ log.Println(err)
+ }
+
+ err = WriteHTTPResponse(r.Request.URL, response)
+ if err != nil {
+ log.Println(err)
+ }
+
+ return nil
+}
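responses.go keys each stored response by the SHA-1 of the full URL, so a URL always maps to the same file under its host's folder. A small sketch of the mapping implemented by getResponseHash and getResponseFileName:

```go
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"path/filepath"
)

func main() {
	url := "https://example.com/login?next=/admin" // example URL
	host := "example.com"

	hash := sha1.Sum([]byte(url))
	file := hex.EncodeToString(hash[:]) + ".txt"

	// e.g. output-cariddi/example.com/<40 hex chars>.txt
	fmt.Println(filepath.Join("output-cariddi", host, file))
}
```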
From 2a8b61652ec9629675fa01d1cf23b32638b53b91 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:42:05 +0200
Subject: [PATCH 13/20] update
---
README.md | 135 ++++++++++++++++++++++++++++--------------------------
1 file changed, 70 insertions(+), 65 deletions(-)
diff --git a/README.md b/README.md
index e487455..fbfcc8b 100644
--- a/README.md
+++ b/README.md
@@ -56,7 +56,7 @@
License
-Preview :bar_chart:
+Preview :bar_chart
----------
@@ -69,11 +69,13 @@ Installation 📡
----------
### Using Snap
+
```bash
sudo snap install cariddi
```
### Using Go
+
```bash
go install -v github.com/edoardottt/cariddi/cmd/cariddi@latest
```
@@ -97,7 +99,7 @@ You need [Go](https://golang.org/).
- `git clone https://github.com/edoardottt/cariddi.git`
- `cd cariddi`
- `go get ./...`
- - `.\make.bat windows` (to install)
+ - `.\make.bat windows` (to install)
- `.\make.bat unwindows` (to uninstall)
Get Started 🎉
@@ -108,99 +110,101 @@ Get Started 🎉
```
Usage of cariddi:
-c int
- Concurrency level. (default 20)
+ Concurrency level. (default 20)
-cache
- Use the .cariddi_cache folder as cache.
+ Use the .cariddi_cache folder as cache.
-d int
- Delay between a page crawled and another.
+ Delay between a page crawled and another.
-debug
- Print debug information while crawling.
- -e Hunt for juicy endpoints.
+ Print debug information while crawling.
+ -e Hunt for juicy endpoints.
-ef string
- Use an external file (txt, one per line) to use custom parameters for endpoints hunting.
+ Use an external file (txt, one per line) to use custom parameters for endpoints hunting.
-err
- Hunt for errors in websites.
+ Hunt for errors in websites.
-examples
- Print the examples.
+ Print the examples.
-ext int
- Hunt for juicy file extensions. Integer from 1(juicy) to 7(not juicy).
- -h Print the help.
+ Hunt for juicy file extensions. Integer from 1(juicy) to 7(not juicy).
+ -h Print the help.
-headers string
- Use custom headers for each request E.g. -headers "Cookie: auth=yes;;Client: type=2".
+ Use custom headers for each request E.g. -headers "Cookie: auth=yes;;Client: type=2".
-headersfile string
- Read from an external file custom headers (same format of headers flag).
+ Read from an external file custom headers (same format of headers flag).
-json
- Print the output as JSON in stdout.
+ Print the output as JSON in stdout.
-i string
- Ignore the URL containing at least one of the elements of this array.
+ Ignore the URL containing at least one of the elements of this array.
-info
- Hunt for useful informations in websites.
+ Hunt for useful information in websites.
-intensive
- Crawl searching for resources matching 2nd level domain.
+ Crawl searching for resources matching 2nd level domain.
-it string
- Ignore the URL containing at least one of the lines of this file.
+ Ignore the URL containing at least one of the lines of this file.
-oh string
- Write the output into an HTML file.
+ Write the output into an HTML file.
-ot string
- Write the output into a TXT file.
+ Write the output into a TXT file.
-plain
- Print only the results.
+ Print only the results.
-proxy string
- Set a Proxy to be used (http and socks5 supported).
+ Set a Proxy to be used (http and socks5 supported).
-rua
- Use a random browser user agent on every request.
- -s Hunt for secrets.
+ Use a random browser user agent on every request.
+ -s Hunt for secrets.
-sf string
- Use an external file (txt, one per line) to use custom regexes for secrets hunting.
+ Use an external file (txt, one per line) to use custom regexes for secrets hunting.
+ -sr
+ Store HTTP responses.
-t int
- Set timeout for the requests. (default 10)
+ Set timeout for the requests. (default 10)
-ua string
- Use a custom User Agent.
+ Use a custom User Agent.
-version
- Print the version.
+ Print the version.
```
-
Examples 💡
----------
- - `cariddi -version` (Print the version)
- - `cariddi -h` (Print the help)
- - `cariddi -examples` (Print the examples)
- - `cat urls | cariddi -s` (Hunt for secrets)
- - `cat urls | cariddi -d 2` (2 seconds between a page crawled and another)
- - `cat urls | cariddi -c 200` (Set the concurrency level to 200)
- - `cat urls | cariddi -e` (Hunt for juicy endpoints)
- - `cat urls | cariddi -plain` (Print only results)
- - `cat urls | cariddi -ot target_name` (Results in txt file)
- - `cat urls | cariddi -oh target_name` (Results in html file)
- - `cat urls | cariddi -ext 2` (Hunt for juicy (level 2 out of 7) files)
- - `cat urls | cariddi -e -ef endpoints_file` (Hunt for custom endpoints)
- - `cat urls | cariddi -s -sf secrets_file` (Hunt for custom secrets)
- - `cat urls | cariddi -i forum,blog,community,open` (Ignore urls containing these words)
- - `cat urls | cariddi -it ignore_file` (Ignore urls containing at least one line in the input file)
- - `cat urls | cariddi -cache` (Use the .cariddi_cache folder as cache)
- - `cat urls | cariddi -t 5` (Set the timeout for the requests)
- - `cat urls | cariddi -intensive` (Crawl searching also subdomains, same as `*.target.com`)
- - `cat urls | cariddi -rua` (Use a random browser user agent on every request)
- - `cat urls | cariddi -proxy http://127.0.0.1:8080` (Set a Proxy, http and socks5 supported)
- - `cat urls | cariddi -headers "Cookie: auth=admin;type=2;; X-Custom: customHeader"`
- - `cat urls | cariddi -headersfile headers.txt` (Read from an external file custom headers)
- - `cat urls | cariddi -err` (Hunt for errors in websites)
- - `cat urls | cariddi -info` (Hunt for useful informations in websites)
- - `cat urls | cariddi -debug` (Print debug information while crawling)
- - `cat urls | cariddi -ua "Custom User Agent"` (Use a custom User Agent)
- - `cat urls | cariddi -json` (Print the output as JSON in stdout)
- - `cat urls | cariddi -json | jq .` (Pipe the JSON output into jq)
-
- - For Windows:
- - use `powershell.exe -Command "cat urls | .\cariddi.exe"` inside the Command prompt
- - or just `cat urls | cariddi.exe` using PowerShell
-
- - To integrate cariddi with Burpsuite [make sure to follow these steps](https://github.com/edoardottt/cariddi/wiki/BurpSuite-Integration).
+- `cariddi -version` (Print the version)
+- `cariddi -h` (Print the help)
+- `cariddi -examples` (Print the examples)
+- `cat urls | cariddi -s` (Hunt for secrets)
+- `cat urls | cariddi -d 2` (2 seconds between a page crawled and another)
+- `cat urls | cariddi -c 200` (Set the concurrency level to 200)
+- `cat urls | cariddi -e` (Hunt for juicy endpoints)
+- `cat urls | cariddi -plain` (Print only results)
+- `cat urls | cariddi -ot target_name` (Results in txt file)
+- `cat urls | cariddi -oh target_name` (Results in html file)
+- `cat urls | cariddi -ext 2` (Hunt for juicy (level 2 out of 7) files)
+- `cat urls | cariddi -e -ef endpoints_file` (Hunt for custom endpoints)
+- `cat urls | cariddi -s -sf secrets_file` (Hunt for custom secrets)
+- `cat urls | cariddi -i forum,blog,community,open` (Ignore urls containing these words)
+- `cat urls | cariddi -it ignore_file` (Ignore urls containing at least one line in the input file)
+- `cat urls | cariddi -cache` (Use the .cariddi_cache folder as cache)
+- `cat urls | cariddi -t 5` (Set the timeout for the requests)
+- `cat urls | cariddi -intensive` (Crawl searching also subdomains, same as `*.target.com`)
+- `cat urls | cariddi -rua` (Use a random browser user agent on every request)
+- `cat urls | cariddi -proxy http://127.0.0.1:8080` (Set a Proxy, http and socks5 supported)
+- `cat urls | cariddi -headers "Cookie: auth=admin;type=2;; X-Custom: customHeader"`
+- `cat urls | cariddi -headersfile headers.txt` (Read custom headers from an external file)
+- `cat urls | cariddi -err` (Hunt for errors in websites)
+- `cat urls | cariddi -info` (Hunt for useful information in websites)
+- `cat urls | cariddi -debug` (Print debug information while crawling)
+- `cat urls | cariddi -ua "Custom User Agent"` (Use a custom User Agent)
+- `cat urls | cariddi -json` (Print the output as JSON in stdout)
+- `cat urls | cariddi -sr` (Store HTTP responses)
+
+- For Windows:
+ - use `powershell.exe -Command "cat urls | .\cariddi.exe"` inside the Command prompt
+ - or just `cat urls | cariddi.exe` using PowerShell
+
+- To integrate cariddi with Burpsuite [make sure to follow these steps](https://github.com/edoardottt/cariddi/wiki/BurpSuite-Integration).
Changelog 📌
-------
+
Detailed changes for each release are documented in the [release notes](https://github.com/edoardottt/cariddi/releases).
Contributing 🛠
@@ -209,16 +213,17 @@ Contributing 🛠
Just open an [issue](https://github.com/edoardottt/cariddi/issues)/[pull request](https://github.com/edoardottt/cariddi/pulls).
Before opening a pull request, download [golangci-lint](https://golangci-lint.run/usage/install/) and run
+
```bash
golangci-lint run
```
+
If there aren't errors, go ahead :)
**Help me building this!**
Special thanks to: [go-colly](http://go-colly.org/), [zricethezav](https://github.com/zricethezav/gitleaks/blob/master/config/default.go), [projectdiscovery](https://github.com/projectdiscovery/nuclei-templates/tree/master/file/keys), [tomnomnom](https://github.com/tomnomnom/gf/tree/master/examples), [RegexPassive](https://github.com/hahwul/RegexPassive) and [all the contributors](https://github.com/edoardottt/cariddi/wiki/Contributors).
-
License 📝
-------
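Stepping outside the diff for a moment: the flags above compose freely, so here is a sketch of a combined run, assembled only from flags shown in the examples list (the target name is illustrative):

```bash
# Hunt for secrets and juicy endpoints with 200 concurrent fetchers,
# print only the results, and also save them under output-cariddi/.
cat urls | cariddi -s -e -c 200 -plain -ot target_name
```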
From 839ac6af2b1141314b00808ce6afee6e95d84f81 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Tue, 6 Jun 2023 08:42:47 +0200
Subject: [PATCH 14/20] update
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index fbfcc8b..9f04995 100644
--- a/README.md
+++ b/README.md
@@ -56,7 +56,7 @@
License
-Preview :bar_chart
+Preview 📊
----------
From a4df7b52413ceeaff8201491a5eae75f6bef6013 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Fri, 9 Jun 2023 11:35:10 +0200
Subject: [PATCH 15/20] update func
---
cmd/cariddi/main.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go
index 649995b..384b232 100644
--- a/cmd/cariddi/main.go
+++ b/cmd/cariddi/main.go
@@ -128,7 +128,7 @@ func main() {
}
if config.StoreResp {
- fileUtils.CreateOutputFile("index", "responses", "txt")
+ fileUtils.CreateIndexOutputFile("index.responses.txt")
}
// Read headers if needed
From e26214df51aeee4f7a4c6b81034434d7bd4538f5 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Fri, 9 Jun 2023 11:35:28 +0200
Subject: [PATCH 16/20] add create index file func
---
internal/file/file.go | 29 ++++++++++++++++++++++++++---
1 file changed, 26 insertions(+), 3 deletions(-)
diff --git a/internal/file/file.go b/internal/file/file.go
index bd3b2fc..19c01be 100644
--- a/internal/file/file.go
+++ b/internal/file/file.go
@@ -65,7 +65,7 @@ func CreateHostOutputFolder(host string) {
}
}
-// CreateOutputFile takes a target (of the attack), a subcommand
+// CreateOutputFile takes as input a target (of the attack), a subcommand
// (PORT-DNS-DIR-SUBDOMAIN-REPORT) and a format (json-html-txt).
// It creates the output folder if needed, then checks if the output file
// already exists, if yes asks the user if cariddi has to overwrite it;
@@ -76,9 +76,9 @@ func CreateOutputFile(target string, subcommand string, format string) string {
var filename string
if subcommand != "" {
- filename = "output-cariddi" + "/" + target + "." + subcommand + "." + format
+ filename = filepath.Join("output-cariddi", target+"."+subcommand+"."+format)
} else {
- filename = "output-cariddi" + "/" + target + "." + format
+ filename = filepath.Join("output-cariddi", target+"."+format)
}
_, err := os.Stat(filename)
@@ -114,6 +114,29 @@ func CreateOutputFile(target string, subcommand string, format string) string {
return filename
}
+// CreateIndexOutputFile takes as input the name of the index file.
+// It creates the output folder if needed, then checks if the index output file
+// already exists; if not, cariddi creates it.
+// Whenever an instruction fails, it exits with an error message.
+func CreateIndexOutputFile(filename string) {
+ _, err := os.Stat(filename)
+
+ if os.IsNotExist(err) {
+ if _, err := os.Stat("output-cariddi/"); os.IsNotExist(err) {
+ CreateOutputFolder()
+ }
+ // If the file doesn't exist, create it.
+ filename = filepath.Join("output-cariddi", filename)
+ f, err := os.OpenFile(filename, os.O_CREATE|os.O_WRONLY, Permission0644)
+ if err != nil {
+ fmt.Println("Can't create output file.")
+ os.Exit(1)
+ }
+
+ f.Close()
+ }
+}
+
// ReplaceBadCharacterOutput replaces forward-slashes
// with dashes (to avoid problems with output folder).
func ReplaceBadCharacterOutput(input string) string {
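For context, a minimal sketch of how the new helper is wired in, mirroring the `main.go` change in PATCH 15. The standalone `main` and the import alias are assumptions for illustration only; `internal/file` is importable solely from within the cariddi module itself:

```go
package main

// Assumed import path and alias, matching the usage in cmd/cariddi/main.go.
import (
	fileUtils "github.com/edoardottt/cariddi/internal/file"
)

func main() {
	storeResp := true // stands in for config.StoreResp (the -sr flag)

	if storeResp {
		// Creates output-cariddi/index.responses.txt if it does not exist
		// yet; on failure it prints an error message and exits.
		fileUtils.CreateIndexOutputFile("index.responses.txt")
	}
}
```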
From c90430b5ef8d54ad3f7c6da56f8c7b4c7e507c36 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Fri, 9 Jun 2023 11:35:39 +0200
Subject: [PATCH 17/20] fix sr
---
pkg/output/responses.go | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/pkg/output/responses.go b/pkg/output/responses.go
index d983e28..fc3decb 100644
--- a/pkg/output/responses.go
+++ b/pkg/output/responses.go
@@ -119,7 +119,9 @@ func getResponseFileName(folder, url string) string {
// correct information linking to HTTP responses files.
// If it fails returns an error.
func UpdateIndex(resp *colly.Response) error {
- index, err := os.OpenFile(filepath.Join(CariddiOutputFolder, index), os.O_APPEND|os.O_WRONLY, fileUtils.Permission0644)
+ index, err := os.OpenFile(filepath.Join(CariddiOutputFolder, index),
+ os.O_APPEND|os.O_WRONLY,
+ fileUtils.Permission0644)
if err != nil {
return err
}
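The change above is formatting only; the call keeps the same semantics. A self-contained sketch of the append-open pattern `UpdateIndex` relies on (the path and the index line format here are illustrative assumptions, not cariddi's exact format):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Append-only open: no O_CREATE flag, so the file must already exist.
	// CreateIndexOutputFile (PATCH 16) guarantees that before the crawl.
	path := filepath.Join("output-cariddi", "index.responses.txt")

	f, err := os.OpenFile(path, os.O_APPEND|os.O_WRONLY, 0o644)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f.Close()

	// One line per stored response, linking the on-disk file to its URL.
	if _, err := fmt.Fprintln(f, "responses/example.com.txt https://example.com"); err != nil {
		fmt.Println(err)
	}
}
```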
From 36079c0ebc1486004b7801339ccf4f05a5a8755e Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Fri, 9 Jun 2023 11:36:47 +0200
Subject: [PATCH 18/20] v1.3.2
---
pkg/output/banner.go | 2 +-
snapcraft.yaml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/pkg/output/banner.go b/pkg/output/banner.go
index ae5e4d0..ac29b3f 100644
--- a/pkg/output/banner.go
+++ b/pkg/output/banner.go
@@ -35,7 +35,7 @@ import (
// nolint: checknoglobals
const (
- version = "v1.3.1"
+ version = "v1.3.2"
banner = ` _ _ _ _
(_) | | | (_)
___ __ _ _ __ _ __| | __| |_
diff --git a/snapcraft.yaml b/snapcraft.yaml
index 24ea194..44b51f0 100644
--- a/snapcraft.yaml
+++ b/snapcraft.yaml
@@ -2,7 +2,7 @@ name: cariddi
summary: Fast web crawler and scanner
description: |
Take a list of domains, crawl urls and scan for endpoints, secrets, api keys, file extensions, tokens and more
-version: 1.3.1
+version: 1.3.2
grade: stable
base: core20
From 2f71650571a3dbd0e2111eb3894d81808ef75c48 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Fri, 9 Jun 2023 11:51:16 +0200
Subject: [PATCH 19/20] linting
---
internal/file/file.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/file/file.go b/internal/file/file.go
index 19c01be..64bfd9a 100644
--- a/internal/file/file.go
+++ b/internal/file/file.go
@@ -127,6 +127,7 @@ func CreateIndexOutputFile(filename string) {
}
// If the file doesn't exist, create it.
filename = filepath.Join("output-cariddi", filename)
+
f, err := os.OpenFile(filename, os.O_CREATE|os.O_WRONLY, Permission0644)
if err != nil {
fmt.Println("Can't create output file.")
From d9ec9c419919c1ccf4cdb29778779374aee9326b Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Fri, 9 Jun 2023 11:51:33 +0200
Subject: [PATCH 20/20] Document input flags in code #123
---
pkg/input/flags.go | 82 +++++++++++++++++++++++++++++++---------------
1 file changed, 55 insertions(+), 27 deletions(-)
diff --git a/pkg/input/flags.go b/pkg/input/flags.go
index be2fdca..3848d7f 100644
--- a/pkg/input/flags.go
+++ b/pkg/input/flags.go
@@ -38,34 +38,62 @@ const (
// Input struct.
// It contains all the possible options.
type Input struct {
- Version bool
- Delay int
- Concurrency int
- Help bool
- Examples bool
- Plain bool
- JSON bool
- HTMLout string
- TXTout string
- Ignore string
- IgnoreTXT string
- Cache bool
- Timeout int
- Intensive bool
- Rua bool
- Proxy string
- Secrets bool
- SecretsFile string
- Endpoints bool
+ // Version prints the version banner.
+ Version bool
+ // Delay is the delay (in seconds) between one crawled page and the next.
+ Delay int
+ // Concurrency level.
+ Concurrency int
+ // Help prints the help banner.
+ Help bool
+ // Examples prints the examples banner.
+ Examples bool
+ // Plain prints only the results.
+ Plain bool
+ // JSON prints the output as JSON in stdout.
+ JSON bool
+ // HTMLout writes the output into an HTML file.
+ HTMLout string
+ // TXTout writes the output into a TXT file.
+ TXTout string
+ // Ignore ignores URLs containing at least one element of this comma-separated list.
+ Ignore string
+ // IgnoreTXT ignores URLs containing at least one line of this file.
+ IgnoreTXT string
+ // Cache uses the .cariddi_cache folder as cache.
+ Cache bool
+ // Timeout sets the timeout for the requests (default 10).
+ Timeout int
+ // Intensive crawls also subdomains (same as *.target.com), searching for resources matching the 2nd level domain.
+ Intensive bool
+ // Rua uses a random browser user agent on every request.
+ Rua bool
+ // Proxy sets a proxy to be used (http and socks5 supported).
+ Proxy string
+ // Secrets hunts for secrets.
+ Secrets bool
+ // SecretsFile uses an external file (txt, one regex per line) with custom regexes for secrets hunting.
+ SecretsFile string
+ // Endpoints hunts for juicy endpoints.
+ Endpoints bool
+ // EndpointsFile uses an external file (txt, one parameter per line) with custom parameters for endpoints hunting.
EndpointsFile string
- Extensions int
- Headers string
- HeadersFile string
- Errors bool
- Info bool
- Debug bool
- UserAgent string
- StoreResp bool
+ // Extensions hunts for juicy file extensions. Integer from 1 (juicy) to 7 (not juicy).
+ Extensions int
+ // Headers uses custom headers for each request, e.g. -headers "Cookie: auth=yes;;Client: type=2".
+ Headers string
+ // HeadersFile reads custom headers from an external file (same format as the Headers flag).
+ HeadersFile string
+ // Errors hunts for errors in websites.
+ Errors bool
+ // Info hunts for useful information in websites.
+ Info bool
+ // Debug prints debug information while crawling.
+ Debug bool
+ // UserAgent uses a custom User Agent.
+ UserAgent string
+ // StoreResp stores HTTP responses.
+ StoreResp bool
}
// ScanFlag defines all the options taken
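To see the documented options in action, a minimal sketch constructing the struct directly; in cariddi itself the struct is populated by flag parsing, the field values here are arbitrary, and the import path is an assumption:

```go
package main

import (
	"fmt"

	"github.com/edoardottt/cariddi/pkg/input" // assumed import path
)

func main() {
	// A typical "hunt for secrets and endpoints" configuration,
	// echoing the README examples; all values are illustrative.
	opts := input.Input{
		Concurrency: 200,
		Timeout:     10,
		Secrets:     true,
		Endpoints:   true,
		Plain:       true,
		StoreResp:   true,
	}

	fmt.Printf("%+v\n", opts)
}
```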