From ec331cf39aa77642e4d6a840235bc9bac55e5a81 Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Tue, 15 Nov 2022 16:49:38 -0500 Subject: [PATCH 01/22] Moved functions that were not strictly attached to colly into a 'utils' file for better readability --- pkg/crawler/colly.go | 213 ---------------------------------------- pkg/crawler/utils.go | 224 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 224 insertions(+), 213 deletions(-) create mode 100644 pkg/crawler/utils.go diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 03e46b8..e00bf95 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -35,8 +35,6 @@ import ( "net/http" "os" "os/signal" - "regexp" - "strings" "time" fileUtils "github.com/edoardottt/cariddi/internal/file" @@ -400,214 +398,3 @@ func visitHTMLLink(link, protocolTemp, targetTemp, target string, intensive, ign } } } - -// visitXMLLink checks if the collector should visit a link or not. -func visitXMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, - ignoreSlice []string, finalResults *[]string, e *colly.XMLElement, c *colly.Collector) { - if len(link) != 0 { - absoluteURL := urlUtils.AbsoluteURL(protocolTemp, targetTemp, e.Request.AbsoluteURL(link)) - // Visit link found on page - // Only those links are visited which are in AllowedDomains - if (!intensive && urlUtils.SameDomain(protocolTemp+"://"+target, absoluteURL)) || - (intensive && intensiveOk(targetTemp, absoluteURL, debug)) { - if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { - err := c.Visit(absoluteURL) - if !errors.Is(err, colly.ErrAlreadyVisited) { - *finalResults = append(*finalResults, absoluteURL) - - if err != nil && debug { - log.Println(err) - } - } - } - } - } -} - -// huntSecrets hunts for secrets. -func huntSecrets(secretsFile []string, target string, body string) []scanner.SecretMatched { - secrets := SecretsMatch(target, body, secretsFile) - return secrets -} - -// SecretsMatch checks if a body matches some secrets. -func SecretsMatch(url string, body string, secretsFile []string) []scanner.SecretMatched { - var secrets []scanner.SecretMatched - - if len(secretsFile) == 0 { - for _, secret := range scanner.GetSecretRegexes() { - if matched, err := regexp.Match(secret.Regex, []byte(body)); err == nil && matched { - re := regexp.MustCompile(secret.Regex) - match := re.FindStringSubmatch(body) - - // Avoiding false positives - var isFalsePositive = false - - for _, falsePositive := range secret.FalsePositives { - if strings.Contains(strings.ToLower(match[0]), falsePositive) { - isFalsePositive = true - break - } - } - - if !isFalsePositive { - secretFound := scanner.SecretMatched{Secret: secret, URL: url, Match: match[0]} - secrets = append(secrets, secretFound) - } - } - } - } else { - for _, secret := range secretsFile { - if matched, err := regexp.Match(secret, []byte(body)); err == nil && matched { - re := regexp.MustCompile(secret) - match := re.FindStringSubmatch(body) - secretScanned := scanner.Secret{Name: "CustomFromFile", Description: "", Regex: secret, Poc: ""} - secretFound := scanner.SecretMatched{Secret: secretScanned, URL: url, Match: match[0]} - secrets = append(secrets, secretFound) - } - } - } - - return secrets -} - -// huntEndpoints hunts for juicy endpoints. -func huntEndpoints(endpointsFile []string, target string) []scanner.EndpointMatched { - endpoints := EndpointsMatch(target, endpointsFile) - return endpoints -} - -// EndpointsMatch check if an endpoint matches a juicy parameter. 
-func EndpointsMatch(target string, endpointsFile []string) []scanner.EndpointMatched { - endpoints := []scanner.EndpointMatched{} - matched := []scanner.Parameter{} - parameters := urlUtils.RetrieveParameters(target) - - if len(endpointsFile) == 0 { - for _, parameter := range scanner.GetJuicyParameters() { - for _, param := range parameters { - if strings.ToLower(param) == parameter.Parameter { - matched = append(matched, parameter) - } - endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target}) - } - } - } else { - for _, parameter := range endpointsFile { - for _, param := range parameters { - if param == parameter { - matched = append(matched, scanner.Parameter{Parameter: parameter, Attacks: []string{}}) - } - endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target}) - } - } - } - - return endpoints -} - -// huntExtensions hunts for extensions. -func huntExtensions(target string, severity int) scanner.FileTypeMatched { - extension := scanner.FileTypeMatched{} - copyTarget := target - - for _, ext := range scanner.GetExtensions() { - if ext.Severity <= severity { - firstIndex := strings.Index(target, "?") - if firstIndex > -1 { - target = target[:firstIndex] - } - - if strings.ToLower(target[len(target)-len("."+ext.Extension):]) == "."+ext.Extension { - extension = scanner.FileTypeMatched{Filetype: ext, URL: copyTarget} - } - } - } - - return extension -} - -// huntErrors hunts for errors. -func huntErrors(target string, body string) []scanner.ErrorMatched { - errorsSlice := ErrorsMatch(target, body) - return errorsSlice -} - -// ErrorsMatch checks the patterns for errors. -func ErrorsMatch(url string, body string) []scanner.ErrorMatched { - errors := []scanner.ErrorMatched{} - - for _, errorItem := range scanner.GetErrorRegexes() { - for _, errorRegex := range errorItem.Regex { - if matched, err := regexp.Match(errorRegex, []byte(body)); err == nil && matched { - re := regexp.MustCompile(errorRegex) - match := re.FindStringSubmatch(body) - errorFound := scanner.ErrorMatched{Error: errorItem, URL: url, Match: match[0]} - errors = append(errors, errorFound) - } - } - } - - return errors -} - -// huntInfos hunts for infos. -func huntInfos(target string, body string) []scanner.InfoMatched { - infosSlice := InfoMatch(target, body) - return infosSlice -} - -// InfoMatch checks the patterns for infos. -func InfoMatch(url string, body string) []scanner.InfoMatched { - infos := []scanner.InfoMatched{} - - for _, infoItem := range scanner.GetInfoRegexes() { - for _, infoRegex := range infoItem.Regex { - if matched, err := regexp.Match(infoRegex, []byte(body)); err == nil && matched { - re := regexp.MustCompile(infoRegex) - match := re.FindStringSubmatch(body) - infoFound := scanner.InfoMatched{Info: infoItem, URL: url, Match: match[0]} - infos = append(infos, infoFound) - } - } - } - - return infos -} - -// RetrieveBody retrieves the body (in the response) of a url. -func RetrieveBody(target string) string { - sb, err := GetRequest(target) - if err == nil && sb != "" { - return sb - } - - return "" -} - -// IgnoreMatch checks if the URL should be ignored or not. -func IgnoreMatch(url string, ignoreSlice []string) bool { - for _, ignore := range ignoreSlice { - if strings.Contains(url, ignore) { - return true - } - } - - return false -} - -// intensiveOk checks if a given url can be crawled -// in intensive mode (if the 2nd level domain matches with -// the inputted target). 
-func intensiveOk(target string, urlInput string, debug bool) bool { - root, err := urlUtils.GetRootHost(urlInput) - if err != nil { - if debug { - fmt.Println(err.Error() + ": " + urlInput) - } - - return false - } - - return root == target -} diff --git a/pkg/crawler/utils.go b/pkg/crawler/utils.go new file mode 100644 index 0000000..214e7f2 --- /dev/null +++ b/pkg/crawler/utils.go @@ -0,0 +1,224 @@ +package crawler + +import ( + "errors" + "fmt" + "log" + "regexp" + "strings" + + urlUtils "github.com/edoardottt/cariddi/internal/url" + "github.com/edoardottt/cariddi/pkg/scanner" + "github.com/gocolly/colly" +) + +// visitXMLLink checks if the collector should visit a link or not. +func visitXMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, + ignoreSlice []string, finalResults *[]string, e *colly.XMLElement, c *colly.Collector) { + if len(link) != 0 { + absoluteURL := urlUtils.AbsoluteURL(protocolTemp, targetTemp, e.Request.AbsoluteURL(link)) + // Visit link found on page + // Only those links are visited which are in AllowedDomains + if (!intensive && urlUtils.SameDomain(protocolTemp+"://"+target, absoluteURL)) || + (intensive && intensiveOk(targetTemp, absoluteURL, debug)) { + if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { + err := c.Visit(absoluteURL) + if !errors.Is(err, colly.ErrAlreadyVisited) { + *finalResults = append(*finalResults, absoluteURL) + + if err != nil && debug { + log.Println(err) + } + } + } + } + } +} + +// huntSecrets hunts for secrets. +func huntSecrets(secretsFile []string, target string, body string) []scanner.SecretMatched { + secrets := SecretsMatch(target, body, secretsFile) + return secrets +} + +// SecretsMatch checks if a body matches some secrets. +func SecretsMatch(url string, body string, secretsFile []string) []scanner.SecretMatched { + var secrets []scanner.SecretMatched + + if len(secretsFile) == 0 { + for _, secret := range scanner.GetSecretRegexes() { + if matched, err := regexp.Match(secret.Regex, []byte(body)); err == nil && matched { + re := regexp.MustCompile(secret.Regex) + match := re.FindStringSubmatch(body) + + // Avoiding false positives + var isFalsePositive = false + + for _, falsePositive := range secret.FalsePositives { + if strings.Contains(strings.ToLower(match[0]), falsePositive) { + isFalsePositive = true + break + } + } + + if !isFalsePositive { + secretFound := scanner.SecretMatched{Secret: secret, URL: url, Match: match[0]} + secrets = append(secrets, secretFound) + } + } + } + } else { + for _, secret := range secretsFile { + if matched, err := regexp.Match(secret, []byte(body)); err == nil && matched { + re := regexp.MustCompile(secret) + match := re.FindStringSubmatch(body) + secretScanned := scanner.Secret{Name: "CustomFromFile", Description: "", Regex: secret, Poc: ""} + secretFound := scanner.SecretMatched{Secret: secretScanned, URL: url, Match: match[0]} + secrets = append(secrets, secretFound) + } + } + } + + return secrets +} + +// huntEndpoints hunts for juicy endpoints. +func huntEndpoints(endpointsFile []string, target string) []scanner.EndpointMatched { + endpoints := EndpointsMatch(target, endpointsFile) + return endpoints +} + +// EndpointsMatch check if an endpoint matches a juicy parameter. 
+func EndpointsMatch(target string, endpointsFile []string) []scanner.EndpointMatched { + endpoints := []scanner.EndpointMatched{} + matched := []scanner.Parameter{} + parameters := urlUtils.RetrieveParameters(target) + + if len(endpointsFile) == 0 { + for _, parameter := range scanner.GetJuicyParameters() { + for _, param := range parameters { + if strings.ToLower(param) == parameter.Parameter { + matched = append(matched, parameter) + } + endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target}) + } + } + } else { + for _, parameter := range endpointsFile { + for _, param := range parameters { + if param == parameter { + matched = append(matched, scanner.Parameter{Parameter: parameter, Attacks: []string{}}) + } + endpoints = append(endpoints, scanner.EndpointMatched{Parameters: matched, URL: target}) + } + } + } + + return endpoints +} + +// huntExtensions hunts for extensions. +func huntExtensions(target string, severity int) scanner.FileTypeMatched { + extension := scanner.FileTypeMatched{} + copyTarget := target + + for _, ext := range scanner.GetExtensions() { + if ext.Severity <= severity { + firstIndex := strings.Index(target, "?") + if firstIndex > -1 { + target = target[:firstIndex] + } + + if strings.ToLower(target[len(target)-len("."+ext.Extension):]) == "."+ext.Extension { + extension = scanner.FileTypeMatched{Filetype: ext, URL: copyTarget} + } + } + } + + return extension +} + +// huntErrors hunts for errors. +func huntErrors(target string, body string) []scanner.ErrorMatched { + errorsSlice := ErrorsMatch(target, body) + return errorsSlice +} + +// ErrorsMatch checks the patterns for errors. +func ErrorsMatch(url string, body string) []scanner.ErrorMatched { + errors := []scanner.ErrorMatched{} + + for _, errorItem := range scanner.GetErrorRegexes() { + for _, errorRegex := range errorItem.Regex { + if matched, err := regexp.Match(errorRegex, []byte(body)); err == nil && matched { + re := regexp.MustCompile(errorRegex) + match := re.FindStringSubmatch(body) + errorFound := scanner.ErrorMatched{Error: errorItem, URL: url, Match: match[0]} + errors = append(errors, errorFound) + } + } + } + + return errors +} + +// huntInfos hunts for infos. +func huntInfos(target string, body string) []scanner.InfoMatched { + infosSlice := InfoMatch(target, body) + return infosSlice +} + +// InfoMatch checks the patterns for infos. +func InfoMatch(url string, body string) []scanner.InfoMatched { + infos := []scanner.InfoMatched{} + + for _, infoItem := range scanner.GetInfoRegexes() { + for _, infoRegex := range infoItem.Regex { + if matched, err := regexp.Match(infoRegex, []byte(body)); err == nil && matched { + re := regexp.MustCompile(infoRegex) + match := re.FindStringSubmatch(body) + infoFound := scanner.InfoMatched{Info: infoItem, URL: url, Match: match[0]} + infos = append(infos, infoFound) + } + } + } + + return infos +} + +// RetrieveBody retrieves the body (in the response) of a url. +func RetrieveBody(target string) string { + sb, err := GetRequest(target) + if err == nil && sb != "" { + return sb + } + + return "" +} + +// IgnoreMatch checks if the URL should be ignored or not. +func IgnoreMatch(url string, ignoreSlice []string) bool { + for _, ignore := range ignoreSlice { + if strings.Contains(url, ignore) { + return true + } + } + + return false +} + +// intensiveOk checks if a given url can be crawled +// in intensive mode (if the 2nd level domain matches with +// the inputted target). 
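+// For example, assuming urlUtils.GetRootHost("https://api.example.com")
+// returns "example.com", a scan of example.com in intensive mode is also
+// allowed to visit api.example.com (illustrative values, not part of the
+// original patch).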
+func intensiveOk(target string, urlInput string, debug bool) bool { + root, err := urlUtils.GetRootHost(urlInput) + if err != nil { + if debug { + fmt.Println(err.Error() + ": " + urlInput) + } + + return false + } + + return root == target +} From 3a83d1f79b00f9104e8f326fe664df0e0933cc8a Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Tue, 15 Nov 2022 17:16:30 -0500 Subject: [PATCH 02/22] Fixes issue #94 --- cmd/cariddi/main.go | 14 +++++------ pkg/crawler/colly.go | 57 +++++++++++++++++++++++--------------------- 2 files changed, 37 insertions(+), 34 deletions(-) diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go index ca5cd1a..bda1af4 100644 --- a/cmd/cariddi/main.go +++ b/cmd/cariddi/main.go @@ -121,17 +121,17 @@ func main() { // For each target generate a crawler and collect all the results. for _, inp := range targets { - results, secrets, endpoints, extensions, errors, infos := crawler.New(inp, ResultTxt, ResultHTML, flags.Delay, + results := crawler.New(inp, ResultTxt, ResultHTML, flags.Delay, flags.Concurrency, flags.Ignore, flags.IgnoreTXT, flags.Cache, flags.Timeout, flags.Intensive, flags.Rua, flags.Proxy, flags.Insecure, flags.Secrets, secretsFileSlice, flags.Plain, flags.Endpoints, endpointsFileSlice, flags.Extensions, headers, flags.Errors, flags.Info, flags.Debug, flags.UserAgent) - finalResults = append(finalResults, results...) - finalSecret = append(finalSecret, secrets...) - finalEndpoints = append(finalEndpoints, endpoints...) - finalExtensions = append(finalExtensions, extensions...) - finalErrors = append(finalErrors, errors...) - finalInfos = append(finalInfos, infos...) + finalResults = append(finalResults, results.Results...) + finalSecret = append(finalSecret, results.Secrets...) + finalEndpoints = append(finalEndpoints, results.Endpoints...) + finalExtensions = append(finalExtensions, results.Extensions...) + finalErrors = append(finalErrors, results.Errors...) + finalInfos = append(finalInfos, results.Infos...) } // Remove duplicates from all the results. diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index e00bf95..15e8446 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -47,6 +47,15 @@ import ( "github.com/gocolly/colly/extensions" ) +type Results struct { + Results []string + Secrets []scanner.SecretMatched + Endpoints []scanner.EndpointMatched + Extensions []scanner.FileTypeMatched + Errors []scanner.ErrorMatched + Infos []scanner.InfoMatched +} + // New it's the actual crawler engine. // It controls all the behaviours of a scan // (event handlers, secrets, errors, extensions and endpoints scanning). @@ -54,12 +63,13 @@ func New(target string, txt string, html string, delayTime int, concurrency int, ignore string, ignoreTxt string, cache bool, timeout int, intensive bool, rua bool, proxy string, insecure bool, secretsFlag bool, secretsFile []string, plain bool, endpointsFlag bool, endpointsFile []string, fileType int, headers map[string]string, errorsFlag bool, infoFlag bool, - debug bool, userAgent string) ([]string, []scanner.SecretMatched, []scanner.EndpointMatched, - []scanner.FileTypeMatched, []scanner.ErrorMatched, []scanner.InfoMatched) { + debug bool, userAgent string) *Results { // This is to avoid to insert into the crawler target regular // expression directories passed as input. var targetTemp, protocolTemp string + results := &Results{} + // if there isn't a scheme use http. 
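// e.g. a bare input of "example.com" is crawled as "http://example.com"
// (illustrative value, matching the branch below)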
if !urlUtils.HasProtocol(target) { protocolTemp = "http" @@ -102,13 +112,6 @@ func New(target string, txt string, html string, delayTime int, concurrency int, ignoreSlice = fileUtils.ReadFile(ignoreTxt) } - FinalResults := []string{} - FinalSecrets := []scanner.SecretMatched{} - FinalEndpoints := []scanner.EndpointMatched{} - FinalExtensions := []scanner.FileTypeMatched{} - FinalErrors := []scanner.ErrorMatched{} - FinalInfos := []scanner.InfoMatched{} - // crawler creation c := CreateColly(delayTime, concurrency, cache, timeout, intensive, rua, proxy, insecure, userAgent, target) @@ -121,74 +124,74 @@ func New(target string, txt string, html string, delayTime int, concurrency int, c.OnHTML("a[href]", func(e *colly.HTMLElement) { link := e.Attr("href") if len(link) != 0 && link[0] != '#' { - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) } }) // On every script element which has src attribute call callback c.OnHTML("script[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every link element which has href attribute call callback c.OnHTML("link[href]", func(e *colly.HTMLElement) { link := e.Attr("href") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every iframe element which has src attribute call callback c.OnHTML("iframe[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every svg element which has src attribute call callback c.OnHTML("svg[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every img element which has src attribute call callback c.OnHTML("img[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every from element which has action attribute call callback c.OnHTML("form[action]", func(e *colly.HTMLElement) { link := e.Attr("action") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//url", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, 
ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//link", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//href", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//loc", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//fileurl", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &FinalResults, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Add headers (if needed) on each request @@ -211,7 +214,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, if secretsFlag && lengthOk { secretsSlice := huntSecrets(secretsFile, r.Request.URL.String(), string(r.Body)) for _, elem := range secretsSlice { - FinalSecrets = append(FinalSecrets, elem) + results.Secrets = append(results.Secrets, elem) } } // HERE SCAN FOR ENDPOINTS @@ -219,7 +222,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, endpointsSlice := huntEndpoints(endpointsFile, r.Request.URL.String()) for _, elem := range endpointsSlice { if len(elem.Parameters) != 0 { - FinalEndpoints = append(FinalEndpoints, elem) + results.Endpoints = append(results.Endpoints, elem) } } } @@ -227,14 +230,14 @@ func New(target string, txt string, html string, delayTime int, concurrency int, if 1 <= fileType && fileType <= 7 { extension := huntExtensions(r.Request.URL.String(), fileType) if extension.URL != "" { - FinalExtensions = append(FinalExtensions, extension) + results.Extensions = append(results.Extensions, extension) } } // HERE SCAN FOR ERRORS if errorsFlag { errorsSlice := huntErrors(r.Request.URL.String(), string(r.Body)) for _, elem := range errorsSlice { - FinalErrors = append(FinalErrors, elem) + results.Errors = append(results.Errors, elem) } } @@ -242,7 +245,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, if infoFlag { infosSlice := huntInfos(r.Request.URL.String(), string(r.Body)) for _, elem := range infosSlice { - FinalInfos = append(FinalInfos, elem) + results.Infos = append(results.Infos, elem) } } } @@ -312,7 +315,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, output.FooterHTML(html) } - return FinalResults, FinalSecrets, FinalEndpoints, FinalExtensions, FinalErrors, FinalInfos + return results } // 
CreateColly takes as input all the settings needed to instantiate From a21f6d3a7db4ea817052dadfc20c793b5b0e2fcf Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Tue, 15 Nov 2022 17:29:26 -0500 Subject: [PATCH 03/22] Fixed linting issue --- pkg/crawler/colly.go | 51 +++++++++++++++++++++++++------------------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 15e8446..2dde2f9 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -113,7 +113,8 @@ func New(target string, txt string, html string, delayTime int, concurrency int, } // crawler creation - c := CreateColly(delayTime, concurrency, cache, timeout, intensive, rua, proxy, insecure, userAgent, target) + c := CreateColly(delayTime, concurrency, cache, timeout, + intensive, rua, proxy, insecure, userAgent, target) // On every request that Colly is making, print the URL it's currently visiting c.OnRequest(func(e *colly.Request) { @@ -124,74 +125,86 @@ func New(target string, txt string, html string, delayTime int, concurrency int, c.OnHTML("a[href]", func(e *colly.HTMLElement) { link := e.Attr("href") if len(link) != 0 && link[0] != '#' { - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) } }) // On every script element which has src attribute call callback c.OnHTML("script[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every link element which has href attribute call callback c.OnHTML("link[href]", func(e *colly.HTMLElement) { link := e.Attr("href") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every iframe element which has src attribute call callback c.OnHTML("iframe[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every svg element which has src attribute call callback c.OnHTML("svg[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every img element which has src attribute call callback c.OnHTML("img[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // On every from element which has action attribute call callback c.OnHTML("form[action]", func(e *colly.HTMLElement) { link := e.Attr("action") - visitHTMLLink(link, 
protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//url", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//link", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//href", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//loc", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//fileurl", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, ignoreBool, debug, ignoreSlice, &results.Results, e, c) + visitXMLLink(link, protocolTemp, targetTemp, target, intensive, + ignoreBool, debug, ignoreSlice, &results.Results, e, c) }) // Add headers (if needed) on each request @@ -213,9 +226,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, // HERE SCAN FOR SECRETS if secretsFlag && lengthOk { secretsSlice := huntSecrets(secretsFile, r.Request.URL.String(), string(r.Body)) - for _, elem := range secretsSlice { - results.Secrets = append(results.Secrets, elem) - } + results.Secrets = append(results.Secrets, secretsSlice...) } // HERE SCAN FOR ENDPOINTS if endpointsFlag { @@ -236,17 +247,13 @@ func New(target string, txt string, html string, delayTime int, concurrency int, // HERE SCAN FOR ERRORS if errorsFlag { errorsSlice := huntErrors(r.Request.URL.String(), string(r.Body)) - for _, elem := range errorsSlice { - results.Errors = append(results.Errors, elem) - } + results.Errors = append(results.Errors, errorsSlice...) } // HERE SCAN FOR INFOS if infoFlag { infosSlice := huntInfos(r.Request.URL.String(), string(r.Body)) - for _, elem := range infosSlice { - results.Infos = append(results.Infos, elem) - } + results.Infos = append(results.Infos, infosSlice...) 
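// The variadic append above replaces the element-by-element loop that the
// linter flagged: append(dst, src...) spreads src into individual
// arguments. A minimal sketch of the idiom (hypothetical values, not part
// of this patch):
//
//	found := []string{"a", "b", "c"}
//	var all []string
//	all = append(all, found...)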
} } }) From f1c1bdc8d0ed086b579493c7d4154b3c7f9afdae Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Wed, 16 Nov 2022 08:57:36 -0500 Subject: [PATCH 04/22] fix https://github.com/edoardottt/cariddi/pull/97/files/a21f6d3a7db4ea817052dadfc20c793b5b0e2fcf#r1023601615 --- pkg/crawler/colly.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 2dde2f9..60480cb 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -48,7 +48,7 @@ import ( ) type Results struct { - Results []string + URLs []string Secrets []scanner.SecretMatched Endpoints []scanner.EndpointMatched Extensions []scanner.FileTypeMatched From 9163400fdaebbad9be36f6dcbe0bdcf7f6a8b679 Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Wed, 16 Nov 2022 08:59:37 -0500 Subject: [PATCH 05/22] fix linting --- cmd/cariddi/main.go | 2 +- pkg/crawler/colly.go | 24 ++++++++++++------------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go index bda1af4..b8b8f68 100644 --- a/cmd/cariddi/main.go +++ b/cmd/cariddi/main.go @@ -126,7 +126,7 @@ func main() { flags.Rua, flags.Proxy, flags.Insecure, flags.Secrets, secretsFileSlice, flags.Plain, flags.Endpoints, endpointsFileSlice, flags.Extensions, headers, flags.Errors, flags.Info, flags.Debug, flags.UserAgent) - finalResults = append(finalResults, results.Results...) + finalResults = append(finalResults, results.URLs...) finalSecret = append(finalSecret, results.Secrets...) finalEndpoints = append(finalEndpoints, results.Endpoints...) finalExtensions = append(finalExtensions, results.Extensions...) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 60480cb..b6cea0d 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -126,7 +126,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, link := e.Attr("href") if len(link) != 0 && link[0] != '#' { visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) } }) @@ -134,77 +134,77 @@ func New(target string, txt string, html string, delayTime int, concurrency int, c.OnHTML("script[src]", func(e *colly.HTMLElement) { link := e.Attr("src") visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // On every link element which has href attribute call callback c.OnHTML("link[href]", func(e *colly.HTMLElement) { link := e.Attr("href") visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // On every iframe element which has src attribute call callback c.OnHTML("iframe[src]", func(e *colly.HTMLElement) { link := e.Attr("src") visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // On every svg element which has src attribute call callback c.OnHTML("svg[src]", func(e *colly.HTMLElement) { link := e.Attr("src") visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // On every img element which has src attribute call callback c.OnHTML("img[src]", func(e 
*colly.HTMLElement) { link := e.Attr("src") visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // On every from element which has action attribute call callback c.OnHTML("form[action]", func(e *colly.HTMLElement) { link := e.Attr("action") visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//url", func(e *colly.XMLElement) { link := e.Text visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//link", func(e *colly.XMLElement) { link := e.Text visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//href", func(e *colly.XMLElement) { link := e.Text visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//loc", func(e *colly.XMLElement) { link := e.Text visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//fileurl", func(e *colly.XMLElement) { link := e.Text visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.Results, e, c) + ignoreBool, debug, ignoreSlice, &results.URLs, e, c) }) // Add headers (if needed) on each request From 314323364e5a8725ed8440e851a1f6cc801173ad Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Wed, 16 Nov 2022 19:44:22 -0500 Subject: [PATCH 06/22] Fixes issue #95. Now that we have a solid object as a reference, we should be able to remove redundant lines :) --- cmd/cariddi/main.go | 45 ++++++++----- pkg/crawler/colly.go | 156 ++++++++++++++++++++++++++----------------- 2 files changed, 123 insertions(+), 78 deletions(-) diff --git a/cmd/cariddi/main.go index b8b8f68..57217cf 100644 --- a/cmd/cariddi/main.go +++ b/cmd/cariddi/main.go @@ -65,6 +65,28 @@ func main() { output.Beautify() } + // Setup the config according to the flags that were + // passed via the CLI + config := &crawler.Scan{ + Delay: flags.Delay, + Concurrency: flags.Concurrency, + Ignore: flags.Ignore, + IgnoreTxt: flags.IgnoreTXT, + Cache: flags.Cache, + Timeout: flags.Timeout, + Intensive: flags.Intensive, + Rua: flags.Rua, + Proxy: flags.Proxy, + SecretsFlag: flags.Secrets, + Plain: flags.Plain, + EndpointsFlag: flags.Endpoints, + FileType: flags.Extensions, + ErrorsFlag: flags.Errors, + InfoFlag: flags.Info, + Debug: flags.Debug, + UserAgent: flags.UserAgent, + } + // Read the targets from standard input. targets := input.ScanTargets() @@ -73,16 +95,14 @@ func main() { // If it is needed, read custom endpoints definition // from the specified file.
- var endpointsFileSlice []string if flags.EndpointsFile != "" { - endpointsFileSlice = fileUtils.ReadFile(flags.EndpointsFile) + config.EndpointsFile = fileUtils.ReadFile(flags.EndpointsFile) } // If it is needed, read custom secrets definition // from the specified file. - var secretsFileSlice []string if flags.SecretsFile != "" { - secretsFileSlice = fileUtils.ReadFile(flags.SecretsFile) + config.SecretsFile = fileUtils.ReadFile(flags.SecretsFile) } finalResults := []string{} @@ -93,9 +113,9 @@ func main() { finalInfos := []scanner.InfoMatched{} // Create output files if needed (txt / html). - var ResultTxt = "" + config.Txt = "" if flags.TXT != "" { - ResultTxt = fileUtils.CreateOutputFile(flags.TXT, "results", "txt") + config.Txt = fileUtils.CreateOutputFile(flags.TXT, "results", "txt") } var ResultHTML = "" @@ -106,8 +126,6 @@ func main() { } // Read headers if needed - var headers map[string]string - if flags.HeadersFile != "" || flags.Headers != "" { var headersInput string if flags.HeadersFile != "" { @@ -116,16 +134,13 @@ func main() { headersInput = flags.Headers } - headers = input.GetHeaders(headersInput) + config.Headers = input.GetHeaders(headersInput) } // For each target generate a crawler and collect all the results. - for _, inp := range targets { - results := crawler.New(inp, ResultTxt, ResultHTML, flags.Delay, - flags.Concurrency, flags.Ignore, flags.IgnoreTXT, flags.Cache, flags.Timeout, flags.Intensive, - flags.Rua, flags.Proxy, flags.Insecure, flags.Secrets, secretsFileSlice, flags.Plain, flags.Endpoints, - endpointsFileSlice, flags.Extensions, headers, flags.Errors, flags.Info, flags.Debug, flags.UserAgent) - + for _, target := range targets { + config.Target = target + results := crawler.New(config) finalResults = append(finalResults, results.URLs...) finalSecret = append(finalSecret, results.Secrets...) finalEndpoints = append(finalEndpoints, results.Endpoints...) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index b6cea0d..91690c0 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -56,14 +56,44 @@ type Results struct { Infos []scanner.InfoMatched } +type Scan struct { + // Flags + Cache bool + Debug bool + EndpointsFlag bool + ErrorsFlag bool + InfoFlag bool + Insecure bool + Intensive bool + Plain bool + Rua bool + SecretsFlag bool + + // Settings + Concurrency int + Delay int + FileType int + Timeout int + + Ignore string + IgnoreTxt string + Html string + Proxy string + Target string + Txt string + UserAgent string + + // Storage + SecretsFile []string + EndpointsFile []string + + Headers map[string]string +} + // New it's the actual crawler engine. // It controls all the behaviours of a scan // (event handlers, secrets, errors, extensions and endpoints scanning). -func New(target string, txt string, html string, delayTime int, concurrency int, - ignore string, ignoreTxt string, cache bool, timeout int, intensive bool, rua bool, - proxy string, insecure bool, secretsFlag bool, secretsFile []string, plain bool, endpointsFlag bool, - endpointsFile []string, fileType int, headers map[string]string, errorsFlag bool, infoFlag bool, - debug bool, userAgent string) *Results { +func New(scan *Scan) *Results { // This is to avoid to insert into the crawler target regular // expression directories passed as input. var targetTemp, protocolTemp string @@ -71,15 +101,15 @@ func New(target string, txt string, html string, delayTime int, concurrency int, results := &Results{} // if there isn't a scheme use http. 
- if !urlUtils.HasProtocol(target) { + if !urlUtils.HasProtocol(scan.Target) { protocolTemp = "http" - targetTemp = urlUtils.GetHost(protocolTemp + "://" + target) + targetTemp = urlUtils.GetHost(protocolTemp + "://" + scan.Target) } else { - protocolTemp = urlUtils.GetProtocol(target) - targetTemp = urlUtils.GetHost(target) + protocolTemp = urlUtils.GetProtocol(scan.Target) + targetTemp = urlUtils.GetHost(scan.Target) } - if intensive { + if scan.Intensive { var err error targetTemp, err = urlUtils.GetRootHost(protocolTemp + "://" + targetTemp) @@ -90,31 +120,31 @@ func New(target string, txt string, html string, delayTime int, concurrency int, } if targetTemp == "" { - fmt.Println("The URL provided is not built in a proper way: " + target) + fmt.Println("The URL provided is not built in a proper way: " + scan.Target) os.Exit(1) } // clean target input - target = urlUtils.RemoveProtocol(target) + scan.Target = urlUtils.RemoveProtocol(scan.Target) ignoreSlice := []string{} ignoreBool := false // if ignore -> produce the slice - if ignore != "" { + if scan.Ignore != "" { ignoreBool = true - ignoreSlice = sliceUtils.CheckInputArray(ignore) + ignoreSlice = sliceUtils.CheckInputArray(scan.Ignore) } // if ignoreTxt -> produce the slice - if ignoreTxt != "" { + if scan.IgnoreTxt != "" { ignoreBool = true - ignoreSlice = fileUtils.ReadFile(ignoreTxt) + ignoreSlice = fileUtils.ReadFile(scan.IgnoreTxt) } // crawler creation - c := CreateColly(delayTime, concurrency, cache, timeout, - intensive, rua, proxy, insecure, userAgent, target) + c := CreateColly(scan.Delay, scan.Concurrency, scan.Cache, scan.Timeout, + scan.Intensive, scan.Rua, scan.Proxy, scan.Insecure, scan.UserAgent, scan.Target) // On every request that Colly is making, print the URL it's currently visiting c.OnRequest(func(e *colly.Request) { @@ -125,92 +155,92 @@ func New(target string, txt string, html string, delayTime int, concurrency int, c.OnHTML("a[href]", func(e *colly.HTMLElement) { link := e.Attr("href") if len(link) != 0 && link[0] != '#' { - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) } }) // On every script element which has src attribute call callback c.OnHTML("script[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // On every link element which has href attribute call callback c.OnHTML("link[href]", func(e *colly.HTMLElement) { link := e.Attr("href") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // On every iframe element which has src attribute call callback c.OnHTML("iframe[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // On every svg element which has src 
attribute call callback c.OnHTML("svg[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // On every img element which has src attribute call callback c.OnHTML("img[src]", func(e *colly.HTMLElement) { link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // On every from element which has action attribute call callback c.OnHTML("form[action]", func(e *colly.HTMLElement) { link := e.Attr("action") - visitHTMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//url", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//link", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//href", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//loc", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // Create a callback on the XPath query searching for the URLs c.OnXML("//fileurl", func(e *colly.XMLElement) { link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, target, intensive, - ignoreBool, debug, ignoreSlice, &results.URLs, e, c) + visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, + ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) }) // Add headers (if needed) on each request - if (len(headers)) > 0 { + if (len(scan.Headers)) > 0 { c.OnRequest(func(r *colly.Request) { - for header, value := range headers { + for header, value := range scan.Headers { r.Headers.Set(header, value) } }) @@ -222,15 +252,15 @@ func New(target string, txt string, html string, delayTime int, concurrency int, lengthOk := len(string(r.Body)) > minBodyLentgh // if endpoints or secrets or filetype: scan - if endpointsFlag || 
secretsFlag || (1 <= fileType && fileType <= 7) || errorsFlag || infoFlag { + if scan.EndpointsFlag || scan.SecretsFlag || (1 <= scan.FileType && scan.FileType <= 7) || scan.ErrorsFlag || scan.InfoFlag { // HERE SCAN FOR SECRETS - if secretsFlag && lengthOk { - secretsSlice := huntSecrets(secretsFile, r.Request.URL.String(), string(r.Body)) + if scan.SecretsFlag && lengthOk { + secretsSlice := huntSecrets(scan.SecretsFile, r.Request.URL.String(), string(r.Body)) results.Secrets = append(results.Secrets, secretsSlice...) } // HERE SCAN FOR ENDPOINTS - if endpointsFlag { - endpointsSlice := huntEndpoints(endpointsFile, r.Request.URL.String()) + if scan.EndpointsFlag { + endpointsSlice := huntEndpoints(scan.EndpointsFile, r.Request.URL.String()) for _, elem := range endpointsSlice { if len(elem.Parameters) != 0 { results.Endpoints = append(results.Endpoints, elem) @@ -238,20 +268,20 @@ func New(target string, txt string, html string, delayTime int, concurrency int, } } // HERE SCAN FOR EXTENSIONS - if 1 <= fileType && fileType <= 7 { - extension := huntExtensions(r.Request.URL.String(), fileType) + if 1 <= scan.FileType && scan.FileType <= 7 { + extension := huntExtensions(r.Request.URL.String(), scan.FileType) if extension.URL != "" { results.Extensions = append(results.Extensions, extension) } } // HERE SCAN FOR ERRORS - if errorsFlag { + if scan.ErrorsFlag { errorsSlice := huntErrors(r.Request.URL.String(), string(r.Body)) results.Errors = append(results.Errors, errorsSlice...) } // HERE SCAN FOR INFOS - if infoFlag { + if scan.InfoFlag { infosSlice := huntInfos(r.Request.URL.String(), string(r.Body)) results.Infos = append(results.Infos, infosSlice...) } @@ -259,7 +289,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, }) // Start scraping on target - path, err := urlUtils.GetPath(protocolTemp + "://" + target) + path, err := urlUtils.GetPath(protocolTemp + "://" + scan.Target) if err == nil { var ( addPath string @@ -270,25 +300,25 @@ func New(target string, txt string, html string, delayTime int, concurrency int, addPath = "/" } - absoluteURL = protocolTemp + "://" + target + addPath + "robots.txt" + absoluteURL = protocolTemp + "://" + scan.Target + addPath + "robots.txt" if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { err = c.Visit(absoluteURL) - if err != nil && debug && !errors.Is(err, colly.ErrAlreadyVisited) { + if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { log.Println(err) } } - absoluteURL = protocolTemp + "://" + target + addPath + "sitemap.xml" + absoluteURL = protocolTemp + "://" + scan.Target + addPath + "sitemap.xml" if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { err = c.Visit(absoluteURL) - if err != nil && debug && !errors.Is(err, colly.ErrAlreadyVisited) { + if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { log.Println(err) } } } - err = c.Visit(protocolTemp + "://" + target) - if err != nil && debug && !errors.Is(err, colly.ErrAlreadyVisited) { + err = c.Visit(protocolTemp + "://" + scan.Target) + if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { log.Println(err) } @@ -306,7 +336,7 @@ func New(target string, txt string, html string, delayTime int, concurrency int, os.Exit(1) } - if !plain { + if !scan.Plain { fmt.Fprint(os.Stdout, "\r") fmt.Println("CTRL+C pressed: Exiting") cCount++ @@ -318,8 +348,8 @@ func New(target string, txt string, html string, delayTime int, concurrency int, c.Wait() - if html != "" 
{ - output.FooterHTML(html) + if scan.Html != "" { + output.FooterHTML(scan.Html) } return results From c5159977f6a8bbe993f2bbdfeaab43c29504c203 Mon Sep 17 00:00:00 2001 From: Cedric Brisson Date: Wed, 16 Nov 2022 19:49:30 -0500 Subject: [PATCH 07/22] Fix linting issues --- pkg/crawler/colly.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 91690c0..bff89e4 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -77,7 +77,7 @@ type Scan struct { Ignore string IgnoreTxt string - Html string + HTML string Proxy string Target string Txt string @@ -252,7 +252,8 @@ func New(scan *Scan) *Results { lengthOk := len(string(r.Body)) > minBodyLentgh // if endpoints or secrets or filetype: scan - if scan.EndpointsFlag || scan.SecretsFlag || (1 <= scan.FileType && scan.FileType <= 7) || scan.ErrorsFlag || scan.InfoFlag { + if scan.EndpointsFlag || scan.SecretsFlag || + (1 <= scan.FileType && scan.FileType <= 7) || scan.ErrorsFlag || scan.InfoFlag { // HERE SCAN FOR SECRETS if scan.SecretsFlag && lengthOk { secretsSlice := huntSecrets(scan.SecretsFile, r.Request.URL.String(), string(r.Body)) @@ -348,8 +349,8 @@ func New(scan *Scan) *Results { c.Wait() - if scan.Html != "" { - output.FooterHTML(scan.Html) + if scan.HTML != "" { + output.FooterHTML(scan.HTML) } return results From eb9a91e2688245a201d8078446ef87b0ce63dad1 Mon Sep 17 00:00:00 2001 From: edoardottt Date: Thu, 17 Nov 2022 08:31:03 +0100 Subject: [PATCH 08/22] linting --- cmd/cariddi/main.go | 4 ++-- pkg/crawler/colly.go | 51 +++++++++++--------------------------------- pkg/crawler/utils.go | 23 ++++++++++++++++++++ 3 files changed, 38 insertions(+), 40 deletions(-) diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go index 57217cf..a43891a 100644 --- a/cmd/cariddi/main.go +++ b/cmd/cariddi/main.go @@ -96,13 +96,13 @@ func main() { // If it is needed, read custom endpoints definition // from the specified file. if flags.EndpointsFile != "" { - config.EndpointsFile = fileUtils.ReadFile(flags.EndpointsFile) + config.EndpointsSlice = fileUtils.ReadFile(flags.EndpointsFile) } // If it is needed, read custom secrets definition // from the specified file. if flags.SecretsFile != "" { - config.SecretsFile = fileUtils.ReadFile(flags.SecretsFile) + config.SecretsSlice = fileUtils.ReadFile(flags.SecretsFile) } finalResults := []string{} diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index bff89e4..931298d 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -68,26 +68,24 @@ type Scan struct { Plain bool Rua bool SecretsFlag bool + Ignore string + IgnoreTxt string + HTML string + Proxy string + Target string + Txt string + UserAgent string + FileType int + Headers map[string]string // Settings Concurrency int Delay int - FileType int Timeout int - Ignore string - IgnoreTxt string - HTML string - Proxy string - Target string - Txt string - UserAgent string - // Storage - SecretsFile []string - EndpointsFile []string - - Headers map[string]string + SecretsSlice []string + EndpointsSlice []string } // New it's the actual crawler engine. 
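After PATCH 06/22 through 08/22, callers no longer thread twenty-odd positional arguments through crawler.New: they fill in a Scan struct and read everything back from the single Results value. A minimal sketch of the resulting call site, using only the exported names visible in the hunks above (target and field values are illustrative):

    package main

    import (
        "fmt"

        "github.com/edoardottt/cariddi/pkg/crawler"
    )

    func main() {
        scan := &crawler.Scan{
            Target:      "example.com", // illustrative target
            Concurrency: 20,
            Timeout:     10,
            SecretsFlag: true, // hunt for secrets while crawling
        }

        results := crawler.New(scan)

        fmt.Println("URLs crawled:", len(results.URLs))
        for _, secret := range results.Secrets {
            fmt.Println(secret.URL, secret.Match)
        }
    }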
@@ -256,12 +254,12 @@ func New(scan *Scan) *Results { (1 <= scan.FileType && scan.FileType <= 7) || scan.ErrorsFlag || scan.InfoFlag { // HERE SCAN FOR SECRETS if scan.SecretsFlag && lengthOk { - secretsSlice := huntSecrets(scan.SecretsFile, r.Request.URL.String(), string(r.Body)) + secretsSlice := huntSecrets(scan.SecretsSlice, r.Request.URL.String(), string(r.Body)) results.Secrets = append(results.Secrets, secretsSlice...) } // HERE SCAN FOR ENDPOINTS if scan.EndpointsFlag { - endpointsSlice := huntEndpoints(scan.EndpointsFile, r.Request.URL.String()) + endpointsSlice := huntEndpoints(scan.EndpointsSlice, r.Request.URL.String()) for _, elem := range endpointsSlice { if len(elem.Parameters) != 0 { results.Endpoints = append(results.Endpoints, elem) @@ -416,26 +414,3 @@ func CreateColly(delayTime int, concurrency int, cache bool, timeout int, return c } - -// visitHTMLLink checks if the collector should visit a link or not. -func visitHTMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, - ignoreSlice []string, finalResults *[]string, e *colly.HTMLElement, c *colly.Collector) { - if len(link) != 0 { - absoluteURL := urlUtils.AbsoluteURL(protocolTemp, targetTemp, e.Request.AbsoluteURL(link)) - // Visit link found on page - // Only those links are visited which are in AllowedDomains - if (!intensive && urlUtils.SameDomain(protocolTemp+"://"+target, absoluteURL)) || - (intensive && intensiveOk(targetTemp, absoluteURL, debug)) { - if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { - err := c.Visit(absoluteURL) - if !errors.Is(err, colly.ErrAlreadyVisited) { - *finalResults = append(*finalResults, absoluteURL) - - if err != nil && debug { - log.Println(err) - } - } - } - } - } -} diff --git a/pkg/crawler/utils.go b/pkg/crawler/utils.go index 214e7f2..988bc6d 100644 --- a/pkg/crawler/utils.go +++ b/pkg/crawler/utils.go @@ -12,6 +12,29 @@ import ( "github.com/gocolly/colly" ) +// visitHTMLLink checks if the collector should visit a link or not. +func visitHTMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, + ignoreSlice []string, finalResults *[]string, e *colly.HTMLElement, c *colly.Collector) { + if len(link) != 0 { + absoluteURL := urlUtils.AbsoluteURL(protocolTemp, targetTemp, e.Request.AbsoluteURL(link)) + // Visit link found on page + // Only those links are visited which are in AllowedDomains + if (!intensive && urlUtils.SameDomain(protocolTemp+"://"+target, absoluteURL)) || + (intensive && intensiveOk(targetTemp, absoluteURL, debug)) { + if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { + err := c.Visit(absoluteURL) + if !errors.Is(err, colly.ErrAlreadyVisited) { + *finalResults = append(*finalResults, absoluteURL) + + if err != nil && debug { + log.Println(err) + } + } + } + } + } +} + // visitXMLLink checks if the collector should visit a link or not. 
func visitXMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, ignoreSlice []string, finalResults *[]string, e *colly.XMLElement, c *colly.Collector) { From 7ce0f4e739dbf789990ce2bc315db856312cafda Mon Sep 17 00:00:00 2001 From: edoardottt Date: Fri, 18 Nov 2022 21:32:27 +0100 Subject: [PATCH 09/22] insecure by default --- pkg/crawler/colly.go | 13 +++++-------- pkg/input/flags.go | 3 --- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 931298d..5668cc6 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -63,7 +63,6 @@ type Scan struct { EndpointsFlag bool ErrorsFlag bool InfoFlag bool - Insecure bool Intensive bool Plain bool Rua bool @@ -142,7 +141,7 @@ func New(scan *Scan) *Results { // crawler creation c := CreateColly(scan.Delay, scan.Concurrency, scan.Cache, scan.Timeout, - scan.Intensive, scan.Rua, scan.Proxy, scan.Insecure, scan.UserAgent, scan.Target) + scan.Intensive, scan.Rua, scan.Proxy, scan.UserAgent, scan.Target) // On every request that Colly is making, print the URL it's currently visiting c.OnRequest(func(e *colly.Request) { @@ -357,7 +356,7 @@ func New(scan *Scan) *Results { // CreateColly takes as input all the settings needed to instantiate // a new Colly Collector object and it returns this object. func CreateColly(delayTime int, concurrency int, cache bool, timeout int, - intensive bool, rua bool, proxy string, insecure bool, userAgent string, target string) *colly.Collector { + intensive bool, rua bool, proxy string, userAgent string, target string) *colly.Collector { c := colly.NewCollector( colly.Async(true), ) @@ -406,11 +405,9 @@ func CreateColly(delayTime int, concurrency int, cache bool, timeout int, } } - if insecure { - c.WithTransport(&http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - }) - } + c.WithTransport(&http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + }) return c } diff --git a/pkg/input/flags.go b/pkg/input/flags.go index 32c5352..a632e4b 100644 --- a/pkg/input/flags.go +++ b/pkg/input/flags.go @@ -53,7 +53,6 @@ type Input struct { Intensive bool Rua bool Proxy string - Insecure bool Secrets bool SecretsFile string Endpoints bool @@ -86,7 +85,6 @@ func ScanFlag() Input { intensivePtr := flag.Bool("intensive", false, "Crawl searching for resources matching 2nd level domain.") ruaPtr := flag.Bool("rua", false, "Use a random browser user agent on every request.") proxyPtr := flag.String("proxy", "", "Set a Proxy to be used (http and socks5 supported).") - insecurePtr := flag.Bool("insecure", false, "Ignore invalid HTTPS certificates") secretsPtr := flag.Bool("s", false, "Hunt for secrets.") secretsFilePtr := flag.String("sf", "", "Use an external file (txt, one per line)"+ @@ -129,7 +127,6 @@ func ScanFlag() Input { *intensivePtr, *ruaPtr, *proxyPtr, - *insecurePtr, *secretsPtr, *secretsFilePtr, *endpointsPtr, From 43fe15cf587f9030e0cc0b9f583ed32bf4a61e25 Mon Sep 17 00:00:00 2001 From: edoardottt Date: Mon, 21 Nov 2022 21:21:45 +0100 Subject: [PATCH 10/22] Fix Slack webhook regex --- pkg/scanner/secrets.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/scanner/secrets.go b/pkg/scanner/secrets.go index b005d74..81b49a9 100644 --- a/pkg/scanner/secrets.go +++ b/pkg/scanner/secrets.go @@ -239,7 +239,7 @@ func GetSecretRegexes() []Secret { { "Slack Webhook", "Slack Webhook", - `https://hooks.slack.com/services/T[0-9A-Za-z\-_]{8}/B[0-9A-Za-z\-_]{8}/[0-9A-Za-z\-_]{24}`, + 
`https\:\/\/hooks\.slack\.com/services/T[0-9A-Za-z\-_]{8}/B[0-9A-Za-z\-_]{8}/[0-9A-Za-z\-_]{24}`, []string{}, "?", }, From b5d63417f7c8fe41b809cd1c9b4815d009ed8a1c Mon Sep 17 00:00:00 2001 From: edoardottt Date: Thu, 8 Dec 2022 12:11:17 +0100 Subject: [PATCH 11/22] fix robots and sitemap --- pkg/crawler/colly.go | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 5668cc6..1153bc9 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -298,19 +298,21 @@ func New(scan *Scan) *Results { addPath = "/" } - absoluteURL = protocolTemp + "://" + scan.Target + addPath + "robots.txt" - if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { - err = c.Visit(absoluteURL) - if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { - log.Println(err) + if path == "" || path == "/" { + absoluteURL = protocolTemp + "://" + scan.Target + addPath + "robots.txt" + if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { + err = c.Visit(absoluteURL) + if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { + log.Println(err) + } } - } - absoluteURL = protocolTemp + "://" + scan.Target + addPath + "sitemap.xml" - if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { - err = c.Visit(absoluteURL) - if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { - log.Println(err) + absoluteURL = protocolTemp + "://" + scan.Target + addPath + "sitemap.xml" + if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { + err = c.Visit(absoluteURL) + if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) { + log.Println(err) + } } } } From c1de7ff99988245b6e0221370c56742db5907e3e Mon Sep 17 00:00:00 2001 From: edoardottt Date: Thu, 15 Dec 2022 20:38:43 +0100 Subject: [PATCH 12/22] update BTC address regex --- pkg/scanner/info.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/scanner/info.go b/pkg/scanner/info.go index f45e9d5..b2524af 100644 --- a/pkg/scanner/info.go +++ b/pkg/scanner/info.go @@ -68,7 +68,7 @@ func GetInfoRegexes() []Info { { "BTC address", []string{ - `[13][a-km-zA-HJ-NP-Z1-9]{25,34}`}, + `([13]|bc1)[A-HJ-NP-Za-km-z1-9]{27,34}`}, }, /* HOW TO AVOID VERY VERY LONG BASE64 IMAGES ??? 
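
A quick way to sanity-check the two regex updates above — the escaped Slack webhook rule from patch 10 and the broadened BTC address rule from patch 12 — is a tiny standalone Go program. This is only an illustrative sketch: the inputs are synthetic strings shaped to fit the character classes, not real webhook URLs or addresses.

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Slack webhook rule as updated in pkg/scanner/secrets.go (patch 10).
	slack := regexp.MustCompile(
		`https\:\/\/hooks\.slack\.com/services/T[0-9A-Za-z\-_]{8}/B[0-9A-Za-z\-_]{8}/[0-9A-Za-z\-_]{24}`)

	// BTC address rule as updated in pkg/scanner/info.go (patch 12):
	// legacy 1.../3... prefixes plus bech32-style bc1... prefixes.
	btc := regexp.MustCompile(`([13]|bc1)[A-HJ-NP-Za-km-z1-9]{27,34}`)

	// Synthetic test values only.
	fmt.Println(slack.MatchString(
		"https://hooks.slack.com/services/TAAAAAAAA/BAAAAAAAA/XXXXXXXXXXXXXXXXXXXXXXXX")) // true
	fmt.Println(btc.MatchString("1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2")) // true (legacy-style)
	fmt.Println(btc.MatchString("bc1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqq"))  // true (bech32-style)
	fmt.Println(btc.MatchString("no address here"))                    // false
}
```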
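
Patch 11's guard is easier to see outside the hunk context: the robots.txt and sitemap.xml probes should fire only when the supplied target has no path component, i.e. a bare host or the root. Below is a minimal sketch of that decision, using a hypothetical helper name that is not part of the cariddi codebase.

```go
package main

import (
	"fmt"
	"net/url"
)

// shouldProbeWellKnown reports whether robots.txt / sitemap.xml probes make
// sense for a target: only when the target is a bare host or its root path.
// Hypothetical helper for illustration, not part of cariddi.
func shouldProbeWellKnown(target string) bool {
	u, err := url.Parse("http://" + target)
	if err != nil {
		return false
	}

	return u.Path == "" || u.Path == "/"
}

func main() {
	fmt.Println(shouldProbeWellKnown("example.com"))          // true
	fmt.Println(shouldProbeWellKnown("example.com/"))         // true
	fmt.Println(shouldProbeWellKnown("example.com/app/docs")) // false
}
```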
From a28ea974d86fbfaabf501ad232514bcd9e417668 Mon Sep 17 00:00:00 2001 From: edoardottt Date: Thu, 15 Dec 2022 20:46:57 +0100 Subject: [PATCH 13/22] mod update --- go.mod | 21 +++++++++++---------- go.sum | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/go.mod b/go.mod index e274d39..f65109e 100644 --- a/go.mod +++ b/go.mod @@ -10,19 +10,20 @@ require ( require ( github.com/PuerkitoBio/goquery v1.8.0 // indirect github.com/andybalholm/cascadia v1.3.1 // indirect - github.com/antchfx/htmlquery v1.2.4 // indirect - github.com/antchfx/xmlquery v1.3.10 // indirect - github.com/antchfx/xpath v1.2.0 // indirect + github.com/antchfx/htmlquery v1.2.5 // indirect + github.com/antchfx/xmlquery v1.3.13 // indirect + github.com/antchfx/xpath v1.2.1 // indirect github.com/gobwas/glob v0.2.3 // indirect - github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect - github.com/golang/protobuf v1.3.1 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/protobuf v1.5.2 // indirect github.com/kennygrant/sanitize v1.2.4 // indirect - github.com/mattn/go-colorable v0.1.9 // indirect - github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.16 // indirect github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect github.com/temoto/robotstxt v1.1.2 // indirect - golang.org/x/net v0.0.0-20210916014120-12bc252f5db8 // indirect - golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect - golang.org/x/text v0.3.6 // indirect + golang.org/x/net v0.4.0 // indirect + golang.org/x/sys v0.3.0 // indirect + golang.org/x/text v0.5.0 // indirect google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.28.1 // indirect ) diff --git a/go.sum b/go.sum index cd76f61..7cb43d5 100644 --- a/go.sum +++ b/go.sum @@ -4,10 +4,16 @@ github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x0 github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= github.com/antchfx/htmlquery v1.2.4 h1:qLteofCMe/KGovBI6SQgmou2QNyedFUW+pE+BpeZ494= github.com/antchfx/htmlquery v1.2.4/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc= +github.com/antchfx/htmlquery v1.2.5 h1:1lXnx46/1wtv1E/kzmH8vrfMuUKYgkdDBA9pIdMJnk4= +github.com/antchfx/htmlquery v1.2.5/go.mod h1:2MCVBzYVafPBmKbrmwB9F5xdd+IEgRY61ci2oOsOQVw= github.com/antchfx/xmlquery v1.3.10 h1:U2yMwr8U0KmGM2iDG2Ky/3LfxNsiK4uw1bSBkeMO9+g= github.com/antchfx/xmlquery v1.3.10/go.mod h1:wojC/BxjEkjJt6dPiAqUzoXO5nIMWtxHS8PD8TmN4ks= +github.com/antchfx/xmlquery v1.3.13 h1:wqhTv2BN5MzYg9rnPVtZb3IWP8kW6WV/ebAY0FCTI7Y= +github.com/antchfx/xmlquery v1.3.13/go.mod h1:3w2RvQvTz+DaT5fSgsELkSJcdNgkmg6vuXDEuhdwsPQ= github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8= github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/antchfx/xpath v1.2.1 h1:qhp4EW6aCOVr5XIkT+l6LJ9ck/JsUH/yyauNgTQkBF8= +github.com/antchfx/xpath v1.2.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= @@ -18,15 +24,25 @@ github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI= 
github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI= @@ -44,6 +60,9 @@ golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20210916014120-12bc252f5db8 h1:/6y1LfuqNuQdHAm0jjtPtgRcxIxjVZgm5OTu8/QhZvk= golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.4.0 h1:Q5QPcMlvfxFTAPV0+07Xz/MpK9NTXu2VDUuy0FeMfaU= +golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -51,13 +70,27 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= From 806a69a23b6929dcd9c70627aa02347ccf346468 Mon Sep 17 00:00:00 2001 From: edoardottt Date: Thu, 15 Dec 2022 20:49:26 +0100 Subject: [PATCH 14/22] fix endpoints --- pkg/scanner/endpoints.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pkg/scanner/endpoints.go b/pkg/scanner/endpoints.go index 463c7c6..7f71acc 100644 --- a/pkg/scanner/endpoints.go +++ b/pkg/scanner/endpoints.go @@ -83,6 +83,11 @@ func GetJuicyParameters() []Parameter { {"thread", []string{"SQLi"}}, {"type", []string{"SQLi", "LFI", "XSS"}}, {"date", []string{"SQLi", "LFI", "XSS"}}, + {"img", []string{"OpenRedir", "SSRF", "SQLi", "LFI", "XSS"}}, + {"img_url", []string{"OpenRedir", "SSRF", "SQLi", "LFI", "XSS"}}, + {"img-url", []string{"OpenRedir", "SSRF", "SQLi", "LFI", "XSS"}}, + {"img-src", []string{"OpenRedir", "SSRF", "SQLi", "LFI", "XSS"}}, + {"src", []string{"OpenRedir", "SSRF", "SQLi", "LFI", "XSS"}}, {"form", []string{"SQLi"}}, {"join", []string{"SQLi"}}, {"main", []string{"SQLi"}}, @@ -161,11 +166,9 @@ func GetJuicyParameters() []Parameter { {"domain", []string{"SSRF"}}, {"callback", []string{"SSRF"}}, {"feed", 
[]string{"SSRF"}}, - {"host", []string{"SSRF"}}, {"port", []string{"SSRF"}}, {"to", []string{"SSRF"}}, {"host", []string{"SSRF"}}, - {"host", []string{"SSRF"}}, {"q", []string{"XSS"}}, {"keyword", []string{"XSS"}}, {"keywords", []string{"XSS"}}, From 3548f1de19cace80529ab42b9e697106a7c41bf8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 19:18:54 +0000 Subject: [PATCH 15/22] chore(deps): bump github.com/fatih/color from 1.13.0 to 1.14.1 Bumps [github.com/fatih/color](https://github.com/fatih/color) from 1.13.0 to 1.14.1. - [Release notes](https://github.com/fatih/color/releases) - [Commits](https://github.com/fatih/color/compare/v1.13.0...v1.14.1) --- updated-dependencies: - dependency-name: github.com/fatih/color dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- go.mod | 4 ++-- go.sum | 47 +++++++++++++++++++++-------------------------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/go.mod b/go.mod index f65109e..16f47c7 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/edoardottt/cariddi go 1.17 require ( - github.com/fatih/color v1.13.0 + github.com/fatih/color v1.14.1 github.com/gocolly/colly v1.2.0 ) @@ -18,7 +18,7 @@ require ( github.com/golang/protobuf v1.5.2 // indirect github.com/kennygrant/sanitize v1.2.4 // indirect github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.16 // indirect + github.com/mattn/go-isatty v0.0.17 // indirect github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect github.com/temoto/robotstxt v1.1.2 // indirect golang.org/x/net v0.4.0 // indirect diff --git a/go.sum b/go.sum index 7cb43d5..acb71b0 100644 --- a/go.sum +++ b/go.sum @@ -2,47 +2,36 @@ github.com/PuerkitoBio/goquery v1.8.0 h1:PJTF7AmFCFKk1N6V6jmKfrNH9tV5pNE6lZMkG0g github.com/PuerkitoBio/goquery v1.8.0/go.mod h1:ypIiRMtY7COPGk+I/YbZLbxsxn9g5ejnI2HSMtkjZvI= github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c= github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= -github.com/antchfx/htmlquery v1.2.4 h1:qLteofCMe/KGovBI6SQgmou2QNyedFUW+pE+BpeZ494= -github.com/antchfx/htmlquery v1.2.4/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc= github.com/antchfx/htmlquery v1.2.5 h1:1lXnx46/1wtv1E/kzmH8vrfMuUKYgkdDBA9pIdMJnk4= github.com/antchfx/htmlquery v1.2.5/go.mod h1:2MCVBzYVafPBmKbrmwB9F5xdd+IEgRY61ci2oOsOQVw= -github.com/antchfx/xmlquery v1.3.10 h1:U2yMwr8U0KmGM2iDG2Ky/3LfxNsiK4uw1bSBkeMO9+g= -github.com/antchfx/xmlquery v1.3.10/go.mod h1:wojC/BxjEkjJt6dPiAqUzoXO5nIMWtxHS8PD8TmN4ks= github.com/antchfx/xmlquery v1.3.13 h1:wqhTv2BN5MzYg9rnPVtZb3IWP8kW6WV/ebAY0FCTI7Y= github.com/antchfx/xmlquery v1.3.13/go.mod h1:3w2RvQvTz+DaT5fSgsELkSJcdNgkmg6vuXDEuhdwsPQ= -github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8= -github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/antchfx/xpath v1.2.1 h1:qhp4EW6aCOVr5XIkT+l6LJ9ck/JsUH/yyauNgTQkBF8= github.com/antchfx/xpath v1.2.1/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= -github.com/fatih/color v1.13.0/go.mod 
h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w= +github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI= github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= -github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= +github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI= @@ -52,41 +41,47 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/temoto/robotstxt v1.1.2 h1:W2pOjSJ6SWvldyEuiFXNxz3xZ8aiWX5LbfDiOFd7Fxg= github.com/temoto/robotstxt v1.1.2/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20210916014120-12bc252f5db8 h1:/6y1LfuqNuQdHAm0jjtPtgRcxIxjVZgm5OTu8/QhZvk= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.4.0 h1:Q5QPcMlvfxFTAPV0+07Xz/MpK9NTXu2VDUuy0FeMfaU= golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term 
v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= From b9734d7914582501ec45e5ea00ae753bf0f6979a Mon Sep 17 00:00:00 2001 From: vrenzolaverace Date: Tue, 31 Jan 2023 08:47:18 +0100 Subject: [PATCH 16/22] Update dependabot.yml --- .github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index d9508b4..8cd4ada 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,4 +11,4 @@ updates: prefix: "chore" include: "scope" labels: - - "Type: Maintenance" + - "Maintenance" From 603e2f072b9cdbc5f8577eeba2b0738a721a5e89 Mon Sep 17 00:00:00 2001 From: edoardottt Date: Sat, 11 Feb 2023 16:53:57 +0100 Subject: [PATCH 17/22] update main --- cmd/cariddi/main.go | 216 -------------------------------------------- go.mod | 2 +- go.sum | 15 --- 3 files changed, 1 insertion(+), 232 deletions(-) delete mode 100644 cmd/cariddi/main.go diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go deleted file mode 100644 index a43891a..0000000 --- a/cmd/cariddi/main.go +++ /dev/null @@ -1,216 +0,0 @@ -/* -========== -Cariddi -========== - -This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program. If not, see http://www.gnu.org/licenses/. 
- - @Repository: https://github.com/edoardottt/cariddi - - @Author: edoardottt, https://www.edoardoottavianelli.it - - @License: https://github.com/edoardottt/cariddi/blob/main/LICENSE - -*/ - -package main - -import ( - "os" - - fileUtils "github.com/edoardottt/cariddi/internal/file" - sliceUtils "github.com/edoardottt/cariddi/internal/slice" - "github.com/edoardottt/cariddi/pkg/crawler" - "github.com/edoardottt/cariddi/pkg/input" - "github.com/edoardottt/cariddi/pkg/output" - "github.com/edoardottt/cariddi/pkg/scanner" -) - -// main function. -func main() { - // Scan flags. - flags := input.ScanFlag() - - // Print version and exit. - if flags.Version { - output.Beautify() - os.Exit(0) - } - - // Print help and exit. - if flags.Help { - output.PrintHelp() - os.Exit(0) - } - - // Print examples and exit. - if flags.Examples { - output.PrintExamples() - os.Exit(0) - } - - // If it's possible print the cariddi banner. - if !flags.Plain { - output.Beautify() - } - - // Setup the config according to the flags that were - // passed via the CLI - config := &crawler.Scan{ - Delay: flags.Delay, - Concurrency: flags.Concurrency, - Ignore: flags.Ignore, - IgnoreTxt: flags.IgnoreTXT, - Cache: flags.Cache, - Timeout: flags.Timeout, - Intensive: flags.Intensive, - Rua: flags.Rua, - Proxy: flags.Proxy, - SecretsFlag: flags.Secrets, - Plain: flags.Plain, - EndpointsFlag: flags.Endpoints, - FileType: flags.Extensions, - ErrorsFlag: flags.Errors, - InfoFlag: flags.Info, - Debug: flags.Debug, - UserAgent: flags.UserAgent, - } - - // Read the targets from standard input. - targets := input.ScanTargets() - - // Check if there are errors in the flags definition. - input.CheckFlags(flags) - - // If it is needed, read custom endpoints definition - // from the specified file. - if flags.EndpointsFile != "" { - config.EndpointsSlice = fileUtils.ReadFile(flags.EndpointsFile) - } - - // If it is needed, read custom secrets definition - // from the specified file. - if flags.SecretsFile != "" { - config.SecretsSlice = fileUtils.ReadFile(flags.SecretsFile) - } - - finalResults := []string{} - finalSecret := []scanner.SecretMatched{} - finalEndpoints := []scanner.EndpointMatched{} - finalExtensions := []scanner.FileTypeMatched{} - finalErrors := []scanner.ErrorMatched{} - finalInfos := []scanner.InfoMatched{} - - // Create output files if needed (txt / html). - config.Txt = "" - if flags.TXT != "" { - config.Txt = fileUtils.CreateOutputFile(flags.TXT, "results", "txt") - } - - var ResultHTML = "" - if flags.HTML != "" { - ResultHTML = fileUtils.CreateOutputFile(flags.HTML, "", "html") - output.BannerHTML(ResultHTML) - output.HeaderHTML("Results", ResultHTML) - } - - // Read headers if needed - if flags.HeadersFile != "" || flags.Headers != "" { - var headersInput string - if flags.HeadersFile != "" { - headersInput = string(fileUtils.ReadEntireFile(flags.HeadersFile)) - } else { - headersInput = flags.Headers - } - - config.Headers = input.GetHeaders(headersInput) - } - - // For each target generate a crawler and collect all the results. - for _, target := range targets { - config.Target = target - results := crawler.New(config) - finalResults = append(finalResults, results.URLs...) - finalSecret = append(finalSecret, results.Secrets...) - finalEndpoints = append(finalEndpoints, results.Endpoints...) - finalExtensions = append(finalExtensions, results.Extensions...) - finalErrors = append(finalErrors, results.Errors...) - finalInfos = append(finalInfos, results.Infos...) 
- } - - // Remove duplicates from all the results. - finalResults = sliceUtils.RemoveDuplicateValues(finalResults) - finalSecret = scanner.RemoveDuplicateSecrets(finalSecret) - finalEndpoints = scanner.RemovDuplicateEndpoints(finalEndpoints) - finalExtensions = scanner.RemoveDuplicateExtensions(finalExtensions) - finalErrors = scanner.RemoveDuplicateErrors(finalErrors) - finalInfos = scanner.RemoveDuplicateInfos(finalInfos) - - // IF TXT OUTPUT > - if flags.TXT != "" { - output.TxtOutput(flags, finalResults, finalSecret, finalEndpoints, - finalExtensions, finalErrors, finalInfos) - } - - // IF HTML OUTPUT > - if flags.HTML != "" { - output.HTMLOutput(flags, ResultHTML, finalResults, finalSecret, - finalEndpoints, finalExtensions, finalErrors, finalInfos) - } - - // If needed print secrets. - if !flags.Plain && len(finalSecret) != 0 { - for _, elem := range finalSecret { - output.EncapsulateCustomGreen(elem.Secret.Name, elem.Match+" in "+elem.URL) - } - } - - // If needed print endpoints. - if !flags.Plain && len(finalEndpoints) != 0 { - for _, elem := range finalEndpoints { - for _, parameter := range elem.Parameters { - finalString := "" + parameter.Parameter - if len(parameter.Attacks) != 0 { - finalString += " -" - for _, attack := range parameter.Attacks { - finalString += " " + attack - } - } - - output.EncapsulateCustomGreen(finalString, " in "+elem.URL) - } - } - } - - // If needed print extensions. - if !flags.Plain && len(finalExtensions) != 0 { - for _, elem := range finalExtensions { - output.EncapsulateCustomGreen(elem.Filetype.Extension, elem.URL+" matched!") - } - } - - // If needed print errors. - if !flags.Plain && len(finalErrors) != 0 { - for _, elem := range finalErrors { - output.EncapsulateCustomGreen(elem.Error.ErrorName, elem.Match+" in "+elem.URL) - } - } - - // If needed print infos. 
- if !flags.Plain && len(finalInfos) != 0 { - for _, elem := range finalInfos { - output.EncapsulateCustomGreen(elem.Info.Name, elem.Match+" in "+elem.URL) - } - } -} diff --git a/go.mod b/go.mod index 16f47c7..54986e8 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/edoardottt/cariddi -go 1.17 +go 1.18 require ( github.com/fatih/color v1.14.1 diff --git a/go.sum b/go.sum index acb71b0..6fb100d 100644 --- a/go.sum +++ b/go.sum @@ -41,46 +41,31 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/temoto/robotstxt v1.1.2 h1:W2pOjSJ6SWvldyEuiFXNxz3xZ8aiWX5LbfDiOFd7Fxg= github.com/temoto/robotstxt v1.1.2/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.4.0 h1:Q5QPcMlvfxFTAPV0+07Xz/MpK9NTXu2VDUuy0FeMfaU= golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod 
h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.5.0 h1:OLmvp0KP+FVG99Ct/qFiL/Fhk4zp4QQnZ7b2U+5piUM= golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= From f706165955093e5dfc5c784fbcfd9d271b27dc7e Mon Sep 17 00:00:00 2001 From: vrenzolaverace Date: Sat, 11 Feb 2023 16:56:28 +0100 Subject: [PATCH 18/22] Create main.go --- cmd/cariddi/main.go | 216 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 216 insertions(+) create mode 100644 cmd/cariddi/main.go diff --git a/cmd/cariddi/main.go b/cmd/cariddi/main.go new file mode 100644 index 0000000..a43891a --- /dev/null +++ b/cmd/cariddi/main.go @@ -0,0 +1,216 @@ +/* +========== +Cariddi +========== + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program. If not, see http://www.gnu.org/licenses/. + + @Repository: https://github.com/edoardottt/cariddi + + @Author: edoardottt, https://www.edoardoottavianelli.it + + @License: https://github.com/edoardottt/cariddi/blob/main/LICENSE + +*/ + +package main + +import ( + "os" + + fileUtils "github.com/edoardottt/cariddi/internal/file" + sliceUtils "github.com/edoardottt/cariddi/internal/slice" + "github.com/edoardottt/cariddi/pkg/crawler" + "github.com/edoardottt/cariddi/pkg/input" + "github.com/edoardottt/cariddi/pkg/output" + "github.com/edoardottt/cariddi/pkg/scanner" +) + +// main function. +func main() { + // Scan flags. + flags := input.ScanFlag() + + // Print version and exit. + if flags.Version { + output.Beautify() + os.Exit(0) + } + + // Print help and exit. + if flags.Help { + output.PrintHelp() + os.Exit(0) + } + + // Print examples and exit. 
+ if flags.Examples { + output.PrintExamples() + os.Exit(0) + } + + // If it's possible print the cariddi banner. + if !flags.Plain { + output.Beautify() + } + + // Setup the config according to the flags that were + // passed via the CLI + config := &crawler.Scan{ + Delay: flags.Delay, + Concurrency: flags.Concurrency, + Ignore: flags.Ignore, + IgnoreTxt: flags.IgnoreTXT, + Cache: flags.Cache, + Timeout: flags.Timeout, + Intensive: flags.Intensive, + Rua: flags.Rua, + Proxy: flags.Proxy, + SecretsFlag: flags.Secrets, + Plain: flags.Plain, + EndpointsFlag: flags.Endpoints, + FileType: flags.Extensions, + ErrorsFlag: flags.Errors, + InfoFlag: flags.Info, + Debug: flags.Debug, + UserAgent: flags.UserAgent, + } + + // Read the targets from standard input. + targets := input.ScanTargets() + + // Check if there are errors in the flags definition. + input.CheckFlags(flags) + + // If it is needed, read custom endpoints definition + // from the specified file. + if flags.EndpointsFile != "" { + config.EndpointsSlice = fileUtils.ReadFile(flags.EndpointsFile) + } + + // If it is needed, read custom secrets definition + // from the specified file. + if flags.SecretsFile != "" { + config.SecretsSlice = fileUtils.ReadFile(flags.SecretsFile) + } + + finalResults := []string{} + finalSecret := []scanner.SecretMatched{} + finalEndpoints := []scanner.EndpointMatched{} + finalExtensions := []scanner.FileTypeMatched{} + finalErrors := []scanner.ErrorMatched{} + finalInfos := []scanner.InfoMatched{} + + // Create output files if needed (txt / html). + config.Txt = "" + if flags.TXT != "" { + config.Txt = fileUtils.CreateOutputFile(flags.TXT, "results", "txt") + } + + var ResultHTML = "" + if flags.HTML != "" { + ResultHTML = fileUtils.CreateOutputFile(flags.HTML, "", "html") + output.BannerHTML(ResultHTML) + output.HeaderHTML("Results", ResultHTML) + } + + // Read headers if needed + if flags.HeadersFile != "" || flags.Headers != "" { + var headersInput string + if flags.HeadersFile != "" { + headersInput = string(fileUtils.ReadEntireFile(flags.HeadersFile)) + } else { + headersInput = flags.Headers + } + + config.Headers = input.GetHeaders(headersInput) + } + + // For each target generate a crawler and collect all the results. + for _, target := range targets { + config.Target = target + results := crawler.New(config) + finalResults = append(finalResults, results.URLs...) + finalSecret = append(finalSecret, results.Secrets...) + finalEndpoints = append(finalEndpoints, results.Endpoints...) + finalExtensions = append(finalExtensions, results.Extensions...) + finalErrors = append(finalErrors, results.Errors...) + finalInfos = append(finalInfos, results.Infos...) + } + + // Remove duplicates from all the results. + finalResults = sliceUtils.RemoveDuplicateValues(finalResults) + finalSecret = scanner.RemoveDuplicateSecrets(finalSecret) + finalEndpoints = scanner.RemovDuplicateEndpoints(finalEndpoints) + finalExtensions = scanner.RemoveDuplicateExtensions(finalExtensions) + finalErrors = scanner.RemoveDuplicateErrors(finalErrors) + finalInfos = scanner.RemoveDuplicateInfos(finalInfos) + + // IF TXT OUTPUT > + if flags.TXT != "" { + output.TxtOutput(flags, finalResults, finalSecret, finalEndpoints, + finalExtensions, finalErrors, finalInfos) + } + + // IF HTML OUTPUT > + if flags.HTML != "" { + output.HTMLOutput(flags, ResultHTML, finalResults, finalSecret, + finalEndpoints, finalExtensions, finalErrors, finalInfos) + } + + // If needed print secrets. 
+ if !flags.Plain && len(finalSecret) != 0 { + for _, elem := range finalSecret { + output.EncapsulateCustomGreen(elem.Secret.Name, elem.Match+" in "+elem.URL) + } + } + + // If needed print endpoints. + if !flags.Plain && len(finalEndpoints) != 0 { + for _, elem := range finalEndpoints { + for _, parameter := range elem.Parameters { + finalString := "" + parameter.Parameter + if len(parameter.Attacks) != 0 { + finalString += " -" + for _, attack := range parameter.Attacks { + finalString += " " + attack + } + } + + output.EncapsulateCustomGreen(finalString, " in "+elem.URL) + } + } + } + + // If needed print extensions. + if !flags.Plain && len(finalExtensions) != 0 { + for _, elem := range finalExtensions { + output.EncapsulateCustomGreen(elem.Filetype.Extension, elem.URL+" matched!") + } + } + + // If needed print errors. + if !flags.Plain && len(finalErrors) != 0 { + for _, elem := range finalErrors { + output.EncapsulateCustomGreen(elem.Error.ErrorName, elem.Match+" in "+elem.URL) + } + } + + // If needed print infos. + if !flags.Plain && len(finalInfos) != 0 { + for _, elem := range finalInfos { + output.EncapsulateCustomGreen(elem.Info.Name, elem.Match+" in "+elem.URL) + } + } +} From c8b843f6dcfaad8b1c0c735e87fe96fa12909933 Mon Sep 17 00:00:00 2001 From: edoardottt Date: Sat, 11 Feb 2023 17:08:52 +0100 Subject: [PATCH 19/22] fix s3 rule --- pkg/scanner/secrets.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/scanner/secrets.go b/pkg/scanner/secrets.go index 81b49a9..bad7bf7 100644 --- a/pkg/scanner/secrets.go +++ b/pkg/scanner/secrets.go @@ -333,7 +333,7 @@ func GetSecretRegexes() []Secret { "S3 Bucket", `(?:[a-zA-Z0-9_-]+s3\.amazonaws\.com|[a-zA-Z0-9_.-]+amazonaws\.com|` + `[a-zA-Z0-9-\.\_]+\.s3\.amazonaws\.com|s3\:\/\/[a-zA-Z0-9-\.\_]+|` + - `s3-[a-zA-Z0-9-\.\_\/]+|s3\.amazonaws\.com/[a-zA-Z0-9-\.\_]+)`, + `s3\.amazonaws\.com/[a-zA-Z0-9-\.\_]+)`, []string{}, "?", }, From 339cd7b8960be5766cd9aa1eb107ae9608a2e5fe Mon Sep 17 00:00:00 2001 From: edoardottt Date: Sun, 12 Feb 2023 11:59:36 +0100 Subject: [PATCH 20/22] refactor collector --- pkg/crawler/colly.go | 185 +++++++++++++++++++++---------------------- pkg/crawler/utils.go | 92 ++++++++++----------- 2 files changed, 139 insertions(+), 138 deletions(-) diff --git a/pkg/crawler/colly.go b/pkg/crawler/colly.go index 1153bc9..fba1ce6 100644 --- a/pkg/crawler/colly.go +++ b/pkg/crawler/colly.go @@ -143,96 +143,19 @@ func New(scan *Scan) *Results { c := CreateColly(scan.Delay, scan.Concurrency, scan.Cache, scan.Timeout, scan.Intensive, scan.Rua, scan.Proxy, scan.UserAgent, scan.Target) - // On every request that Colly is making, print the URL it's currently visiting - c.OnRequest(func(e *colly.Request) { - fmt.Println(e.URL.String()) - }) - - // On every a element which has href attribute call callback - c.OnHTML("a[href]", func(e *colly.HTMLElement) { - link := e.Attr("href") - if len(link) != 0 && link[0] != '#' { - visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - } - }) - - // On every script element which has src attribute call callback - c.OnHTML("script[src]", func(e *colly.HTMLElement) { - link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // On every link element which has href attribute call callback - c.OnHTML("link[href]", func(e *colly.HTMLElement) { - link := e.Attr("href") - visitHTMLLink(link, 
protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // On every iframe element which has src attribute call callback - c.OnHTML("iframe[src]", func(e *colly.HTMLElement) { - link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // On every svg element which has src attribute call callback - c.OnHTML("svg[src]", func(e *colly.HTMLElement) { - link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // On every img element which has src attribute call callback - c.OnHTML("img[src]", func(e *colly.HTMLElement) { - link := e.Attr("src") - visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // On every from element which has action attribute call callback - c.OnHTML("form[action]", func(e *colly.HTMLElement) { - link := e.Attr("action") - visitHTMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // Create a callback on the XPath query searching for the URLs - c.OnXML("//url", func(e *colly.XMLElement) { - link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // Create a callback on the XPath query searching for the URLs - c.OnXML("//link", func(e *colly.XMLElement) { - link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // Create a callback on the XPath query searching for the URLs - c.OnXML("//href", func(e *colly.XMLElement) { - link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) - - // Create a callback on the XPath query searching for the URLs - c.OnXML("//loc", func(e *colly.XMLElement) { - link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) + event := &Event{ + ProtocolTemp: protocolTemp, + TargetTemp: targetTemp, + Target: scan.Target, + Intensive: scan.Intensive, + Ignore: ignoreBool, + Debug: scan.Debug, + IgnoreSlice: ignoreSlice, + URLs: &results.URLs, + } - // Create a callback on the XPath query searching for the URLs - c.OnXML("//fileurl", func(e *colly.XMLElement) { - link := e.Text - visitXMLLink(link, protocolTemp, targetTemp, scan.Target, scan.Intensive, - ignoreBool, scan.Debug, ignoreSlice, &results.URLs, e, c) - }) + registerHTMLEvents(c, event) + registerXMLEvents(c, event) // Add headers (if needed) on each request if (len(scan.Headers)) > 0 { @@ -253,12 +176,12 @@ func New(scan *Scan) *Results { (1 <= scan.FileType && scan.FileType <= 7) || scan.ErrorsFlag || scan.InfoFlag { // HERE SCAN FOR SECRETS if scan.SecretsFlag && lengthOk { - secretsSlice := huntSecrets(scan.SecretsSlice, r.Request.URL.String(), string(r.Body)) + secretsSlice := huntSecrets(r.Request.URL.String(), string(r.Body), &scan.SecretsSlice) results.Secrets = append(results.Secrets, secretsSlice...) 
 			}
 		// HERE SCAN FOR ENDPOINTS
 		if scan.EndpointsFlag {
-			endpointsSlice := huntEndpoints(scan.EndpointsSlice, r.Request.URL.String())
+			endpointsSlice := huntEndpoints(r.Request.URL.String(), &scan.EndpointsSlice)
 			for _, elem := range endpointsSlice {
 				if len(elem.Parameters) != 0 {
 					results.Endpoints = append(results.Endpoints, elem)
@@ -300,7 +223,7 @@ func New(scan *Scan) *Results {
 
 		if path == "" || path == "/" {
 			absoluteURL = protocolTemp + "://" + scan.Target + addPath + "robots.txt"
-			if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) {
+			if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, &ignoreSlice)) {
 				err = c.Visit(absoluteURL)
 				if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) {
 					log.Println(err)
@@ -308,7 +231,7 @@ func New(scan *Scan) *Results {
 			}
 
 			absoluteURL = protocolTemp + "://" + scan.Target + addPath + "sitemap.xml"
-			if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) {
+			if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, &ignoreSlice)) {
 				err = c.Visit(absoluteURL)
 				if err != nil && scan.Debug && !errors.Is(err, colly.ErrAlreadyVisited) {
 					log.Println(err)
@@ -413,3 +336,79 @@ func CreateColly(delayTime int, concurrency int, cache bool, timeout int,
 
 	return c
 }
+
+// registerHTMLEvents registers the associated functions for each
+// HTML event triggering an action.
+func registerHTMLEvents(c *colly.Collector, event *Event) {
+	// On every request that Colly is making, print the URL it's currently visiting
+	c.OnRequest(func(e *colly.Request) {
+		fmt.Println(e.URL.String())
+	})
+
+	// On every a element which has href attribute call callback
+	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
+		link := e.Attr("href")
+		if len(link) != 0 && link[0] != '#' {
+			visitHTMLLink(link, event, e, c)
+		}
+	})
+
+	// On every script element which has src attribute call callback
+	c.OnHTML("script[src]", func(e *colly.HTMLElement) {
+		visitHTMLLink(e.Attr("src"), event, e, c)
+	})
+
+	// On every link element which has href attribute call callback
+	c.OnHTML("link[href]", func(e *colly.HTMLElement) {
+		visitHTMLLink(e.Attr("href"), event, e, c)
+	})
+
+	// On every iframe element which has src attribute call callback
+	c.OnHTML("iframe[src]", func(e *colly.HTMLElement) {
+		visitHTMLLink(e.Attr("src"), event, e, c)
+	})
+
+	// On every svg element which has src attribute call callback
+	c.OnHTML("svg[src]", func(e *colly.HTMLElement) {
+		visitHTMLLink(e.Attr("src"), event, e, c)
+	})
+
+	// On every img element which has src attribute call callback
+	c.OnHTML("img[src]", func(e *colly.HTMLElement) {
+		visitHTMLLink(e.Attr("src"), event, e, c)
+	})
+
+	// On every form element which has action attribute call callback
+	c.OnHTML("form[action]", func(e *colly.HTMLElement) {
+		visitHTMLLink(e.Attr("action"), event, e, c)
+	})
+}
+
+// registerXMLEvents registers the associated functions for each
+// XML event triggering an action.
+func registerXMLEvents(c *colly.Collector, event *Event) { + // Create a callback on the XPath query searching for the URLs + c.OnXML("//url", func(e *colly.XMLElement) { + visitXMLLink(e.Text, event, e, c) + }) + + // Create a callback on the XPath query searching for the URLs + c.OnXML("//link", func(e *colly.XMLElement) { + visitXMLLink(e.Text, event, e, c) + }) + + // Create a callback on the XPath query searching for the URLs + c.OnXML("//href", func(e *colly.XMLElement) { + visitXMLLink(e.Text, event, e, c) + }) + + // Create a callback on the XPath query searching for the URLs + c.OnXML("//loc", func(e *colly.XMLElement) { + visitXMLLink(e.Text, event, e, c) + }) + + // Create a callback on the XPath query searching for the URLs + c.OnXML("//fileurl", func(e *colly.XMLElement) { + visitXMLLink(e.Text, event, e, c) + }) +} diff --git a/pkg/crawler/utils.go b/pkg/crawler/utils.go index 988bc6d..ac20afa 100644 --- a/pkg/crawler/utils.go +++ b/pkg/crawler/utils.go @@ -12,46 +12,48 @@ import ( "github.com/gocolly/colly" ) +type Event struct { + ProtocolTemp string + TargetTemp string + Target string + Intensive bool + Ignore bool + Debug bool + IgnoreSlice []string + URLs *[]string +} + // visitHTMLLink checks if the collector should visit a link or not. -func visitHTMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, - ignoreSlice []string, finalResults *[]string, e *colly.HTMLElement, c *colly.Collector) { +func visitHTMLLink(link string, event *Event, e *colly.HTMLElement, c *colly.Collector) { if len(link) != 0 { - absoluteURL := urlUtils.AbsoluteURL(protocolTemp, targetTemp, e.Request.AbsoluteURL(link)) + absoluteURL := urlUtils.AbsoluteURL(event.ProtocolTemp, event.TargetTemp, e.Request.AbsoluteURL(link)) // Visit link found on page // Only those links are visited which are in AllowedDomains - if (!intensive && urlUtils.SameDomain(protocolTemp+"://"+target, absoluteURL)) || - (intensive && intensiveOk(targetTemp, absoluteURL, debug)) { - if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { - err := c.Visit(absoluteURL) - if !errors.Is(err, colly.ErrAlreadyVisited) { - *finalResults = append(*finalResults, absoluteURL) - - if err != nil && debug { - log.Println(err) - } - } - } - } + visitLink(event, c, absoluteURL) } } // visitXMLLink checks if the collector should visit a link or not. -func visitXMLLink(link, protocolTemp, targetTemp, target string, intensive, ignoreBool, debug bool, - ignoreSlice []string, finalResults *[]string, e *colly.XMLElement, c *colly.Collector) { +func visitXMLLink(link string, event *Event, e *colly.XMLElement, c *colly.Collector) { if len(link) != 0 { - absoluteURL := urlUtils.AbsoluteURL(protocolTemp, targetTemp, e.Request.AbsoluteURL(link)) + absoluteURL := urlUtils.AbsoluteURL(event.ProtocolTemp, event.TargetTemp, e.Request.AbsoluteURL(link)) // Visit link found on page // Only those links are visited which are in AllowedDomains - if (!intensive && urlUtils.SameDomain(protocolTemp+"://"+target, absoluteURL)) || - (intensive && intensiveOk(targetTemp, absoluteURL, debug)) { - if !ignoreBool || (ignoreBool && !IgnoreMatch(absoluteURL, ignoreSlice)) { - err := c.Visit(absoluteURL) - if !errors.Is(err, colly.ErrAlreadyVisited) { - *finalResults = append(*finalResults, absoluteURL) - - if err != nil && debug { - log.Println(err) - } + visitLink(event, c, absoluteURL) + } +} + +// visitLink is a protocol agnostic wrapper to visit a link. 
+// visitLink is a protocol-agnostic wrapper to visit a link.
+func visitLink(event *Event, c *colly.Collector, absoluteURL string) {
+	if (!event.Intensive && urlUtils.SameDomain(event.ProtocolTemp+"://"+event.Target, absoluteURL)) ||
+		(event.Intensive && intensiveOk(event.TargetTemp, absoluteURL, event.Debug)) {
+		if !event.Ignore || (event.Ignore && !IgnoreMatch(absoluteURL, &event.IgnoreSlice)) {
+			err := c.Visit(absoluteURL)
+			if !errors.Is(err, colly.ErrAlreadyVisited) {
+				*event.URLs = append(*event.URLs, absoluteURL)
+
+				if err != nil && event.Debug {
+					log.Println(err)
 				}
 			}
 		}
@@ -59,16 +61,16 @@ func visitXMLLink(link, protocolTemp, targetTemp, target string, intensive, igno
 }

 // huntSecrets hunts for secrets.
-func huntSecrets(secretsFile []string, target string, body string) []scanner.SecretMatched {
+func huntSecrets(target, body string, secretsFile *[]string) []scanner.SecretMatched {
 	secrets := SecretsMatch(target, body, secretsFile)
 	return secrets
 }

 // SecretsMatch checks if a body matches some secrets.
-func SecretsMatch(url string, body string, secretsFile []string) []scanner.SecretMatched {
+func SecretsMatch(url, body string, secretsFile *[]string) []scanner.SecretMatched {
 	var secrets []scanner.SecretMatched

-	if len(secretsFile) == 0 {
+	if len(*secretsFile) == 0 {
 		for _, secret := range scanner.GetSecretRegexes() {
 			if matched, err := regexp.Match(secret.Regex, []byte(body)); err == nil && matched {
 				re := regexp.MustCompile(secret.Regex)
@@ -91,7 +93,7 @@ func SecretsMatch(url string, body string, secretsFile []string) []scanner.Secre
 			}
 		}
 	} else {
-		for _, secret := range secretsFile {
+		for _, secret := range *secretsFile {
 			if matched, err := regexp.Match(secret, []byte(body)); err == nil && matched {
 				re := regexp.MustCompile(secret)
 				match := re.FindStringSubmatch(body)
@@ -106,18 +108,18 @@ func SecretsMatch(url string, body string, secretsFile []string) []scanner.Secre
 }

 // huntEndpoints hunts for juicy endpoints.
-func huntEndpoints(endpointsFile []string, target string) []scanner.EndpointMatched {
+func huntEndpoints(target string, endpointsFile *[]string) []scanner.EndpointMatched {
 	endpoints := EndpointsMatch(target, endpointsFile)
 	return endpoints
 }

 // EndpointsMatch checks if an endpoint matches a juicy parameter.
-func EndpointsMatch(target string, endpointsFile []string) []scanner.EndpointMatched {
+func EndpointsMatch(target string, endpointsFile *[]string) []scanner.EndpointMatched {
 	endpoints := []scanner.EndpointMatched{}
 	matched := []scanner.Parameter{}
 	parameters := urlUtils.RetrieveParameters(target)

-	if len(endpointsFile) == 0 {
+	if len(*endpointsFile) == 0 {
 		for _, parameter := range scanner.GetJuicyParameters() {
 			for _, param := range parameters {
 				if strings.ToLower(param) == parameter.Parameter {
@@ -127,7 +129,7 @@ func EndpointsMatch(target string, endpointsFile []string) []scanner.EndpointMat
 			}
 		}
 	} else {
-		for _, parameter := range endpointsFile {
+		for _, parameter := range *endpointsFile {
 			for _, param := range parameters {
 				if param == parameter {
 					matched = append(matched, scanner.Parameter{Parameter: parameter, Attacks: []string{}})
@@ -162,13 +164,13 @@ func huntExtensions(target string, severity int) scanner.FileTypeMatched {
 }

 // huntErrors hunts for errors.
-func huntErrors(target string, body string) []scanner.ErrorMatched {
+func huntErrors(target, body string) []scanner.ErrorMatched {
 	errorsSlice := ErrorsMatch(target, body)
 	return errorsSlice
 }
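Editor's note: SecretsMatch, whose signature changes above to take a *[]string, pairs each built-in regex with a FalsePositives list and discards any match containing one of those substrings. Here is a self-contained sketch of that match-then-filter flow; the local secret type, its field names, and the sample regex are illustrative stand-ins for scanner.Secret, not the project's real definitions.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// secret mirrors the shape scanner.Secret suggests: a regex plus a list of
// substrings that mark a match as a false positive.
type secret struct {
	Name           string
	Regex          string
	FalsePositives []string
}

// matchSecret applies the same two-step check SecretsMatch uses:
// first the regex, then a case-insensitive substring scan that
// discards known-benign matches.
func matchSecret(s secret, body string) (string, bool) {
	re, err := regexp.Compile(s.Regex)
	if err != nil {
		return "", false
	}
	match := re.FindString(body)
	if match == "" {
		return "", false
	}
	for _, fp := range s.FalsePositives {
		if strings.Contains(strings.ToLower(match), fp) {
			return "", false
		}
	}
	return match, true
}

func main() {
	s := secret{
		Name:           "Demo token",
		Regex:          `(?i)token['"=: ]+[0-9a-f]{8}`,
		FalsePositives: []string{"example"},
	}
	if m, ok := matchSecret(s, `token="deadbeef"`); ok {
		fmt.Println("matched:", m)
	}
}
```

The quality of the FalsePositives lists is what keeps this approach usable in practice, which is exactly what patch 21 below tunes.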
 // ErrorsMatch checks the patterns for errors.
-func ErrorsMatch(url string, body string) []scanner.ErrorMatched {
+func ErrorsMatch(url, body string) []scanner.ErrorMatched {
 	errors := []scanner.ErrorMatched{}

 	for _, errorItem := range scanner.GetErrorRegexes() {
@@ -186,13 +188,13 @@ func ErrorsMatch(url string, body string) []scanner.ErrorMatched {
 }

 // huntInfos hunts for infos.
-func huntInfos(target string, body string) []scanner.InfoMatched {
+func huntInfos(target, body string) []scanner.InfoMatched {
 	infosSlice := InfoMatch(target, body)
 	return infosSlice
 }

 // InfoMatch checks the patterns for infos.
-func InfoMatch(url string, body string) []scanner.InfoMatched {
+func InfoMatch(url, body string) []scanner.InfoMatched {
 	infos := []scanner.InfoMatched{}

 	for _, infoItem := range scanner.GetInfoRegexes() {
@@ -210,8 +212,8 @@ func InfoMatch(url string, body string) []scanner.InfoMatched {
 }

 // RetrieveBody retrieves the body (in the response) of a url.
-func RetrieveBody(target string) string {
-	sb, err := GetRequest(target)
+func RetrieveBody(target *string) string {
+	sb, err := GetRequest(*target)
 	if err == nil && sb != "" {
 		return sb
 	}
@@ -220,8 +222,8 @@
 }

 // IgnoreMatch checks if the URL should be ignored or not.
-func IgnoreMatch(url string, ignoreSlice []string) bool {
-	for _, ignore := range ignoreSlice {
+func IgnoreMatch(url string, ignoreSlice *[]string) bool {
+	for _, ignore := range *ignoreSlice {
 		if strings.Contains(url, ignore) {
 			return true
 		}

From 37c71bbfefe38c793ab3878906252acb6a579315 Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Sun, 12 Feb 2023 15:56:24 +0100
Subject: [PATCH 21/22] fix false positives

---
 pkg/scanner/secrets.go | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/pkg/scanner/secrets.go b/pkg/scanner/secrets.go
index bad7bf7..bc24304 100644
--- a/pkg/scanner/secrets.go
+++ b/pkg/scanner/secrets.go
@@ -86,14 +86,14 @@ func GetSecretRegexes() []Secret {
 		"Facebook Secret Key",
 		"Facebook Secret Key",
 		`(?i)(facebook|fb)(.{0,20})?(?-i)['\"][0-9a-f]{32}['\"]`,
-		[]string{"facebook.com/", "facebook.svg"},
+		[]string{"facebook.com", "facebook.svg"},
 		"?",
 	},
 	{
 		"Facebook Client ID",
 		"Facebook Client ID",
 		`(?i)(facebook|fb)(.{0,20})?['\"][0-9]{13,17}['\"]`,
-		[]string{"facebook.com/", "facebook.svg"},
+		[]string{"facebook.com", "facebook.svg"},
 		"?",
 	},
 	{
@@ -114,14 +114,14 @@ func GetSecretRegexes() []Secret {
 		"Twitter Secret Key",
 		"Twitter Secret Key",
 		`(?i)twitter(.{0,20})?[0-9a-z]{35,44}`,
-		[]string{},
+		[]string{"twitter.com"},
 		"?",
 	},
 	{
 		"Twitter Client ID",
 		"Twitter Client ID",
 		`(?i)twitter(.{0,20})?[0-9a-z]{18,25}`,
-		[]string{},
+		[]string{"twitter.com"},
 		"?",
 	},
 	{
@@ -156,14 +156,14 @@ func GetSecretRegexes() []Secret {
 		"LinkedIn Client ID",
 		"LinkedIn Client ID",
 		`(?i)linkedin(.{0,20})?(?-i)[0-9a-z]{12}`,
-		[]string{"linkedin.com/", "linkedin.svg"},
+		[]string{"linkedin.com", "linkedin.svg"},
 		"?",
 	},
 	{
 		"LinkedIn Secret Key",
 		"LinkedIn Secret Key",
 		`(?i)linkedin(.{0,20})?[0-9a-z]{16}`,
-		[]string{"linkedin.com/", "linkedin.svg"},
+		[]string{"linkedin.com", "linkedin.svg"},
 		"?",
 	},
 	{

From b965684213a14e61c2e2c6be2adfda31f488efba Mon Sep 17 00:00:00 2001
From: edoardottt
Date: Sun, 12 Feb 2023 15:57:29 +0100
Subject: [PATCH 22/22] v1.3.0

---
 pkg/output/beautify.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg/output/beautify.go b/pkg/output/beautify.go
index 9fae40f..157bac1 100644
--- a/pkg/output/beautify.go
+++ b/pkg/output/beautify.go
@@ -38,7 +38,7 @@ func Beautify() {
 	banner2 := " ___ __ _ _ __(_) __| | __| (_)\n"
 	banner3 := " / __/ _` | '__| |/ _` |/ _` | |\n"
 	banner4 := " | (_| (_| | | | | (_| | (_| | |\n"
-	banner5 := " \\___\\__,_|_| |_|\\__,_|\\__,_|_| v1.2.1\n"
+	banner5 := " \\___\\__,_|_| |_|\\__,_|\\__,_|_| v1.3.0\n"
 	banner6 := ""
 	banner7 := " > github.com/edoardottt/cariddi\n"
 	banner8 := " > edoardoottavianelli.it\n"
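Editor's note: patch 21 drops the trailing slash from the false-positive substrings (e.g. "facebook.com/" becomes "facebook.com") and adds "twitter.com" entries. Because the filter is a plain case-insensitive substring test, the trailing slash only matched URLs with a path, letting bare domain references through as fake "secrets". The snippet below demonstrates the difference with an invented sample match; it is a standalone illustration, not project code.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// A captured candidate "secret" that is really just a URL reference.
	match := `fb:"https://www.facebook.com"`

	// The old filter required a trailing slash, so this match slipped through.
	fmt.Println(strings.Contains(strings.ToLower(match), "facebook.com/")) // false

	// The patched filter drops the slash and now flags it as a false positive.
	fmt.Println(strings.Contains(strings.ToLower(match), "facebook.com")) // true
}
```

Broadening the substring trades a little precision for far fewer noisy reports, which matches the commit's stated goal of fixing false positives.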