diff --git a/v2/pkg/passive/sources.go b/v2/pkg/passive/sources.go
index 43b048f65..a72122292 100644
--- a/v2/pkg/passive/sources.go
+++ b/v2/pkg/passive/sources.go
@@ -19,6 +19,7 @@ import (
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/certspotter"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/chaos"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/chinaz"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/columbus"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/commoncrawl"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/crtsh"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/digitorus"
@@ -62,6 +63,7 @@ var AllSources = [...]subscraping.Source{
 	&certspotter.Source{},
 	&chaos.Source{},
 	&chinaz.Source{},
+	&columbus.Source{},
 	&commoncrawl.Source{},
 	&crtsh.Source{},
 	&digitorus.Source{},
diff --git a/v2/pkg/passive/sources_test.go b/v2/pkg/passive/sources_test.go
index c99716d58..3866c064e 100644
--- a/v2/pkg/passive/sources_test.go
+++ b/v2/pkg/passive/sources_test.go
@@ -21,6 +21,7 @@ var (
 		"certspotter",
 		"chaos",
 		"chinaz",
+		"columbus",
 		"commoncrawl",
 		"crtsh",
 		"digitorus",
@@ -66,6 +67,7 @@ var (
 		"censys",
 		"chaos",
 		"chinaz",
+		"columbus",
 		"crtsh",
 		"digitorus",
 		"dnsdumpster",
diff --git a/v2/pkg/subscraping/sources/columbus/columbus.go b/v2/pkg/subscraping/sources/columbus/columbus.go
new file mode 100644
index 000000000..83432ab92
--- /dev/null
+++ b/v2/pkg/subscraping/sources/columbus/columbus.go
@@ -0,0 +1,92 @@
+// Package columbus logic
+package columbus
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct {
+	timeTaken time.Duration
+	errors    int
+	results   int
+}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+	s.errors = 0
+	s.results = 0
+
+	go func() {
+		defer func(startTime time.Time) {
+			s.timeTaken = time.Since(startTime)
+			close(results)
+		}(time.Now())
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://columbus.elmasy.com/api/lookup/%s", domain))
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		var subdomains []string
+		err = jsoniter.NewDecoder(resp.Body).Decode(&subdomains)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			s.errors++
+			resp.Body.Close()
+			return
+		}
+
+		resp.Body.Close()
+
+		for _, record := range subdomains {
+			if record == "" {
+				continue
+			}
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record + "." + domain}
+			s.results++
+		}
+
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "columbus"
+}
+
+func (s *Source) IsDefault() bool {
+	return true
+}
+
+func (s *Source) HasRecursiveSupport() bool {
+	return false
+}
+
+func (s *Source) NeedsKey() bool {
+	return false
+}
+
+func (s *Source) AddApiKeys(_ []string) {
+	// no key needed
+}
+
+func (s *Source) Statistics() subscraping.Statistics {
+	return subscraping.Statistics{
+		Errors:    s.errors,
+		Results:   s.results,
+		TimeTaken: s.timeTaken,
+	}
+}