diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index e6fabe953..d567636b7 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -58,7 +58,6 @@ jobs:
           FOFA_API_KEY: ${{secrets.FOFA_API_KEY}}
           FULLHUNT_API_KEY: ${{secrets.FULLHUNT_API_KEY}}
           GITHUB_API_KEY: ${{secrets.GITHUB_API_KEY}}
-          HUNTER_API_KEY: ${{secrets.HUNTER_API_KEY}}
           INTELX_API_KEY: ${{secrets.INTELX_API_KEY}}
           LEAKIX_API_KEY: ${{secrets.LEAKIX_API_KEY}}
           QUAKE_API_KEY: ${{secrets.QUAKE_API_KEY}}
diff --git a/pkg/passive/sources.go b/pkg/passive/sources.go
index 2f1a52217..af7d5e6e8 100644
--- a/pkg/passive/sources.go
+++ b/pkg/passive/sources.go
@@ -33,7 +33,6 @@ import (
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/github"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hackertarget"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hudsonrock"
-	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hunter"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/intelx"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/leakix"
 	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/netlas"
@@ -78,7 +77,6 @@ var AllSources = [...]subscraping.Source{
 	&fullhunt.Source{},
 	&github.Source{},
 	&hackertarget.Source{},
-	&hunter.Source{},
 	&intelx.Source{},
 	&netlas.Source{},
 	&onyphe.Source{},
diff --git a/pkg/passive/sources_test.go b/pkg/passive/sources_test.go
index e0a2e9ac2..abcad1471 100644
--- a/pkg/passive/sources_test.go
+++ b/pkg/passive/sources_test.go
@@ -51,7 +51,6 @@ var (
 	"whoisxmlapi",
 	"windvane",
 	"zoomeyeapi",
-	"hunter",
 	"leakix",
 	"facebook",
 	// "threatminer",
@@ -91,7 +90,6 @@ var (
 	"windvane",
 	"virustotal",
 	"whoisxmlapi",
-	"hunter",
 	"leakix",
 	"facebook",
 	// "threatminer",
diff --git a/pkg/subscraping/sources/hunter/hunter.go b/pkg/subscraping/sources/hunter/hunter.go
deleted file mode 100644
index 960563279..000000000
--- a/pkg/subscraping/sources/hunter/hunter.go
+++ /dev/null
@@ -1,131 +0,0 @@
-package hunter
-
-import (
-	"context"
-	"encoding/base64"
-	"fmt"
-	"time"
-
-	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
-)
-
-type hunterResp struct {
-	Code    int        `json:"code"`
-	Data    hunterData `json:"data"`
-	Message string     `json:"message"`
-}
-
-type infoArr struct {
-	URL      string `json:"url"`
-	IP       string `json:"ip"`
-	Port     int    `json:"port"`
-	Domain   string `json:"domain"`
-	Protocol string `json:"protocol"`
-}
-
-type hunterData struct {
-	InfoArr []infoArr `json:"arr"`
-	Total   int       `json:"total"`
-}
-
-// Source is the passive scraping agent
-type Source struct {
-	apiKeys   []string
-	timeTaken time.Duration
-	errors    int
-	results   int
-	skipped   bool
-}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-	s.errors = 0
-	s.results = 0
-
-	go func() {
-		defer func(startTime time.Time) {
-			s.timeTaken = time.Since(startTime)
-			close(results)
-		}(time.Now())
-
-		randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
-		if randomApiKey == "" {
-			s.skipped = true
-			return
-		}
-
-		var pages = 1
-		for currentPage := 1; currentPage <= pages; currentPage++ {
-			// hunter api doc https://hunter.qianxin.com/home/helpCenter?r=5-1-2
-			qbase64 := base64.URLEncoding.EncodeToString(fmt.Appendf(nil, "domain=\"%s\"", domain))
-			resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://hunter.qianxin.com/openApi/search?api-key=%s&search=%s&page=%d&page_size=100&is_web=3", randomApiKey, qbase64, currentPage))
-			if err != nil && resp == nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				s.errors++
-				session.DiscardHTTPResponse(resp)
-				return
-			}
-
-			var response hunterResp
-			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				s.errors++
-				session.DiscardHTTPResponse(resp)
-				return
-			}
-			session.DiscardHTTPResponse(resp)
-
-			if response.Code == 401 || response.Code == 400 {
-				results <- subscraping.Result{
-					Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.Message),
-				}
-				s.errors++
-				return
-			}
-
-			if response.Data.Total > 0 {
-				for _, hunterInfo := range response.Data.InfoArr {
-					subdomain := hunterInfo.Domain
-					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-					s.results++
-				}
-			}
-			pages = int(response.Data.Total/1000) + 1
-		}
-	}()
-
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "hunter"
-}
-
-func (s *Source) IsDefault() bool {
-	return true
-}
-
-func (s *Source) HasRecursiveSupport() bool {
-	return false
-}
-
-func (s *Source) NeedsKey() bool {
-	return true
-}
-
-func (s *Source) AddApiKeys(keys []string) {
-	s.apiKeys = keys
-}
-
-func (s *Source) Statistics() subscraping.Statistics {
-	return subscraping.Statistics{
-		Errors:    s.errors,
-		Results:   s.results,
-		TimeTaken: s.timeTaken,
-		Skipped:   s.skipped,
-	}
-}
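
For reviewers tracing the removal: the deleted `hunter.go` is a self-contained passive source plugin, and the remaining hunks are the per-source wiring subfinder needs (a CI secret, an import, registration in `AllSources`, and the test expectations). Below is a minimal sketch of the `subscraping.Source` interface the file satisfied, inferred purely from the method set visible in the deleted code; the authoritative definition lives in `pkg/subscraping` and may differ in detail.

```go
package subscraping

import "context"

// Sketch only: interface shape inferred from the seven methods the removed
// hunter source implements (Run, Name, IsDefault, HasRecursiveSupport,
// NeedsKey, AddApiKeys, Statistics). Session, Result, and Statistics are
// the package's existing types, used by the deleted code as
// subscraping.Session, subscraping.Result, and subscraping.Statistics.
type Source interface {
	// Run enumerates subdomains for domain, streaming each finding (or
	// error) on the returned channel and closing it when done.
	Run(ctx context.Context, domain string, session *Session) <-chan Result

	Name() string              // unique source identifier, e.g. "hunter"
	IsDefault() bool           // included in the default source set
	HasRecursiveSupport() bool // can enumerate subdomains of subdomains
	NeedsKey() bool            // requires an API key to run
	AddApiKeys(keys []string)  // receives the configured API keys
	Statistics() Statistics    // errors/results/time-taken/skipped counters
}
```

Because registration is a compile-time array (`AllSources = [...]subscraping.Source{...}`), dropping a source means deleting both its implementation and every reference shown above, which is why this change touches four files for one source.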