这是indexloc提供的服务,不要输入任何密码
Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions v2/pkg/passive/sources.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ import (
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hackertarget"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hunter"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/intelx"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/leakix"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/passivetotal"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/quake"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/rapiddns"
Expand Down Expand Up @@ -83,6 +84,7 @@ var AllSources = [...]subscraping.Source{
&whoisxmlapi.Source{},
&zoomeye.Source{},
&zoomeyeapi.Source{},
&leakix.Source{},
// &threatminer.Source{}, // failing api
// &reconcloud.Source{}, // failing due to cloudflare bot protection
}
Expand Down
7 changes: 5 additions & 2 deletions v2/pkg/passive/sources_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ var (
"zoomeye",
"zoomeyeapi",
"hunter",
"leakix",
// "threatminer",
// "reconcloud",
}
Expand Down Expand Up @@ -78,6 +79,7 @@ var (
"virustotal",
"whoisxmlapi",
"hunter",
"leakix",
// "threatminer",
// "reconcloud",
}
Expand All @@ -94,6 +96,7 @@ var (
"passivetotal",
"securitytrails",
"virustotal",
"leakix",
// "reconcloud",
}
)
Expand Down Expand Up @@ -140,13 +143,13 @@ func TestSourceFiltering(t *testing.T) {
{someSources, someExclusions, false, false, len(someSources) - len(someExclusions)},
{someSources, someExclusions, false, true, 1},
{someSources, someExclusions, true, false, len(AllSources) - len(someExclusions)},
{someSources, someExclusions, true, true, 9},
{someSources, someExclusions, true, true, 10},

{someSources, []string{}, false, false, len(someSources)},
{someSources, []string{}, true, false, len(AllSources)},

{[]string{}, []string{}, false, false, len(expectedDefaultSources)},
{[]string{}, []string{}, false, true, 10},
{[]string{}, []string{}, false, true, 11},
{[]string{}, []string{}, true, false, len(AllSources)},
{[]string{}, []string{}, true, true, len(expectedDefaultRecursiveSources)},
}
Expand Down
106 changes: 106 additions & 0 deletions v2/pkg/subscraping/sources/leakix/leakix.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
// Package leakix logic
package leakix

import (
"context"
"encoding/json"
"fmt"
"time"

"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

// Source is the passive scraping agent
type Source struct {
	apiKeys   []string      // leakix API keys Run may pick from; may be empty
	timeTaken time.Duration // wall-clock duration of the most recent Run
	errors    int           // errors emitted during the most recent Run
	results   int           // subdomains emitted during the most recent Run
	skipped   bool          // reported via Statistics; never set in this file
}

// Run queries the leakix subdomains API for the given domain and streams
// every discovered subdomain (or any error encountered) on the returned
// channel. The channel is closed when the query finishes, and per-run
// statistics (errors, results, time taken) are recorded on the Source.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	s.errors = 0
	s.results = 0

	go func() {
		// Record elapsed time and signal completion regardless of outcome.
		defer func(startTime time.Time) {
			s.timeTaken = time.Since(startTime)
			close(results)
		}(time.Now())
		// Default headers
		headers := map[string]string{
			"accept": "application/json",
		}
		// Only send the api-key header when a key is actually configured.
		randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
		if randomApiKey != "" {
			headers["api-key"] = randomApiKey
		}
		// Request
		resp, err := session.Get(ctx, "https://leakix.net/api/subdomains/"+domain, "", headers)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			s.errors++
			return
		}
		// Always release the response body so the underlying connection
		// can be reused (the original code leaked it on every path).
		defer resp.Body.Close()
		if resp.StatusCode != 200 {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("request failed with status %d", resp.StatusCode)}
			s.errors++
			return
		}
		// Parse and return results
		var subdomains []subResponse
		if err := json.NewDecoder(resp.Body).Decode(&subdomains); err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			s.errors++
			return
		}
		for _, result := range subdomains {
			results <- subscraping.Result{
				Source: s.Name(), Type: subscraping.Subdomain, Value: result.Subdomain,
			}
			s.results++
		}
	}()
	return results
}

// Name returns the unique identifier of this source ("leakix").
func (s *Source) Name() string {
	return "leakix"
}

// IsDefault reports whether this source belongs to the default source set.
func (s *Source) IsDefault() bool {
	return true
}

// HasRecursiveSupport reports whether this source supports recursive
// subdomain enumeration.
func (s *Source) HasRecursiveSupport() bool {
	return true
}

// NeedsKey reports whether this source requires an API key to operate.
func (s *Source) NeedsKey() bool {
	return true
}

// AddApiKeys stores the API keys that Run picks from at random.
func (s *Source) AddApiKeys(keys []string) {
	s.apiKeys = keys
}

// Statistics returns the counters and timing gathered during the most
// recent Run of this source.
func (s *Source) Statistics() subscraping.Statistics {
	return subscraping.Statistics{
		Errors:    s.errors,
		Results:   s.results,
		TimeTaken: s.timeTaken,
		Skipped:   s.skipped,
	}
}

type subResponse struct {
Subdomain string `json:"subdomain"`
DistinctIps int `json:"distinct_ips"`
LastSeen time.Time `json:"last_seen"`
}