这是indexloc提供的服务,不要输入任何密码
Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
- Changed
- Fix a bug in autocalibration strategy merging, when two files have the same strategy key
- Fix panic when setting rate to 0 in the interactive console
- Fix greedy recursion strategy, do not fuzz recursively on filename matches such as /index.php

- v2.1.0
- New
Expand Down
1 change: 1 addition & 0 deletions CONTRIBUTORS.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
* [bsysop](https://twitter.com/bsysop)
* [ccsplit](https://github.com/ccsplit)
* [choket](https://github.com/choket)
* [chouettevan](https://github.com/chouettevan)
* [codingo](https://github.com/codingo)
* [c_sto](https://github.com/c-sto)
* [Damian89](https://github.com/Damian89)
Expand Down
15 changes: 5 additions & 10 deletions pkg/ffuf/job.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"sync"
"syscall"
"time"
"regexp"
)

// Job ties together Config, Runner, Input and Output
Expand Down Expand Up @@ -47,6 +48,8 @@ type QueueJob struct {
depth int
req Request
}
// This is the regexp used to match endpoints with file extensions, such as /robots.txt
var fileExtensions = regexp.MustCompile(`\.[a-zA-Z0-9]+$`)

func NewJob(conf *Config) *Job {
var j Job
Expand Down Expand Up @@ -507,7 +510,7 @@ func (j *Job) handleScraperResult(resp *Response, sres ScraperResult) {
// handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached
func (j *Job) handleGreedyRecursionJob(resp Response) {
// Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob
if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth && !fileExtensions.MatchString(resp.Request.Url) {
recUrl := resp.Request.Url + "/" + "FUZZ"
newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)}
j.queuejobs = append(j.queuejobs, newJob)
Expand All @@ -520,19 +523,11 @@ func (j *Job) handleGreedyRecursionJob(resp Response) {
// handleDefaultRecursionJob adds a new recursion job to the job queue if a new directory is found and maximum depth has
// not been reached
func (j *Job) handleDefaultRecursionJob(resp Response) {
recUrl := resp.Request.Url + "/" + "FUZZ"
if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) {
// Not a directory, return early
return
}
if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
// We have yet to reach the maximum recursion depth
newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)}
j.queuejobs = append(j.queuejobs, newJob)
j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl))
} else {
j.Output.Warning(fmt.Sprintf("Directory found, but recursion depth exceeded. Ignoring: %s", resp.GetRedirectLocation(true)))
}
j.handleGreedyRecursionJob(resp)
}

// CheckStop stops the job if stopping conditions are met
Expand Down