New Upstream Release - ffuf

Ready changes

Summary

Merged new upstream version: 2.0.0 (was: 1.5.0).

Diff

diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml
index ab35228..dbf5ffe 100644
--- a/.github/workflows/golangci-lint.yml
+++ b/.github/workflows/golangci-lint.yml
@@ -11,9 +11,12 @@ jobs:
     name: lint
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/setup-go@v3
+        with:
+          go-version: 1.17
+      - uses: actions/checkout@v3
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v2
+        uses: golangci/golangci-lint-action@v3
         with:
           # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
           version: v1.29
diff --git a/.goreleaser.yml b/.goreleaser.yml
index 4e8d074..fd5eeae 100644
--- a/.goreleaser.yml
+++ b/.goreleaser.yml
@@ -10,7 +10,7 @@ builds:
     gcflags:
       - all=-trimpath={{.Env.GOPATH}}
     ldflags: |
-      -s -w -X github.com/ffuf/ffuf/pkg/ffuf.VERSION_APPENDIX= -extldflags '-static'
+      -s -w -X github.com/ffuf/ffuf/v2/pkg/ffuf.VERSION_APPENDIX= -extldflags '-static'
     goos:
       - linux
       - windows
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 283c12d..e1274bc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,29 @@
   - New
   - Changed
   
+- v2.0.0
+  - New
+    - Added a new, dynamic keyword `FFUFHASH` that generates hash from job configuration and wordlist position to map blind payloads back to the initial request.
+    - New command line parameter for searching a hash: `-search FFUFHASH`
+    - Data scraper functionality
+    - Requests per second rate can be configured in the interactive mode
+  - Changed
+    - Multiline output prints out alphabetically sorted by keyword
+    - Default configuration directories now follow `XDG_CONFIG_HOME` variable (less spam in your home directory)
+    - Fixed issue with autocalibration of line & words filter
+    - Rate doesn't have initial burst anymore and is more robust in general
+    - Sniper mode template parsing fixes
+    - Time-based matcher now works properly
+    - Proxy URLs are verified to avoid hard to debug issues
+    - Made JSON (`-json`) output format take precedence over quiet output mode, to allow JSON output without the banner etc
+
+  
+- v1.5.0
+  - New
+    - New autocalibration options: `-ach`, `-ack` and `-acs`. Revamped the whole autocalibration process
+    - Configurable modes for matchers and filters (CLI flags: `fmode` and `mmode`): "and" and "or"
+  - Changed
+  
 - v1.4.1
   - New
   - Changed
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index 6a15f06..28c7323 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -25,10 +25,12 @@
 * [JamTookTheBait](https://github.com/JamTookTheBait)
 * [jimen0](https://github.com/jimen0)
 * [joohoi](https://github.com/joohoi)
+* [JoshuaMulliken](https://github.com/JoshuaMulliken)
 * [jsgv](https://github.com/jsgv)
 * [justinsteven](https://github.com/justinsteven)
 * [jvesiluoma](https://github.com/jvesiluoma)
 * [Kiblyn11](https://github.com/Kiblyn11)
+* [l4yton](https://github.com/l4yton)
 * [lc](https://github.com/lc)
 * [mprencipe](https://github.com/mprencipe)
 * [nnwakelam](https://twitter.com/nnwakelam)
@@ -41,5 +43,6 @@
 * [Shaked](https://github.com/Shaked)
 * [Skyehopper](https://github.com/Skyehopper)
 * [SolomonSklash](https://github.com/SolomonSklash)
-* [l4yton](https://github.com/l4yton)
+* [TomNomNom](https://github.com/tomnomnom)
 * [xfgusta](https://github.com/xfgusta)
+
diff --git a/README.md b/README.md
index 262ce0d..9c6c665 100644
--- a/README.md
+++ b/README.md
@@ -13,25 +13,17 @@ A fast web fuzzer written in Go.
     - [Configuration files](https://github.com/ffuf/ffuf#configuration-files)
 - [Help](https://github.com/ffuf/ffuf#usage)
     - [Interactive mode](https://github.com/ffuf/ffuf#interactive-mode)
-- [Sponsorware?](https://github.com/ffuf/ffuf#sponsorware)
 
-## Sponsors
-[![Offensive Security](_img/offsec-logo.png)](https://www.offensive-security.com/)
-
-## Official Discord Channel
-
-ffuf has a channel at Porchetta Industries Discord server alongside of channels for many other tools.
-
-Come to hang out & to discuss about ffuf, it's usage and development!
-
-[![Porchetta Industries](https://discordapp.com/api/guilds/736724457258745996/widget.png?style=banner2)](https://discord.gg/VWcdZCUsQP)
 
 ## Installation
 
 - [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run!
+
+  _or_
+- If you are on macOS with [homebrew](https://brew.sh), ffuf can be installed with: `brew install ffuf`
   
   _or_
-- If you have recent go compiler installed: `go install github.com/ffuf/ffuf@latest` (the same command works for updating)
+- If you have recent go compiler installed: `go install github.com/ffuf/ffuf/v2@latest` (the same command works for updating)
   
   _or_
 - `git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build`
@@ -42,6 +34,9 @@ Ffuf depends on Go 1.16 or greater.
 
 The usage examples below show just the simplest tasks you can accomplish using `ffuf`. 
 
+More elaborate documentation that goes through many features with a lot of examples is
+available in the ffuf wiki at [https://github.com/ffuf/ffuf/wiki](https://github.com/ffuf/ffuf/wiki)
+
 For more extensive documentation, with real life usage examples and tips, be sure to check out the awesome guide:
 "[Everything you need to know about FFUF](https://codingo.io/tools/ffuf/bounty/2020/09/17/everything-you-need-to-know-about-ffuf.html)" by 
 Michael Skelton ([@codingo](https://github.com/codingo)).
@@ -70,7 +65,7 @@ ffuf -w /path/to/vhost/wordlist -u https://target -H "Host: FUZZ" -fs 4242
 
 ### GET parameter fuzzing
 
-GET parameter name fuzzing is very similar to directory discovery, and works by defining the `FUZZ` keyword as a part of the URL. This also assumes an response size of 4242 bytes for invalid GET parameter name.
+GET parameter name fuzzing is very similar to directory discovery, and works by defining the `FUZZ` keyword as a part of the URL. This also assumes a response size of 4242 bytes for invalid GET parameter name.
 
 ```
 ffuf -w /path/to/paramnames.txt -u https://target/script.php?FUZZ=test_value -fs 4242
@@ -130,12 +125,15 @@ ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST
 
 ### Configuration files
 
-When running ffuf, it first checks if a default configuration file exists. The file path for it is `~/.ffufrc` / `$HOME/.ffufrc`
-for most *nixes (for example `/home/joohoi/.ffufrc`) and `%USERPROFILE%\.ffufrc` for Windows. You can configure one or 
-multiple options in this file, and they will be applied on every subsequent ffuf job. An example of .ffufrc file can be
-found [here](https://github.com/ffuf/ffuf/blob/master/ffufrc.example). 
+When running ffuf, it first checks if a default configuration file exists. Default path for a `ffufrc` file is
+`$XDG_CONFIG_HOME/ffuf/ffufrc`.  You can configure one or multiple options in this file, and they will be applied on 
+every subsequent ffuf job. An example of ffufrc file can be found 
+[here](https://github.com/ffuf/ffuf/blob/master/ffufrc.example). 
 
-The configuration options provided on the command line override the ones loaded from `~/.ffufrc`.
+A more detailed description about configuration file locations can be found in the wiki: 
+[https://github.com/ffuf/ffuf/wiki/Configuration](https://github.com/ffuf/ffuf/wiki/Configuration)
+
+The configuration options provided on the command line override the ones loaded from the default `ffufrc` file.
 Note: this does not apply for CLI flags that can be provided more than once. One of such examples is `-H` (header) flag.
 In this case, the `-H` values provided on the command line will be _appended_ to the ones from the config file instead.
 
@@ -152,13 +150,14 @@ parameter.
 To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`).
 
 ```
-Fuzz Faster U Fool - v1.3.0-dev
+Fuzz Faster U Fool - v2.0.0
 
 HTTP OPTIONS:
   -H                  Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted.
   -X                  HTTP method to use
   -b                  Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality.
   -d                  POST data
+  -http2              Use HTTP2 protocol (default: false)
   -ignore-body        Do not fetch the response content. (default: false)
   -r                  Follow redirects (default: false)
   -recursion          Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false)
@@ -174,8 +173,12 @@ GENERAL OPTIONS:
   -V                  Show version information. (default: false)
   -ac                 Automatically calibrate filtering options (default: false)
   -acc                Custom auto-calibration string. Can be used multiple times. Implies -ac
+  -ach                Per host autocalibration (default: false)
+  -ack                Autocalibration keyword (default: FUZZ)
+  -acs                Autocalibration strategy: "basic" or "advanced" (default: basic)
   -c                  Colorize output. (default: false)
   -config             Load configuration from a file
+  -json               JSON output, printing newline-delimited JSON records (default: false)
   -maxtime            Maximum running time in seconds for entire process. (default: 0)
   -maxtime-job        Maximum running time in seconds per job. (default: 0)
   -noninteractive     Disable the interactive console functionality (default: false)
@@ -183,7 +186,10 @@ GENERAL OPTIONS:
   -rate               Rate of requests per second (default: 0)
   -s                  Do not print additional information (silent mode) (default: false)
   -sa                 Stop on all error cases. Implies -sf and -se. (default: false)
+  -scraperfile        Custom scraper file path
+  -scrapers           Active scraper groups (default: all)
   -se                 Stop on spurious errors (default: false)
+  -search             Search for a FFUFHASH payload from ffuf history
   -sf                 Stop when > 95% of responses return 403 Forbidden (default: false)
   -t                  Number of concurrent threads. (default: 40)
   -v                  Verbose output, printing full URL and redirect location (if any) with the results. (default: false)
@@ -191,6 +197,7 @@ GENERAL OPTIONS:
 MATCHER OPTIONS:
   -mc                 Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405,500)
   -ml                 Match amount of lines in response
+  -mmode              Matcher set operator. Either of: and, or (default: or)
   -mr                 Match regexp
   -ms                 Match HTTP response size
   -mt                 Match how many milliseconds to the first response byte, either greater or less than. EG: >100 or <100
@@ -199,6 +206,7 @@ MATCHER OPTIONS:
 FILTER OPTIONS:
   -fc                 Filter HTTP status codes from response. Comma separated list of codes and ranges
   -fl                 Filter by amount of lines in response. Comma separated list of line counts and ranges
+  -fmode              Filter set operator. Either of: and, or (default: or)
   -fr                 Filter regexp
   -fs                 Filter HTTP response size. Comma separated list of sizes and ranges
   -ft                 Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100
@@ -239,7 +247,6 @@ EXAMPLE USAGE:
     ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr "VAL" -c
 
   More information and examples: https://github.com/ffuf/ffuf
-
 ```
 
 ### Interactive mode
@@ -251,18 +258,25 @@ type "help" for a list of commands, or ENTER to resume.
 > help
 
 available commands:
- fc [value]             - (re)configure status code filter 
- fl [value]             - (re)configure line count filter 
- fw [value]             - (re)configure word count filter 
- fs [value]             - (re)configure size filter 
- queueshow              - show recursive job queue
- queuedel [number]      - delete a recursion job in the queue
- queueskip              - advance to the next queued recursion job
- restart                - restart and resume the current ffuf job
- resume                 - resume current ffuf job (or: ENTER) 
- show                   - show results for the current job
- savejson [filename]    - save current matches to a file
- help                   - you are looking at it
+ afc  [value]             - append to status code filter 
+ fc   [value]             - (re)configure status code filter 
+ afl  [value]             - append to line count filter 
+ fl   [value]             - (re)configure line count filter 
+ afw  [value]             - append to word count filter 
+ fw   [value]             - (re)configure word count filter 
+ afs  [value]             - append to size filter 
+ fs   [value]             - (re)configure size filter 
+ aft  [value]             - append to time filter 
+ ft   [value]             - (re)configure time filter 
+ rate [value]             - adjust rate of requests per second (active: 0)
+ queueshow                - show job queue
+ queuedel [number]        - delete a job in the queue
+ queueskip                - advance to the next queued job
+ restart                  - restart and resume the current ffuf job
+ resume                   - resume current ffuf job (or: ENTER) 
+ show                     - show results for the current job
+ savejson [filename]      - save current matches to a file
+ help                     - you are looking at it
 > 
 ```
 
@@ -282,30 +296,6 @@ job from the beginning.
   <img width="250" src="_img/ffuf_waving_250.png">
 </p>
 
-
-## Sponsorware
-
-`ffuf` employs a sponsorware model. This means that all new features developed by its author are initially exclusively 
-available for their sponsors. 30 days after the exclusive release, all the new features will be released at the freely
-available open source repository at https://github.com/ffuf/ffuf . 
-
-This model enables me to provide concrete benefits for the generous individuals and companies that enable me to work on 
-`ffuf`. The different sponsorship tiers can be seen [here](https://github.com/sponsors/joohoi).
-
-All the community contributions are and will be available directly in the freely available open source repository. The
-exclusive version benefits only include new features created by [@joohoi](https://github.com/joohoi)
-
-### Access the sponsorware through code contributions
-
-People that create significant contributions to the `ffuf` project itself should and will have access to the sponsorware
-as well. If you are planning to create such a contribution, please contact [@joohoi](https://github.com/joohoi)
-first to ensure that there aren't other people working on the same feature.
-
-## Helper scripts and advanced payloads
-
-See [ffuf-scripts](https://github.com/ffuf/ffuf-scripts) repository for helper scripts and payload generators
-for different workflows and usage scenarios.
-
 ## License
 
 ffuf is released under MIT license. See [LICENSE](https://github.com/ffuf/ffuf/blob/master/LICENSE).
diff --git a/debian/changelog b/debian/changelog
index 55a1767..cf4e747 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,9 +1,11 @@
-ffuf (1.4.1-2) UNRELEASED; urgency=medium
+ffuf (2.0.0-1) UNRELEASED; urgency=medium
 
   * Trim trailing whitespace.
   * Fix day-of-week for changelog entry 1.4.1-1.
+  * New upstream release 1.5.0.
+  * New upstream release 2.0.0.
 
- -- Debian Janitor <janitor@jelmer.uk>  Sat, 09 Apr 2022 13:08:32 -0000
+ -- Debian Janitor <janitor@jelmer.uk>  Sat, 03 Jun 2023 06:49:51 -0000
 
 ffuf (1.4.1-1) unstable; urgency=medium
 
diff --git a/debian/patches/10-fix-spelling.patch b/debian/patches/10-fix-spelling.patch
index 0b265a8..a374b8b 100644
--- a/debian/patches/10-fix-spelling.patch
+++ b/debian/patches/10-fix-spelling.patch
@@ -1,10 +1,10 @@
 # Author: Pedro Loami Barbosa dos Santos <pedro@loami.eng.br>
 # Date: May 11 2020
 # Description: Fix spelling on /pkg/ffuf/multierror.go
-Index: ffuf-1.0.2/pkg/ffuf/multierror.go
+Index: ffuf.git/pkg/ffuf/multierror.go
 ===================================================================
---- ffuf-1.0.2.orig/pkg/ffuf/multierror.go
-+++ ffuf-1.0.2/pkg/ffuf/multierror.go
+--- ffuf.git.orig/pkg/ffuf/multierror.go
++++ ffuf.git/pkg/ffuf/multierror.go
 @@ -20,7 +20,7 @@ func (m *Multierror) Add(err error) {
  func (m *Multierror) ErrorOrNil() error {
  	var errString string
diff --git a/ffufrc.example b/ffufrc.example
index 059a8b8..a3ce1ce 100644
--- a/ffufrc.example
+++ b/ffufrc.example
@@ -27,6 +27,9 @@
         "randomtest",
         "admin"
     ]
+    autocalibration_strategy = "basic"
+    autocalibration_keyword = "FUZZ"
+    autocalibration_perhost = false
     colors = false
     delay = ""
     maxtime = 0
@@ -34,6 +37,7 @@
     noninteractive = false
     quiet = false
     rate = 0
+    scrapers = "all"
     stopon403 = false
     stoponall = false
     stoponerrors = false
@@ -57,7 +61,6 @@
         "/path/to/hostlist:HOST"
     ]
 
-
 [output]
     debuglog = "debug.log"
     outputdirectory = "/tmp/rawoutputdir"
@@ -66,6 +69,7 @@
     outputcreateemptyfile = false
 
 [filter]
+    mode = "or"
     lines = ""
     regexp = ""
     size = ""
@@ -74,6 +78,7 @@
     words = ""
 
 [matcher]
+    mode = "or"
     lines = ""
     regexp = ""
     size = ""
diff --git a/go.mod b/go.mod
index e33c141..5071691 100644
--- a/go.mod
+++ b/go.mod
@@ -1,5 +1,16 @@
-module github.com/ffuf/ffuf
+module github.com/ffuf/ffuf/v2
 
-go 1.13
+go 1.17
 
-require github.com/pelletier/go-toml v1.8.1
+require (
+	github.com/PuerkitoBio/goquery v1.8.0
+	github.com/adrg/xdg v0.4.0
+	github.com/pelletier/go-toml v1.9.5
+)
+
+require (
+	github.com/andybalholm/cascadia v1.3.1 // indirect
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	golang.org/x/net v0.5.0 // indirect
+	golang.org/x/sys v0.4.0 // indirect
+)
diff --git a/go.sum b/go.sum
index 6537354..3304bd8 100644
--- a/go.sum
+++ b/go.sum
@@ -1,3 +1,52 @@
+github.com/PuerkitoBio/goquery v1.8.0 h1:PJTF7AmFCFKk1N6V6jmKfrNH9tV5pNE6lZMkG0gta/U=
+github.com/PuerkitoBio/goquery v1.8.0/go.mod h1:ypIiRMtY7COPGk+I/YbZLbxsxn9g5ejnI2HSMtkjZvI=
+github.com/adrg/xdg v0.4.0 h1:RzRqFcjH4nE5C6oTAxhBtoE2IRyjBSa62SCbyPidvls=
+github.com/adrg/xdg v0.4.0/go.mod h1:N6ag73EX4wyxeaoeHctc1mas01KZgsj5tYiAIwqJE/E=
+github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c=
+github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM=
-github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc=
+github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
+github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw=
+golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18=
+golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/help.go b/help.go
index 15af7d5..0f5fa43 100644
--- a/help.go
+++ b/help.go
@@ -5,7 +5,7 @@ import (
 	"fmt"
 	"os"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type UsageSection struct {
@@ -16,7 +16,7 @@ type UsageSection struct {
 	ExpectedFlags []string
 }
 
-//PrintSection prints out the section name, description and each of the flags
+// PrintSection prints out the section name, description and each of the flags
 func (u *UsageSection) PrintSection(max_length int, extended bool) {
 	// Do not print if extended usage not requested and section marked as hidden
 	if !extended && u.Hidden {
@@ -35,7 +35,7 @@ type UsageFlag struct {
 	Default     string
 }
 
-//PrintFlag prints out the flag name, usage string and default value
+// PrintFlag prints out the flag name, usage string and default value
 func (f *UsageFlag) PrintFlag(max_length int) {
 	// Create format string, used for padding
 	format := fmt.Sprintf("  -%%-%ds %%s", max_length)
@@ -61,7 +61,7 @@ func Usage() {
 		Description:   "",
 		Flags:         make([]UsageFlag, 0),
 		Hidden:        false,
-		ExpectedFlags: []string{"ac", "acc", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"},
+		ExpectedFlags: []string{"ac", "acc", "ack", "ach", "acs", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "scraperfile", "scrapers", "search", "s", "sa", "se", "sf", "t", "v", "V"},
 	}
 	u_compat := UsageSection{
 		Name:          "COMPATIBILITY OPTIONS",
@@ -75,14 +75,14 @@ func Usage() {
 		Description:   "Matchers for the response filtering.",
 		Flags:         make([]UsageFlag, 0),
 		Hidden:        false,
-		ExpectedFlags: []string{"mc", "ml", "mr", "ms", "mt", "mw"},
+		ExpectedFlags: []string{"mmode", "mc", "ml", "mr", "ms", "mt", "mw"},
 	}
 	u_filter := UsageSection{
 		Name:          "FILTER OPTIONS",
 		Description:   "Filters for the response filtering.",
 		Flags:         make([]UsageFlag, 0),
 		Hidden:        false,
-		ExpectedFlags: []string{"fc", "fl", "fr", "fs", "ft", "fw"},
+		ExpectedFlags: []string{"fmode", "fc", "fl", "fr", "fs", "ft", "fw"},
 	}
 	u_input := UsageSection{
 		Name:          "INPUT OPTIONS",
@@ -105,7 +105,7 @@ func Usage() {
 	flag.VisitAll(func(f *flag.Flag) {
 		found := false
 		for i, section := range sections {
-			if strInSlice(f.Name, section.ExpectedFlags) {
+			if ffuf.StrInSlice(f.Name, section.ExpectedFlags) {
 				sections[i].Flags = append(sections[i].Flags, UsageFlag{
 					Name:        f.Name,
 					Description: f.Usage,
@@ -149,12 +149,3 @@ func Usage() {
 
 	fmt.Printf("  More information and examples: https://github.com/ffuf/ffuf\n\n")
 }
-
-func strInSlice(val string, slice []string) bool {
-	for _, v := range slice {
-		if v == val {
-			return true
-		}
-	}
-	return false
-}
diff --git a/main.go b/main.go
index 48abe64..dcbdc05 100644
--- a/main.go
+++ b/main.go
@@ -4,17 +4,19 @@ import (
 	"context"
 	"flag"
 	"fmt"
-	"io/ioutil"
+	"io"
 	"log"
 	"os"
 	"strings"
+	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
-	"github.com/ffuf/ffuf/pkg/filter"
-	"github.com/ffuf/ffuf/pkg/input"
-	"github.com/ffuf/ffuf/pkg/interactive"
-	"github.com/ffuf/ffuf/pkg/output"
-	"github.com/ffuf/ffuf/pkg/runner"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/filter"
+	"github.com/ffuf/ffuf/v2/pkg/input"
+	"github.com/ffuf/ffuf/v2/pkg/interactive"
+	"github.com/ffuf/ffuf/v2/pkg/output"
+	"github.com/ffuf/ffuf/v2/pkg/runner"
+	"github.com/ffuf/ffuf/v2/pkg/scraper"
 )
 
 type multiStringFlag []string
@@ -45,7 +47,7 @@ func (m *wordlistFlag) Set(value string) error {
 	return nil
 }
 
-//ParseFlags parses the command line flags and (re)populates the ConfigOptions struct
+// ParseFlags parses the command line flags and (re)populates the ConfigOptions struct
 func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
 	var ignored bool
 	var cookies, autocalibrationstrings, headers, inputcommands multiStringFlag
@@ -62,6 +64,7 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
 	flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility")
 	flag.BoolVar(&opts.Output.OutputSkipEmptyFile, "or", opts.Output.OutputSkipEmptyFile, "Don't create the output file if we don't have results")
 	flag.BoolVar(&opts.General.AutoCalibration, "ac", opts.General.AutoCalibration, "Automatically calibrate filtering options")
+	flag.BoolVar(&opts.General.AutoCalibrationPerHost, "ach", opts.General.AutoCalibration, "Per host autocalibration")
 	flag.BoolVar(&opts.General.Colors, "c", opts.General.Colors, "Colorize output.")
 	flag.BoolVar(&opts.General.Json, "json", opts.General.Json, "JSON output, printing newline-delimited JSON records")
 	flag.BoolVar(&opts.General.Noninteractive, "noninteractive", opts.General.Noninteractive, "Disable the interactive console functionality")
@@ -84,7 +87,12 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
 	flag.IntVar(&opts.HTTP.RecursionDepth, "recursion-depth", opts.HTTP.RecursionDepth, "Maximum recursion depth.")
 	flag.IntVar(&opts.HTTP.Timeout, "timeout", opts.HTTP.Timeout, "HTTP request timeout in seconds.")
 	flag.IntVar(&opts.Input.InputNum, "input-num", opts.Input.InputNum, "Number of inputs to test. Used in conjunction with --input-cmd.")
+	flag.StringVar(&opts.General.AutoCalibrationKeyword, "ack", opts.General.AutoCalibrationKeyword, "Autocalibration keyword")
+	flag.StringVar(&opts.General.AutoCalibrationStrategy, "acs", opts.General.AutoCalibrationStrategy, "Autocalibration strategy: \"basic\" or \"advanced\"")
 	flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file")
+	flag.StringVar(&opts.General.ScraperFile, "scraperfile", "", "Custom scraper file path")
+	flag.StringVar(&opts.General.Scrapers, "scrapers", opts.General.Scrapers, "Active scraper groups")
+	flag.StringVar(&opts.Filter.Mode, "fmode", opts.Filter.Mode, "Filter set operator. Either of: and, or")
 	flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges")
 	flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp")
 	flag.StringVar(&opts.Filter.Size, "fs", opts.Filter.Size, "Filter HTTP response size. Comma separated list of sizes and ranges")
@@ -92,6 +100,7 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
 	flag.StringVar(&opts.Filter.Time, "ft", opts.Filter.Time, "Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100")
 	flag.StringVar(&opts.Filter.Words, "fw", opts.Filter.Words, "Filter by amount of words in response. Comma separated list of word counts and ranges")
 	flag.StringVar(&opts.General.Delay, "p", opts.General.Delay, "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"")
+	flag.StringVar(&opts.General.Searchhash, "search", opts.General.Searchhash, "Search for a FFUFHASH payload from ffuf history")
 	flag.StringVar(&opts.HTTP.Data, "d", opts.HTTP.Data, "POST data")
 	flag.StringVar(&opts.HTTP.Data, "data", opts.HTTP.Data, "POST data (alias of -d)")
 	flag.StringVar(&opts.HTTP.Data, "data-ascii", opts.HTTP.Data, "POST data (alias of -d)")
@@ -107,6 +116,7 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
 	flag.StringVar(&opts.Input.InputShell, "input-shell", opts.Input.InputShell, "Shell to be used for running command")
 	flag.StringVar(&opts.Input.Request, "request", opts.Input.Request, "File containing the raw http request")
 	flag.StringVar(&opts.Input.RequestProto, "request-proto", opts.Input.RequestProto, "Protocol to use along with raw request")
+	flag.StringVar(&opts.Matcher.Mode, "mmode", opts.Matcher.Mode, "Matcher set operator. Either of: and, or")
 	flag.StringVar(&opts.Matcher.Lines, "ml", opts.Matcher.Lines, "Match amount of lines in response")
 	flag.StringVar(&opts.Matcher.Regexp, "mr", opts.Matcher.Regexp, "Match regexp")
 	flag.StringVar(&opts.Matcher.Size, "ms", opts.Matcher.Size, "Match HTTP response size")
@@ -137,13 +147,43 @@ func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions {
 func main() {
 
 	var err, optserr error
-
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
 	// prepare the default config options from default config file
 	var opts *ffuf.ConfigOptions
 	opts, optserr = ffuf.ReadDefaultConfig()
 
 	opts = ParseFlags(opts)
 
+	// Handle searchhash functionality and exit
+	if opts.General.Searchhash != "" {
+		coptions, pos, err := ffuf.SearchHash(opts.General.Searchhash)
+		if err != nil {
+			fmt.Printf("[ERR] %s\n", err)
+			os.Exit(1)
+		}
+		if len(coptions) > 0 {
+			fmt.Printf("Request candidate(s) for hash %s\n", opts.General.Searchhash)
+		}
+		for _, copt := range coptions {
+			conf, err := ffuf.ConfigFromOptions(&copt.ConfigOptions, ctx, cancel)
+			if err != nil {
+				continue
+			}
+			ok, reason := ffuf.HistoryReplayable(conf)
+			if ok {
+				printSearchResults(conf, pos, copt.Time, opts.General.Searchhash)
+			} else {
+				fmt.Printf("[ERR] Hash cannot be mapped back because %s\n", reason)
+			}
+
+		}
+		if err != nil {
+			fmt.Printf("[ERR] %s\n", err)
+		}
+		os.Exit(0)
+	}
+
 	if opts.General.ShowVersion {
 		fmt.Printf("ffuf version: %s\n", ffuf.Version())
 		os.Exit(0)
@@ -152,13 +192,13 @@ func main() {
 		f, err := os.OpenFile(opts.Output.DebugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
 		if err != nil {
 			fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err)
-			log.SetOutput(ioutil.Discard)
+			log.SetOutput(io.Discard)
 		} else {
 			log.SetOutput(f)
 			defer f.Close()
 		}
 	} else {
-		log.SetOutput(ioutil.Discard)
+		log.SetOutput(io.Discard)
 	}
 	if optserr != nil {
 		log.Printf("Error while opening default config file: %s", optserr)
@@ -178,9 +218,7 @@ func main() {
 		opts = ParseFlags(opts)
 	}
 
-	// Prepare context and set up Config struct
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
+	// Set up Config struct
 	conf, err := ffuf.ConfigFromOptions(opts, ctx, cancel)
 	if err != nil {
 		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
@@ -188,6 +226,7 @@ func main() {
 		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
 		os.Exit(1)
 	}
+
 	job, err := prepareJob(conf)
 	if err != nil {
 		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
@@ -195,17 +234,13 @@ func main() {
 		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
 		os.Exit(1)
 	}
-	if err := filter.SetupFilters(opts, conf); err != nil {
+	if err := SetupFilters(opts, conf); err != nil {
 		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
 		Usage()
 		fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
 		os.Exit(1)
 	}
 
-	if err := filter.CalibrateIfNeeded(job); err != nil {
-		fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err)
-		os.Exit(1)
-	}
 	if !conf.Noninteractive {
 		go func() {
 			err := interactive.Handle(job)
@@ -220,6 +255,7 @@ func main() {
 }
 
 func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
+	var err error
 	job := ffuf.NewJob(conf)
 	var errs ffuf.Multierror
 	job.Input, errs = input.NewInputProvider(conf)
@@ -231,5 +267,139 @@ func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
 	}
 	// We only have stdout outputprovider right now
 	job.Output = output.NewOutputProviderByName("stdout", conf)
+
+	// Initialize scraper
+	newscraper, scraper_err := scraper.FromDir(ffuf.SCRAPERDIR, conf.Scrapers)
+	if scraper_err.ErrorOrNil() != nil {
+		errs.Add(scraper_err.ErrorOrNil())
+	}
+	job.Scraper = newscraper
+	if conf.ScraperFile != "" {
+		err = job.Scraper.AppendFromFile(conf.ScraperFile)
+		if err != nil {
+			errs.Add(err)
+		}
+	}
 	return job, errs.ErrorOrNil()
 }
+
+func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error {
+	errs := ffuf.NewMultierror()
+	conf.MatcherManager = filter.NewMatcherManager()
+	// If any other matcher is set, ignore -mc default value
+	matcherSet := false
+	statusSet := false
+	warningIgnoreBody := false
+	flag.Visit(func(f *flag.Flag) {
+		if f.Name == "mc" {
+			statusSet = true
+		}
+		if f.Name == "ms" {
+			matcherSet = true
+			warningIgnoreBody = true
+		}
+		if f.Name == "ml" {
+			matcherSet = true
+			warningIgnoreBody = true
+		}
+		if f.Name == "mr" {
+			matcherSet = true
+		}
+		if f.Name == "mt" {
+			matcherSet = true
+		}
+		if f.Name == "mw" {
+			matcherSet = true
+			warningIgnoreBody = true
+		}
+	})
+	// Only add the default status matcher if -mc was given explicitly or no other matcher was set
+	if statusSet || !matcherSet {
+		if err := conf.MatcherManager.AddMatcher("status", parseOpts.Matcher.Status); err != nil {
+			errs.Add(err)
+		}
+	}
+
+	if parseOpts.Filter.Status != "" {
+		if err := conf.MatcherManager.AddFilter("status", parseOpts.Filter.Status, false); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Filter.Size != "" {
+		warningIgnoreBody = true
+		if err := conf.MatcherManager.AddFilter("size", parseOpts.Filter.Size, false); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Filter.Regexp != "" {
+		if err := conf.MatcherManager.AddFilter("regexp", parseOpts.Filter.Regexp, false); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Filter.Words != "" {
+		warningIgnoreBody = true
+		if err := conf.MatcherManager.AddFilter("word", parseOpts.Filter.Words, false); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Filter.Lines != "" {
+		warningIgnoreBody = true
+		if err := conf.MatcherManager.AddFilter("line", parseOpts.Filter.Lines, false); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Filter.Time != "" {
+		if err := conf.MatcherManager.AddFilter("time", parseOpts.Filter.Time, false); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Matcher.Size != "" {
+		if err := conf.MatcherManager.AddMatcher("size", parseOpts.Matcher.Size); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Matcher.Regexp != "" {
+		if err := conf.MatcherManager.AddMatcher("regexp", parseOpts.Matcher.Regexp); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Matcher.Words != "" {
+		if err := conf.MatcherManager.AddMatcher("word", parseOpts.Matcher.Words); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Matcher.Lines != "" {
+		if err := conf.MatcherManager.AddMatcher("line", parseOpts.Matcher.Lines); err != nil {
+			errs.Add(err)
+		}
+	}
+	if parseOpts.Matcher.Time != "" {
+		if err := conf.MatcherManager.AddMatcher("time", parseOpts.Matcher.Time); err != nil {
+			errs.Add(err)
+		}
+	}
+	if conf.IgnoreBody && warningIgnoreBody {
+		fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n")
+	}
+	return errs.ErrorOrNil()
+}
+
+func printSearchResults(conf *ffuf.Config, pos int, exectime time.Time, hash string) {
+	inp, err := input.NewInputProvider(conf)
+	if err.ErrorOrNil() != nil {
+		fmt.Printf("-------------------------------------------\n")
+		fmt.Println("Encountered error that prevents reproduction of the request:")
+		fmt.Println(err.ErrorOrNil())
+		return
+	}
+	inp.SetPosition(pos)
+	inputdata := inp.Value()
+	inputdata["FFUFHASH"] = []byte(hash)
+	basereq := ffuf.BaseRequest(conf)
+	dummyrunner := runner.NewRunnerByName("simple", conf, false)
+	ffufreq, _ := dummyrunner.Prepare(inputdata, &basereq)
+	rawreq, _ := dummyrunner.Dump(&ffufreq)
+	fmt.Printf("-------------------------------------------\n")
+	fmt.Printf("ffuf job started at: %s\n\n", exectime.Format(time.RFC3339))
+	fmt.Printf("%s\n", string(rawreq))
+}
diff --git a/pkg/ffuf/autocalibration.go b/pkg/ffuf/autocalibration.go
new file mode 100644
index 0000000..e9a5aeb
--- /dev/null
+++ b/pkg/ffuf/autocalibration.go
@@ -0,0 +1,233 @@
+package ffuf
+
+import (
+	"fmt"
+	"log"
+	"math/rand"
+	"strconv"
+	"time"
+)
+
+func (j *Job) autoCalibrationStrings() map[string][]string {
+	rand.Seed(time.Now().UnixNano())
+	cInputs := make(map[string][]string)
+	if len(j.Config.AutoCalibrationStrings) < 1 {
+		cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(16))
+		cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(8))
+		cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(16))
+		cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(8))
+		cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(16))
+		cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(8))
+		if j.Config.AutoCalibrationStrategy == "advanced" {
+			// Add directory tests and .htaccess too
+			cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(16)+"/")
+			cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(8)+"/")
+			cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(16)+"/")
+			cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(8)+"/")
+		}
+	} else {
+		cInputs["custom"] = append(cInputs["custom"], j.Config.AutoCalibrationStrings...)
+	}
+	return cInputs
+}
+
+func (j *Job) calibrationRequest(inputs map[string][]byte) (Response, error) {
+	basereq := BaseRequest(j.Config)
+	req, err := j.Runner.Prepare(inputs, &basereq)
+	if err != nil {
+		j.Output.Error(fmt.Sprintf("Encountered an error while preparing autocalibration request: %s\n", err))
+		j.incError()
+		log.Printf("%s", err)
+		return Response{}, err
+	}
+	resp, err := j.Runner.Execute(&req)
+	if err != nil {
+		j.Output.Error(fmt.Sprintf("Encountered an error while executing autocalibration request: %s\n", err))
+		j.incError()
+		log.Printf("%s", err)
+		return Response{}, err
+	}
+	// Only calibrate on responses that would be matched otherwise
+	if j.isMatch(resp) {
+		return resp, nil
+	}
+	return resp, fmt.Errorf("Response wouldn't be matched")
+}
+
+// CalibrateForHost runs autocalibration for a specific host
+func (j *Job) CalibrateForHost(host string, baseinput map[string][]byte) error {
+	if j.Config.MatcherManager.CalibratedForDomain(host) {
+		return nil
+	}
+	if baseinput[j.Config.AutoCalibrationKeyword] == nil {
+		return fmt.Errorf("Autocalibration keyword \"%s\" not found in the request.", j.Config.AutoCalibrationKeyword)
+	}
+	cStrings := j.autoCalibrationStrings()
+	input := make(map[string][]byte)
+	for k, v := range baseinput {
+		input[k] = v
+	}
+	for _, v := range cStrings {
+		responses := make([]Response, 0)
+		for _, cs := range v {
+			input[j.Config.AutoCalibrationKeyword] = []byte(cs)
+			resp, err := j.calibrationRequest(input)
+			if err != nil {
+				continue
+			}
+			responses = append(responses, resp)
+			err = j.calibrateFilters(responses, true)
+			if err != nil {
+				j.Output.Error(fmt.Sprintf("%s", err))
+			}
+		}
+	}
+	j.Config.MatcherManager.SetCalibratedForHost(host, true)
+	return nil
+}
+
+// Calibrate runs filter autocalibration by issuing randomly generated requests and configuring filters from the responses
+func (j *Job) Calibrate(input map[string][]byte) error {
+	if j.Config.MatcherManager.Calibrated() {
+		return nil
+	}
+	cInputs := j.autoCalibrationStrings()
+
+	for _, v := range cInputs {
+		responses := make([]Response, 0)
+		for _, cs := range v {
+			input[j.Config.AutoCalibrationKeyword] = []byte(cs)
+			resp, err := j.calibrationRequest(input)
+			if err != nil {
+				continue
+			}
+			responses = append(responses, resp)
+		}
+		_ = j.calibrateFilters(responses, false)
+	}
+	j.Config.MatcherManager.SetCalibrated(true)
+	return nil
+}
+
+// CalibrateIfNeeded runs a self-calibration task for filtering options
+// (if autocalibration is enabled) by requesting random resources and
+// configuring the filters accordingly.
+func (j *Job) CalibrateIfNeeded(host string, input map[string][]byte) error {
+	j.calibMutex.Lock()
+	defer j.calibMutex.Unlock()
+	if !j.Config.AutoCalibration {
+		return nil
+	}
+	if j.Config.AutoCalibrationPerHost {
+		return j.CalibrateForHost(host, input)
+	}
+	return j.Calibrate(input)
+}
+
+func (j *Job) calibrateFilters(responses []Response, perHost bool) error {
+	// Work down from the most specific common denominator
+	if len(responses) > 0 {
+		// Content length
+		baselineSize := responses[0].ContentLength
+		sizeMatch := true
+		for _, r := range responses {
+			if baselineSize != r.ContentLength {
+				sizeMatch = false
+			}
+		}
+		if sizeMatch {
+			if perHost {
+				// Check if already filtered
+				for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) {
+					match, _ := f.Filter(&responses[0])
+					if match {
+						// Already filtered
+						return nil
+					}
+				}
+				_ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "size", strconv.FormatInt(baselineSize, 10))
+				return nil
+			} else {
+				// Check if already filtered
+				for _, f := range j.Config.MatcherManager.GetFilters() {
+					match, _ := f.Filter(&responses[0])
+					if match {
+						// Already filtered
+						return nil
+					}
+				}
+				_ = j.Config.MatcherManager.AddFilter("size", strconv.FormatInt(baselineSize, 10), false)
+				return nil
+			}
+		}
+
+		// Content words
+		baselineWords := responses[0].ContentWords
+		wordsMatch := true
+		for _, r := range responses {
+			if baselineWords != r.ContentWords {
+				wordsMatch = false
+			}
+		}
+		if wordsMatch {
+			if perHost {
+				// Check if already filtered
+				for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) {
+					match, _ := f.Filter(&responses[0])
+					if match {
+						// Already filtered
+						return nil
+					}
+				}
+				_ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "word", strconv.FormatInt(baselineWords, 10))
+				return nil
+			} else {
+				// Check if already filtered
+				for _, f := range j.Config.MatcherManager.GetFilters() {
+					match, _ := f.Filter(&responses[0])
+					if match {
+						// Already filtered
+						return nil
+					}
+				}
+				_ = j.Config.MatcherManager.AddFilter("word", strconv.FormatInt(baselineWords, 10), false)
+				return nil
+			}
+		}
+
+		// Content lines
+		baselineLines := responses[0].ContentLines
+		linesMatch := true
+		for _, r := range responses {
+			if baselineLines != r.ContentLines {
+				linesMatch = false
+			}
+		}
+		if linesMatch {
+			if perHost {
+				// Check if already filtered
+				for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) {
+					match, _ := f.Filter(&responses[0])
+					if match {
+						// Already filtered
+						return nil
+					}
+				}
+				_ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "line", strconv.FormatInt(baselineLines, 10))
+				return nil
+			} else {
+				// Check if already filtered
+				for _, f := range j.Config.MatcherManager.GetFilters() {
+					match, _ := f.Filter(&responses[0])
+					if match {
+						// Already filtered
+						return nil
+					}
+				}
+				_ = j.Config.MatcherManager.AddFilter("line", strconv.FormatInt(baselineLines, 10), false)
+				return nil
+			}
+		}
+	}
+	return fmt.Errorf("No common filtering values found")
+}
diff --git a/pkg/ffuf/config.go b/pkg/ffuf/config.go
index 48fc86c..81e3a39 100644
--- a/pkg/ffuf/config.go
+++ b/pkg/ffuf/config.go
@@ -5,54 +5,64 @@ import (
 )
 
 type Config struct {
-	AutoCalibration        bool                      `json:"autocalibration"`
-	AutoCalibrationStrings []string                  `json:"autocalibration_strings"`
-	Cancel                 context.CancelFunc        `json:"-"`
-	Colors                 bool                      `json:"colors"`
-	CommandKeywords        []string                  `json:"-"`
-	CommandLine            string                    `json:"cmdline"`
-	ConfigFile             string                    `json:"configfile"`
-	Context                context.Context           `json:"-"`
-	Data                   string                    `json:"postdata"`
-	Delay                  optRange                  `json:"delay"`
-	DirSearchCompat        bool                      `json:"dirsearch_compatibility"`
-	Extensions             []string                  `json:"extensions"`
-	Filters                map[string]FilterProvider `json:"filters"`
-	FollowRedirects        bool                      `json:"follow_redirects"`
-	Headers                map[string]string         `json:"headers"`
-	IgnoreBody             bool                      `json:"ignorebody"`
-	IgnoreWordlistComments bool                      `json:"ignore_wordlist_comments"`
-	InputMode              string                    `json:"inputmode"`
-	InputNum               int                       `json:"cmd_inputnum"`
-	InputProviders         []InputProviderConfig     `json:"inputproviders"`
-	InputShell             string                    `json:"inputshell"`
-	Json                   bool                      `json:"json"`
-	Matchers               map[string]FilterProvider `json:"matchers"`
-	MaxTime                int                       `json:"maxtime"`
-	MaxTimeJob             int                       `json:"maxtime_job"`
-	Method                 string                    `json:"method"`
-	Noninteractive         bool                      `json:"noninteractive"`
-	OutputDirectory        string                    `json:"outputdirectory"`
-	OutputFile             string                    `json:"outputfile"`
-	OutputFormat           string                    `json:"outputformat"`
-	OutputSkipEmptyFile    bool                      `json:"OutputSkipEmptyFile"`
-	ProgressFrequency      int                       `json:"-"`
-	ProxyURL               string                    `json:"proxyurl"`
-	Quiet                  bool                      `json:"quiet"`
-	Rate                   int64                     `json:"rate"`
-	Recursion              bool                      `json:"recursion"`
-	RecursionDepth         int                       `json:"recursion_depth"`
-	RecursionStrategy      string                    `json:"recursion_strategy"`
-	ReplayProxyURL         string                    `json:"replayproxyurl"`
-	SNI                    string                    `json:"sni"`
-	StopOn403              bool                      `json:"stop_403"`
-	StopOnAll              bool                      `json:"stop_all"`
-	StopOnErrors           bool                      `json:"stop_errors"`
-	Threads                int                       `json:"threads"`
-	Timeout                int                       `json:"timeout"`
-	Url                    string                    `json:"url"`
-	Verbose                bool                      `json:"verbose"`
-	Http2                  bool                      `json:"http2"`
+	AutoCalibration         bool                  `json:"autocalibration"`
+	AutoCalibrationKeyword  string                `json:"autocalibration_keyword"`
+	AutoCalibrationPerHost  bool                  `json:"autocalibration_perhost"`
+	AutoCalibrationStrategy string                `json:"autocalibration_strategy"`
+	AutoCalibrationStrings  []string              `json:"autocalibration_strings"`
+	Cancel                  context.CancelFunc    `json:"-"`
+	Colors                  bool                  `json:"colors"`
+	CommandKeywords         []string              `json:"-"`
+	CommandLine             string                `json:"cmdline"`
+	ConfigFile              string                `json:"configfile"`
+	Context                 context.Context       `json:"-"`
+	Data                    string                `json:"postdata"`
+	Debuglog                string                `json:"debuglog"`
+	Delay                   optRange              `json:"delay"`
+	DirSearchCompat         bool                  `json:"dirsearch_compatibility"`
+	Extensions              []string              `json:"extensions"`
+	FilterMode              string                `json:"fmode"`
+	FollowRedirects         bool                  `json:"follow_redirects"`
+	Headers                 map[string]string     `json:"headers"`
+	IgnoreBody              bool                  `json:"ignorebody"`
+	IgnoreWordlistComments  bool                  `json:"ignore_wordlist_comments"`
+	InputMode               string                `json:"inputmode"`
+	InputNum                int                   `json:"cmd_inputnum"`
+	InputProviders          []InputProviderConfig `json:"inputproviders"`
+	InputShell              string                `json:"inputshell"`
+	Json                    bool                  `json:"json"`
+	MatcherManager          MatcherManager        `json:"matchers"`
+	MatcherMode             string                `json:"mmode"`
+	MaxTime                 int                   `json:"maxtime"`
+	MaxTimeJob              int                   `json:"maxtime_job"`
+	Method                  string                `json:"method"`
+	Noninteractive          bool                  `json:"noninteractive"`
+	OutputDirectory         string                `json:"outputdirectory"`
+	OutputFile              string                `json:"outputfile"`
+	OutputFormat            string                `json:"outputformat"`
+	OutputSkipEmptyFile     bool                  `json:"OutputSkipEmptyFile"`
+	ProgressFrequency       int                   `json:"-"`
+	ProxyURL                string                `json:"proxyurl"`
+	Quiet                   bool                  `json:"quiet"`
+	Rate                    int64                 `json:"rate"`
+	Recursion               bool                  `json:"recursion"`
+	RecursionDepth          int                   `json:"recursion_depth"`
+	RecursionStrategy       string                `json:"recursion_strategy"`
+	ReplayProxyURL          string                `json:"replayproxyurl"`
+	RequestFile             string                `json:"requestfile"`
+	RequestProto            string                `json:"requestproto"`
+	ScraperFile             string                `json:"scraperfile"`
+	Scrapers                string                `json:"scrapers"`
+	SNI                     string                `json:"sni"`
+	StopOn403               bool                  `json:"stop_403"`
+	StopOnAll               bool                  `json:"stop_all"`
+	StopOnErrors            bool                  `json:"stop_errors"`
+	Threads                 int                   `json:"threads"`
+	Timeout                 int                   `json:"timeout"`
+	Url                     string                `json:"url"`
+	Verbose                 bool                  `json:"verbose"`
+	Wordlists               []string              `json:"wordlists"`
+	Http2                   bool                  `json:"http2"`
 }
 
 type InputProviderConfig struct {
@@ -64,15 +74,18 @@ type InputProviderConfig struct {
 
 func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
 	var conf Config
+	conf.AutoCalibrationKeyword = "FUZZ"
+	conf.AutoCalibrationStrategy = "basic"
 	conf.AutoCalibrationStrings = make([]string, 0)
 	conf.CommandKeywords = make([]string, 0)
 	conf.Context = ctx
 	conf.Cancel = cancel
 	conf.Data = ""
+	conf.Debuglog = ""
 	conf.Delay = optRange{0, 0, false, false}
 	conf.DirSearchCompat = false
 	conf.Extensions = make([]string, 0)
-	conf.Filters = make(map[string]FilterProvider)
+	conf.FilterMode = "or"
 	conf.FollowRedirects = false
 	conf.Headers = make(map[string]string)
 	conf.IgnoreWordlistComments = false
@@ -81,7 +94,7 @@ func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
 	conf.InputShell = ""
 	conf.InputProviders = make([]InputProviderConfig, 0)
 	conf.Json = false
-	conf.Matchers = make(map[string]FilterProvider)
+	conf.MatcherMode = "or"
 	conf.MaxTime = 0
 	conf.MaxTimeJob = 0
 	conf.Method = "GET"
@@ -93,13 +106,18 @@ func NewConfig(ctx context.Context, cancel context.CancelFunc) Config {
 	conf.Recursion = false
 	conf.RecursionDepth = 0
 	conf.RecursionStrategy = "default"
+	conf.RequestFile = ""
+	conf.RequestProto = "https"
 	conf.SNI = ""
+	conf.ScraperFile = ""
+	conf.Scrapers = "all"
 	conf.StopOn403 = false
 	conf.StopOnAll = false
 	conf.StopOnErrors = false
 	conf.Timeout = 10
 	conf.Url = ""
 	conf.Verbose = false
+	conf.Wordlists = []string{}
 	conf.Http2 = false
 	return conf
 }
diff --git a/pkg/ffuf/configmarshaller.go b/pkg/ffuf/configmarshaller.go
new file mode 100644
index 0000000..ce733a2
--- /dev/null
+++ b/pkg/ffuf/configmarshaller.go
@@ -0,0 +1,129 @@
+package ffuf
+
+import (
+	"fmt"
+	"strings"
+)
+
+func (c *Config) ToOptions() ConfigOptions {
+	o := ConfigOptions{}
+	// HTTP options
+	o.HTTP.Cookies = []string{}
+	o.HTTP.Data = c.Data
+	o.HTTP.FollowRedirects = c.FollowRedirects
+	o.HTTP.Headers = make([]string, 0)
+	for k, v := range c.Headers {
+		o.HTTP.Headers = append(o.HTTP.Headers, fmt.Sprintf("%s: %s", k, v))
+	}
+	o.HTTP.IgnoreBody = c.IgnoreBody
+	o.HTTP.Method = c.Method
+	o.HTTP.ProxyURL = c.ProxyURL
+	o.HTTP.Recursion = c.Recursion
+	o.HTTP.RecursionDepth = c.RecursionDepth
+	o.HTTP.RecursionStrategy = c.RecursionStrategy
+	o.HTTP.ReplayProxyURL = c.ReplayProxyURL
+	o.HTTP.SNI = c.SNI
+	o.HTTP.Timeout = c.Timeout
+	o.HTTP.URL = c.Url
+	o.HTTP.Http2 = c.Http2
+
+	o.General.AutoCalibration = c.AutoCalibration
+	o.General.AutoCalibrationKeyword = c.AutoCalibrationKeyword
+	o.General.AutoCalibrationPerHost = c.AutoCalibrationPerHost
+	o.General.AutoCalibrationStrategy = c.AutoCalibrationStrategy
+	o.General.AutoCalibrationStrings = c.AutoCalibrationStrings
+	o.General.Colors = c.Colors
+	o.General.ConfigFile = ""
+	if c.Delay.HasDelay {
+		if c.Delay.IsRange {
+			o.General.Delay = fmt.Sprintf("%.2f-%.2f", c.Delay.Min, c.Delay.Max)
+		} else {
+			o.General.Delay = fmt.Sprintf("%.2f", c.Delay.Min)
+		}
+	} else {
+		o.General.Delay = ""
+	}
+	o.General.Json = c.Json
+	o.General.MaxTime = c.MaxTime
+	o.General.MaxTimeJob = c.MaxTimeJob
+	o.General.Noninteractive = c.Noninteractive
+	o.General.Quiet = c.Quiet
+	o.General.Rate = int(c.Rate)
+	o.General.ScraperFile = c.ScraperFile
+	o.General.Scrapers = c.Scrapers
+	o.General.StopOn403 = c.StopOn403
+	o.General.StopOnAll = c.StopOnAll
+	o.General.StopOnErrors = c.StopOnErrors
+	o.General.Threads = c.Threads
+	o.General.Verbose = c.Verbose
+
+	o.Input.DirSearchCompat = c.DirSearchCompat
+	o.Input.Extensions = strings.Join(c.Extensions, ",")
+	o.Input.IgnoreWordlistComments = c.IgnoreWordlistComments
+	o.Input.InputMode = c.InputMode
+	o.Input.InputNum = c.InputNum
+	o.Input.InputShell = c.InputShell
+	o.Input.Inputcommands = []string{}
+	for _, v := range c.InputProviders {
+		if v.Name == "command" {
+			o.Input.Inputcommands = append(o.Input.Inputcommands, fmt.Sprintf("%s:%s", v.Value, v.Keyword))
+		}
+	}
+	o.Input.Request = c.RequestFile
+	o.Input.RequestProto = c.RequestProto
+	o.Input.Wordlists = c.Wordlists
+
+	o.Output.DebugLog = c.Debuglog
+	o.Output.OutputDirectory = c.OutputDirectory
+	o.Output.OutputFile = c.OutputFile
+	o.Output.OutputFormat = c.OutputFormat
+	o.Output.OutputSkipEmptyFile = c.OutputSkipEmptyFile
+
+	o.Filter.Mode = c.FilterMode
+	o.Filter.Lines = ""
+	o.Filter.Regexp = ""
+	o.Filter.Size = ""
+	o.Filter.Status = ""
+	o.Filter.Time = ""
+	o.Filter.Words = ""
+	for name, filter := range c.MatcherManager.GetFilters() {
+		switch name {
+		case "line":
+			o.Filter.Lines = filter.Repr()
+		case "regexp":
+			o.Filter.Regexp = filter.Repr()
+		case "size":
+			o.Filter.Size = filter.Repr()
+		case "status":
+			o.Filter.Status = filter.Repr()
+		case "time":
+			o.Filter.Time = filter.Repr()
+		case "words":
+			o.Filter.Words = filter.Repr()
+		}
+	}
+	o.Matcher.Mode = c.MatcherMode
+	o.Matcher.Lines = ""
+	o.Matcher.Regexp = ""
+	o.Matcher.Size = ""
+	o.Matcher.Status = ""
+	o.Matcher.Time = ""
+	o.Matcher.Words = ""
+	for name, filter := range c.MatcherManager.GetMatchers() {
+		switch name {
+		case "line":
+			o.Matcher.Lines = filter.Repr()
+		case "regexp":
+			o.Matcher.Regexp = filter.Repr()
+		case "size":
+			o.Matcher.Size = filter.Repr()
+		case "status":
+			o.Matcher.Status = filter.Repr()
+		case "time":
+			o.Matcher.Time = filter.Repr()
+		case "words":
+			o.Matcher.Words = filter.Repr()
+		}
+	}
+	return o
+}
diff --git a/pkg/ffuf/constants.go b/pkg/ffuf/constants.go
new file mode 100644
index 0000000..1dd0035
--- /dev/null
+++ b/pkg/ffuf/constants.go
@@ -0,0 +1,16 @@
+package ffuf
+
+import (
+	"github.com/adrg/xdg"
+	"path/filepath"
+)
+
+var (
+	//VERSION holds the current version number
+	VERSION = "2.0.0"
+	//VERSION_APPENDIX holds additional version definition
+	VERSION_APPENDIX = "-dev"
+	CONFIGDIR        = filepath.Join(xdg.ConfigHome, "ffuf")
+	HISTORYDIR       = filepath.Join(CONFIGDIR, "history")
+	SCRAPERDIR       = filepath.Join(CONFIGDIR, "scraper")
+)
diff --git a/pkg/ffuf/history.go b/pkg/ffuf/history.go
new file mode 100644
index 0000000..b3302ad
--- /dev/null
+++ b/pkg/ffuf/history.go
@@ -0,0 +1,92 @@
+package ffuf
+
+import (
+	"crypto/sha256"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strconv"
+	"strings"
+	"time"
+)
+
+type ConfigOptionsHistory struct {
+	ConfigOptions
+	Time time.Time `json:"time"`
+}
+
+func WriteHistoryEntry(conf *Config) (string, error) {
+	options := ConfigOptionsHistory{
+		ConfigOptions: conf.ToOptions(),
+		Time:          time.Now(),
+	}
+	jsonoptions, err := json.Marshal(options)
+	if err != nil {
+		return "", err
+	}
+	hashstr := calculateHistoryHash(jsonoptions)
+	err = createConfigDir(filepath.Join(HISTORYDIR, hashstr))
+	if err != nil {
+		return "", err
+	}
+	err = os.WriteFile(filepath.Join(HISTORYDIR, hashstr, "options"), jsonoptions, 0640)
+	return hashstr, err
+}
+
+func calculateHistoryHash(options []byte) string {
+	return fmt.Sprintf("%x", sha256.Sum256(options))
+}
+
+func SearchHash(hash string) ([]ConfigOptionsHistory, int, error) {
+	coptions := make([]ConfigOptionsHistory, 0)
+	if len(hash) < 6 {
+		return coptions, 0, errors.New("bad FFUFHASH value")
+	}
+	historypart := hash[0:5]
+	position, err := strconv.ParseInt(hash[5:], 16, 32)
+	if err != nil {
+		return coptions, 0, errors.New("bad positional value in FFUFHASH")
+	}
+	all_dirs, err := os.ReadDir(HISTORYDIR)
+	if err != nil {
+		return coptions, 0, err
+	}
+	matched_dirs := make([]string, 0)
+	for _, filename := range all_dirs {
+		if filename.IsDir() {
+			if strings.HasPrefix(strings.ToLower(filename.Name()), strings.ToLower(historypart)) {
+				matched_dirs = append(matched_dirs, filename.Name())
+			}
+		}
+	}
+	for _, dirname := range matched_dirs {
+		copts, err := configFromHistory(filepath.Join(HISTORYDIR, dirname))
+		if err != nil {
+			continue
+		}
+		coptions = append(coptions, copts)
+
+	}
+	return coptions, int(position), err
+}
+
+func HistoryReplayable(conf *Config) (bool, string) {
+	for _, w := range conf.Wordlists {
+		if w == "-" || strings.HasPrefix(w, "-:") {
+			return false, "stdin input was used for one of the wordlists"
+		}
+	}
+	return true, ""
+}
+
+func configFromHistory(dirname string) (ConfigOptionsHistory, error) {
+	jsonOptions, err := os.ReadFile(filepath.Join(dirname, "options"))
+	if err != nil {
+		return ConfigOptionsHistory{}, err
+	}
+	tmpOptions := ConfigOptionsHistory{}
+	err = json.Unmarshal(jsonOptions, &tmpOptions)
+	return tmpOptions, err
+}
diff --git a/pkg/ffuf/interfaces.go b/pkg/ffuf/interfaces.go
index c36021f..16f627f 100644
--- a/pkg/ffuf/interfaces.go
+++ b/pkg/ffuf/interfaces.go
@@ -1,37 +1,57 @@
 package ffuf
 
-import "time"
+import (
+	"time"
+)
 
-//FilterProvider is a generic interface for both Matchers and Filters
+// MatcherManager provides functions for managing matchers and filters
+type MatcherManager interface {
+	SetCalibrated(calibrated bool)
+	SetCalibratedForHost(host string, calibrated bool)
+	AddFilter(name string, option string, replace bool) error
+	AddPerDomainFilter(domain string, name string, option string) error
+	RemoveFilter(name string)
+	AddMatcher(name string, option string) error
+	GetFilters() map[string]FilterProvider
+	GetMatchers() map[string]FilterProvider
+	FiltersForDomain(domain string) map[string]FilterProvider
+	CalibratedForDomain(domain string) bool
+	Calibrated() bool
+}
+
+// FilterProvider is a generic interface for both Matchers and Filters
 type FilterProvider interface {
 	Filter(response *Response) (bool, error)
 	Repr() string
 	ReprVerbose() string
 }
 
-//RunnerProvider is an interface for request executors
+// RunnerProvider is an interface for request executors
 type RunnerProvider interface {
 	Prepare(input map[string][]byte, basereq *Request) (Request, error)
 	Execute(req *Request) (Response, error)
+	Dump(req *Request) ([]byte, error)
 }
 
-//InputProvider interface handles the input data for RunnerProvider
+// InputProvider interface handles the input data for RunnerProvider
 type InputProvider interface {
 	ActivateKeywords([]string)
 	AddProvider(InputProviderConfig) error
 	Keywords() []string
 	Next() bool
 	Position() int
+	SetPosition(int)
 	Reset()
 	Value() map[string][]byte
 	Total() int
 }
 
-//InternalInputProvider interface handles providing input data to InputProvider
+// InternalInputProvider interface handles providing input data to InputProvider
 type InternalInputProvider interface {
 	Keyword() string
 	Next() bool
 	Position() int
+	SetPosition(int)
 	ResetPosition()
 	IncrementPosition()
 	Value() []byte
@@ -41,7 +61,7 @@ type InternalInputProvider interface {
 	Disable()
 }
 
-//OutputProvider is responsible of providing output from the RunnerProvider
+// OutputProvider is responsible of providing output from the RunnerProvider
 type OutputProvider interface {
 	Banner()
 	Finalize() error
@@ -59,18 +79,31 @@ type OutputProvider interface {
 	Cycle()
 }
 
+type Scraper interface {
+	Execute(resp *Response, matched bool) []ScraperResult
+	AppendFromFile(path string) error
+}
+
+type ScraperResult struct {
+	Name    string   `json:"name"`
+	Type    string   `json:"type"`
+	Action  []string `json:"action"`
+	Results []string `json:"results"`
+}
+
 type Result struct {
-	Input            map[string][]byte `json:"input"`
-	Position         int               `json:"position"`
-	StatusCode       int64             `json:"status"`
-	ContentLength    int64             `json:"length"`
-	ContentWords     int64             `json:"words"`
-	ContentLines     int64             `json:"lines"`
-	ContentType      string            `json:"content-type"`
-	RedirectLocation string            `json:"redirectlocation"`
-	Url              string            `json:"url"`
-	Duration         time.Duration     `json:"duration"`
-	ResultFile       string            `json:"resultfile"`
-	Host             string            `json:"host"`
-	HTMLColor        string            `json:"-"`
+	Input            map[string][]byte   `json:"input"`
+	Position         int                 `json:"position"`
+	StatusCode       int64               `json:"status"`
+	ContentLength    int64               `json:"length"`
+	ContentWords     int64               `json:"words"`
+	ContentLines     int64               `json:"lines"`
+	ContentType      string              `json:"content-type"`
+	RedirectLocation string              `json:"redirectlocation"`
+	Url              string              `json:"url"`
+	Duration         time.Duration       `json:"duration"`
+	ScraperData      map[string][]string `json:"scraper"`
+	ResultFile       string              `json:"resultfile"`
+	Host             string              `json:"host"`
+	HTMLColor        string              `json:"-"`
 }
diff --git a/pkg/ffuf/job.go b/pkg/ffuf/job.go
index 539566a..e80bddf 100644
--- a/pkg/ffuf/job.go
+++ b/pkg/ffuf/job.go
@@ -11,14 +11,16 @@ import (
 	"time"
 )
 
-//Job ties together Config, Runner, Input and Output
+// Job ties together Config, Runner, Input and Output
 type Job struct {
 	Config               *Config
 	ErrorMutex           sync.Mutex
 	Input                InputProvider
 	Runner               RunnerProvider
 	ReplayRunner         RunnerProvider
+	Scraper              Scraper
 	Output               OutputProvider
+	Jobhash              string
 	Counter              int
 	ErrorCounter         int
 	SpuriousErrorCounter int
@@ -36,6 +38,7 @@ type Job struct {
 	queuepos             int
 	skipQueue            bool
 	currentDepth         int
+	calibMutex           sync.Mutex
 	pauseWg              sync.WaitGroup
 }
 
@@ -62,7 +65,7 @@ func NewJob(conf *Config) *Job {
 	return &j
 }
 
-//incError increments the error counter
+// incError increments the error counter
 func (j *Job) incError() {
 	j.ErrorMutex.Lock()
 	defer j.ErrorMutex.Unlock()
@@ -70,7 +73,7 @@ func (j *Job) incError() {
 	j.SpuriousErrorCounter++
 }
 
-//inc403 increments the 403 response counter
+// inc403 increments the 403 response counter
 func (j *Job) inc403() {
 	j.ErrorMutex.Lock()
 	defer j.ErrorMutex.Unlock()
@@ -84,25 +87,25 @@ func (j *Job) inc429() {
 	j.Count429++
 }
 
-//resetSpuriousErrors resets the spurious error counter
+// resetSpuriousErrors resets the spurious error counter
 func (j *Job) resetSpuriousErrors() {
 	j.ErrorMutex.Lock()
 	defer j.ErrorMutex.Unlock()
 	j.SpuriousErrorCounter = 0
 }
 
-//DeleteQueueItem deletes a recursion job from the queue by its index in the slice
+// DeleteQueueItem deletes a recursion job from the queue by its index in the slice
 func (j *Job) DeleteQueueItem(index int) {
 	index = j.queuepos + index - 1
 	j.queuejobs = append(j.queuejobs[:index], j.queuejobs[index+1:]...)
 }
 
-//QueuedJobs returns the slice of queued recursive jobs
+// QueuedJobs returns the slice of queued recursive jobs
 func (j *Job) QueuedJobs() []QueueJob {
 	return j.queuejobs[j.queuepos-1:]
 }
 
-//Start the execution of the Job
+// Start the execution of the Job
 func (j *Job) Start() {
 	if j.startTime.IsZero() {
 		j.startTime = time.Now()
@@ -179,9 +182,10 @@ func (j *Job) prepareQueueJob() {
 	//And activate / disable inputproviders as needed
 	j.Input.ActivateKeywords(found_kws)
 	j.queuepos += 1
+	j.Jobhash, _ = WriteHistoryEntry(j.Config)
 }
 
-//SkipQueue allows to skip the current job and advance to the next queued recursion job
+// SkipQueue allows to skip the current job and advance to the next queued recursion job
 func (j *Job) SkipQueue() {
 	j.skipQueue = true
 }
@@ -237,7 +241,7 @@ func (j *Job) startExecution() {
 	}
 
 	//Limiter blocks after reaching the buffer, ensuring limited concurrency
-	limiter := make(chan bool, j.Config.Threads)
+	threadlimiter := make(chan bool, j.Config.Threads)
 
 	for j.Input.Next() && !j.skipQueue {
 		// Check if we should stop the process
@@ -248,23 +252,27 @@ func (j *Job) startExecution() {
 			break
 		}
 		j.pauseWg.Wait()
-		limiter <- true
+		// Handle the rate & thread limiting
+		threadlimiter <- true
+		// Ratelimiter handles the rate ticker
+		<-j.Rate.RateLimiter.C
 		nextInput := j.Input.Value()
 		nextPosition := j.Input.Position()
+		// Add FFUFHASH and its value
+		nextInput["FFUFHASH"] = j.ffufHash(nextPosition)
+
 		wg.Add(1)
 		j.Counter++
 
 		go func() {
-			defer func() { <-limiter }()
+			defer func() { <-threadlimiter }()
 			defer wg.Done()
 			threadStart := time.Now()
 			j.runTask(nextInput, nextPosition, false)
 			j.sleepIfNeeded()
-			j.Rate.Throttle()
 			threadEnd := time.Now()
 			j.Rate.Tick(threadStart, threadEnd)
 		}()
-
 		if !j.RunningJob {
 			defer j.Output.Warning(j.Error)
 			return
@@ -305,7 +313,6 @@ func (j *Job) runBackgroundTasks(wg *sync.WaitGroup) {
 		if !j.RunningJob {
 			return
 		}
-		j.Rate.Adjust()
 		time.Sleep(time.Millisecond * time.Duration(j.Config.ProgressFrequency))
 	}
 }
@@ -325,31 +332,66 @@ func (j *Job) updateProgress() {
 
 func (j *Job) isMatch(resp Response) bool {
 	matched := false
-	for _, m := range j.Config.Matchers {
+	var matchers map[string]FilterProvider
+	var filters map[string]FilterProvider
+	if j.Config.AutoCalibrationPerHost {
+		filters = j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*resp.Request))
+	} else {
+		filters = j.Config.MatcherManager.GetFilters()
+	}
+	matchers = j.Config.MatcherManager.GetMatchers()
+	for _, m := range matchers {
 		match, err := m.Filter(&resp)
 		if err != nil {
 			continue
 		}
 		if match {
 			matched = true
+		} else if j.Config.MatcherMode == "and" {
+			// we already know this isn't "and" match
+			return false
+
 		}
 	}
 	// The response was not matched, return before running filters
 	if !matched {
 		return false
 	}
-	for _, f := range j.Config.Filters {
+	for _, f := range filters {
 		fv, err := f.Filter(&resp)
 		if err != nil {
 			continue
 		}
 		if fv {
-			return false
+			//	return false
+			if j.Config.FilterMode == "or" {
+				// return early, as filter matched
+				return false
+			}
+		} else {
+			if j.Config.FilterMode == "and" {
+				// return early as not all filters matched in "and" mode
+				return true
+			}
 		}
 	}
+	if len(filters) > 0 && j.Config.FilterMode == "and" {
+		// we did not return early, so all filters were matched
+		return false
+	}
 	return true
 }
 
+func (j *Job) ffufHash(pos int) []byte {
+	hashstring := ""
+	r := []rune(j.Jobhash)
+	if len(r) > 5 {
+		hashstring = string(r[:5])
+	}
+	hashstring += fmt.Sprintf("%x", pos)
+	return []byte(hashstring)
+}
+
 func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
 	basereq := j.queuejobs[j.queuepos-1].req
 	req, err := j.Runner.Prepare(input, &basereq)
@@ -360,6 +402,7 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
 		log.Printf("%s", err)
 		return
 	}
+
 	resp, err := j.Runner.Execute(&req)
 	if err != nil {
 		if retried {
@@ -386,6 +429,18 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
 		}
 	}
 	j.pauseWg.Wait()
+
+	// Handle autocalibration, must be done after the actual request to ensure sane value in req.Host
+	_ = j.CalibrateIfNeeded(HostURLFromRequest(req), input)
+
+	// Handle scraper actions
+	if j.Scraper != nil {
+		for _, sres := range j.Scraper.Execute(&resp, j.isMatch(resp)) {
+			resp.ScraperData[sres.Name] = sres.Results
+			j.handleScraperResult(&resp, sres)
+		}
+	}
+
 	if j.isMatch(resp) {
 		// Re-send request through replay-proxy if needed
 		if j.ReplayRunner != nil {
@@ -406,6 +461,11 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
 		if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" {
 			j.handleGreedyRecursionJob(resp)
 		}
+	} else {
+		if len(resp.ScraperData) > 0 {
+			// print the result anyway, as scraper found something
+			j.Output.Result(resp)
+		}
 	}
 
 	if j.Config.Recursion && j.Config.RecursionStrategy == "default" && len(resp.GetRedirectLocation(false)) > 0 {
@@ -413,7 +473,16 @@ func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
 	}
 }
 
-//handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached
+func (j *Job) handleScraperResult(resp *Response, sres ScraperResult) {
+	for _, a := range sres.Action {
+		switch a {
+		case "output":
+			resp.ScraperData[sres.Name] = sres.Results
+		}
+	}
+}
+
+// handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached
 func (j *Job) handleGreedyRecursionJob(resp Response) {
 	// Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob
 	if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
@@ -426,8 +495,8 @@ func (j *Job) handleGreedyRecursionJob(resp Response) {
 	}
 }
 
-//handleDefaultRecursionJob adds a new recursion job to the job queue if a new directory is found and maximum depth has
-//not been reached
+// handleDefaultRecursionJob adds a new recursion job to the job queue if a new directory is found and maximum depth has
+// not been reached
 func (j *Job) handleDefaultRecursionJob(resp Response) {
 	recUrl := resp.Request.Url + "/" + "FUZZ"
 	if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) {
@@ -444,47 +513,6 @@ func (j *Job) handleDefaultRecursionJob(resp Response) {
 	}
 }
 
-//CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests
-func (j *Job) CalibrateResponses() ([]Response, error) {
-	basereq := BaseRequest(j.Config)
-	cInputs := make([]string, 0)
-	rand.Seed(time.Now().UnixNano())
-	if len(j.Config.AutoCalibrationStrings) < 1 {
-		cInputs = append(cInputs, "admin"+RandomString(16)+"/")
-		cInputs = append(cInputs, ".htaccess"+RandomString(16))
-		cInputs = append(cInputs, RandomString(16)+"/")
-		cInputs = append(cInputs, RandomString(16))
-	} else {
-		cInputs = append(cInputs, j.Config.AutoCalibrationStrings...)
-	}
-
-	results := make([]Response, 0)
-	for _, input := range cInputs {
-		inputs := make(map[string][]byte, len(j.Config.InputProviders))
-		for _, v := range j.Config.InputProviders {
-			inputs[v.Keyword] = []byte(input)
-		}
-
-		req, err := j.Runner.Prepare(inputs, &basereq)
-		if err != nil {
-			j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err))
-			j.incError()
-			log.Printf("%s", err)
-			return results, err
-		}
-		resp, err := j.Runner.Execute(&req)
-		if err != nil {
-			return results, err
-		}
-
-		// Only calibrate on responses that would be matched otherwise
-		if j.isMatch(resp) {
-			results = append(results, resp)
-		}
-	}
-	return results, nil
-}
-
 // CheckStop stops the job if stopping conditions are met
 func (j *Job) CheckStop() {
 	if j.Counter > 50 {
@@ -533,13 +561,13 @@ func (j *Job) CheckStop() {
 	}
 }
 
-//Stop the execution of the Job
+// Stop the execution of the Job
 func (j *Job) Stop() {
 	j.Running = false
 	j.Config.Cancel()
 }
 
-//Stop current, resume to next
+// Stop current, resume to next
 func (j *Job) Next() {
 	j.RunningJob = false
 }
diff --git a/pkg/ffuf/optionsparser.go b/pkg/ffuf/optionsparser.go
index 83f834e..38100f7 100644
--- a/pkg/ffuf/optionsparser.go
+++ b/pkg/ffuf/optionsparser.go
@@ -4,7 +4,7 @@ import (
 	"bufio"
 	"context"
 	"fmt"
-	"io/ioutil"
+	"io"
 	"net/textproto"
 	"net/url"
 	"os"
@@ -17,94 +17,103 @@ import (
 )
 
 type ConfigOptions struct {
-	Filter  FilterOptions
-	General GeneralOptions
-	HTTP    HTTPOptions
-	Input   InputOptions
-	Matcher MatcherOptions
-	Output  OutputOptions
+	Filter  FilterOptions  `json:"filters"`
+	General GeneralOptions `json:"general"`
+	HTTP    HTTPOptions    `json:"http"`
+	Input   InputOptions   `json:"input"`
+	Matcher MatcherOptions `json:"matchers"`
+	Output  OutputOptions  `json:"output"`
 }
 
 type HTTPOptions struct {
-	Cookies           []string
-	Data              string
-	FollowRedirects   bool
-	Headers           []string
-	IgnoreBody        bool
-	Method            string
-	ProxyURL          string
-	Recursion         bool
-	RecursionDepth    int
-	RecursionStrategy string
-	ReplayProxyURL    string
-	SNI               string
-	Timeout           int
-	URL               string
-	Http2             bool
+	Cookies           []string `json:"-"` // this is appended in headers
+	Data              string   `json:"data"`
+	FollowRedirects   bool     `json:"follow_redirects"`
+	Headers           []string `json:"headers"`
+	IgnoreBody        bool     `json:"ignore_body"`
+	Method            string   `json:"method"`
+	ProxyURL          string   `json:"proxy_url"`
+	Recursion         bool     `json:"recursion"`
+	RecursionDepth    int      `json:"recursion_depth"`
+	RecursionStrategy string   `json:"recursion_strategy"`
+	ReplayProxyURL    string   `json:"replay_proxy_url"`
+	SNI               string   `json:"sni"`
+	Timeout           int      `json:"timeout"`
+	URL               string   `json:"url"`
+	Http2             bool     `json:"http2"`
 }
 
 type GeneralOptions struct {
-	AutoCalibration        bool
-	AutoCalibrationStrings []string
-	Colors                 bool
-	ConfigFile             string `toml:"-"`
-	Delay                  string
-	Json                   bool
-	MaxTime                int
-	MaxTimeJob             int
-	Noninteractive         bool
-	Quiet                  bool
-	Rate                   int
-	ShowVersion            bool `toml:"-"`
-	StopOn403              bool
-	StopOnAll              bool
-	StopOnErrors           bool
-	Threads                int
-	Verbose                bool
+	AutoCalibration         bool     `json:"autocalibration"`
+	AutoCalibrationKeyword  string   `json:"autocalibration_keyword"`
+	AutoCalibrationPerHost  bool     `json:"autocalibration_per_host"`
+	AutoCalibrationStrategy string   `json:"autocalibration_strategy"`
+	AutoCalibrationStrings  []string `json:"autocalibration_strings"`
+	Colors                  bool     `json:"colors"`
+	ConfigFile              string   `toml:"-" json:"config_file"`
+	Delay                   string   `json:"delay"`
+	Json                    bool     `json:"json"`
+	MaxTime                 int      `json:"maxtime"`
+	MaxTimeJob              int      `json:"maxtime_job"`
+	Noninteractive          bool     `json:"noninteractive"`
+	Quiet                   bool     `json:"quiet"`
+	Rate                    int      `json:"rate"`
+	ScraperFile             string   `json:"scraperfile"`
+	Scrapers                string   `json:"scrapers"`
+	Searchhash              string   `json:"-"`
+	ShowVersion             bool     `toml:"-" json:"-"`
+	StopOn403               bool     `json:"stop_on_403"`
+	StopOnAll               bool     `json:"stop_on_all"`
+	StopOnErrors            bool     `json:"stop_on_errors"`
+	Threads                 int      `json:"threads"`
+	Verbose                 bool     `json:"verbose"`
 }
 
 type InputOptions struct {
-	DirSearchCompat        bool
-	Extensions             string
-	IgnoreWordlistComments bool
-	InputMode              string
-	InputNum               int
-	InputShell             string
-	Inputcommands          []string
-	Request                string
-	RequestProto           string
-	Wordlists              []string
+	DirSearchCompat        bool     `json:"dirsearch_compat"`
+	Extensions             string   `json:"extensions"`
+	IgnoreWordlistComments bool     `json:"ignore_wordlist_comments"`
+	InputMode              string   `json:"input_mode"`
+	InputNum               int      `json:"input_num"`
+	InputShell             string   `json:"input_shell"`
+	Inputcommands          []string `json:"input_commands"`
+	Request                string   `json:"request_file"`
+	RequestProto           string   `json:"request_proto"`
+	Wordlists              []string `json:"wordlists"`
 }
 
 type OutputOptions struct {
-	DebugLog            string
-	OutputDirectory     string
-	OutputFile          string
-	OutputFormat        string
-	OutputSkipEmptyFile bool
+	DebugLog            string `json:"debug_log"`
+	OutputDirectory     string `json:"output_directory"`
+	OutputFile          string `json:"output_file"`
+	OutputFormat        string `json:"output_format"`
+	OutputSkipEmptyFile bool   `json:"output_skip_empty"`
 }
 
 type FilterOptions struct {
-	Lines  string
-	Regexp string
-	Size   string
-	Status string
-	Time   string
-	Words  string
+	Mode   string `json:"mode"`
+	Lines  string `json:"lines"`
+	Regexp string `json:"regexp"`
+	Size   string `json:"size"`
+	Status string `json:"status"`
+	Time   string `json:"time"`
+	Words  string `json:"words"`
 }
 
 type MatcherOptions struct {
-	Lines  string
-	Regexp string
-	Size   string
-	Status string
-	Time   string
-	Words  string
+	Mode   string `json:"mode"`
+	Lines  string `json:"lines"`
+	Regexp string `json:"regexp"`
+	Size   string `json:"size"`
+	Status string `json:"status"`
+	Time   string `json:"time"`
+	Words  string `json:"words"`
 }
 
-//NewConfigOptions returns a newly created ConfigOptions struct with default values
+// NewConfigOptions returns a newly created ConfigOptions struct with default values
 func NewConfigOptions() *ConfigOptions {
 	c := &ConfigOptions{}
+	c.Filter.Mode = "or"
 	c.Filter.Lines = ""
 	c.Filter.Regexp = ""
 	c.Filter.Size = ""
@@ -112,6 +121,8 @@ func NewConfigOptions() *ConfigOptions {
 	c.Filter.Time = ""
 	c.Filter.Words = ""
 	c.General.AutoCalibration = false
+	c.General.AutoCalibrationKeyword = "FUZZ"
+	c.General.AutoCalibrationStrategy = "basic"
 	c.General.Colors = false
 	c.General.Delay = ""
 	c.General.Json = false
@@ -120,6 +131,9 @@ func NewConfigOptions() *ConfigOptions {
 	c.General.Noninteractive = false
 	c.General.Quiet = false
 	c.General.Rate = 0
+	c.General.Searchhash = ""
+	c.General.ScraperFile = ""
+	c.General.Scrapers = "all"
 	c.General.ShowVersion = false
 	c.General.StopOn403 = false
 	c.General.StopOnAll = false
@@ -146,6 +160,7 @@ func NewConfigOptions() *ConfigOptions {
 	c.Input.InputNum = 100
 	c.Input.Request = ""
 	c.Input.RequestProto = "https"
+	c.Matcher.Mode = "or"
 	c.Matcher.Lines = ""
 	c.Matcher.Regexp = ""
 	c.Matcher.Size = ""
@@ -160,7 +175,7 @@ func NewConfigOptions() *ConfigOptions {
 	return c
 }
 
-//ConfigFromOptions parses the values in ConfigOptions struct, ensures that the values are sane,
+// ConfigFromOptions parses the values in ConfigOptions struct, ensures that the values are sane,
 // and creates a Config struct out of them.
 func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel context.CancelFunc) (*Config, error) {
 	//TODO: refactor in a proper flag library that can handle things like required flags
@@ -211,6 +226,7 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 		}
 	}
 
+	tmpWordlists := make([]string, 0)
 	for _, v := range parseOpts.Input.Wordlists {
 		var wl []string
 		if runtime.GOOS == "windows" {
@@ -234,6 +250,17 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 		} else {
 			wl = strings.SplitN(v, ":", 2)
 		}
+		// Try to use absolute paths for wordlists
+		fullpath := ""
+		if wl[0] != "-" {
+			fullpath, err = filepath.Abs(wl[0])
+		} else {
+			fullpath = wl[0]
+		}
+
+		if err == nil {
+			wl[0] = fullpath
+		}
 		if len(wl) == 2 {
 			if conf.InputMode == "sniper" {
 				errs.Add(fmt.Errorf("sniper mode does not support wordlist keywords"))
@@ -252,7 +279,9 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 				Template: template,
 			})
 		}
+		tmpWordlists = append(tmpWordlists, strings.Join(wl, ":"))
 	}
+	conf.Wordlists = tmpWordlists
 
 	for _, v := range parseOpts.Input.Inputcommands {
 		ic := strings.SplitN(v, ":", 2)
@@ -355,9 +384,9 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 
 	// Verify proxy url format
 	if len(parseOpts.HTTP.ProxyURL) > 0 {
-		_, err := url.Parse(parseOpts.HTTP.ProxyURL)
-		if err != nil {
-			errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
+		u, err := url.Parse(parseOpts.HTTP.ProxyURL)
+		if err != nil || u.Opaque != "" || (u.Scheme != "http" && u.Scheme != "https" && u.Scheme != "socks5") {
+			errs.Add(fmt.Errorf("Bad proxy url (-x) format. Expected http, https or socks5 url"))
 		} else {
 			conf.ProxyURL = parseOpts.HTTP.ProxyURL
 		}
@@ -365,9 +394,9 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 
 	// Verify replayproxy url format
 	if len(parseOpts.HTTP.ReplayProxyURL) > 0 {
-		_, err := url.Parse(parseOpts.HTTP.ReplayProxyURL)
-		if err != nil {
-			errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
+		u, err := url.Parse(parseOpts.HTTP.ReplayProxyURL)
+		if err != nil || u.Opaque != "" || (u.Scheme != "http" && u.Scheme != "https" && u.Scheme != "socks5" && u.Scheme != "socks5h") {
+			errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format. Expected http, https or socks5 url"))
 		} else {
 			conf.ReplayProxyURL = parseOpts.HTTP.ReplayProxyURL
 		}
@@ -437,6 +466,8 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 	conf.OutputSkipEmptyFile = parseOpts.Output.OutputSkipEmptyFile
 	conf.IgnoreBody = parseOpts.HTTP.IgnoreBody
 	conf.Quiet = parseOpts.General.Quiet
+	conf.ScraperFile = parseOpts.General.ScraperFile
+	conf.Scrapers = parseOpts.General.Scrapers
 	conf.StopOn403 = parseOpts.General.StopOn403
 	conf.StopOnAll = parseOpts.General.StopOnAll
 	conf.StopOnErrors = parseOpts.General.StopOnErrors
@@ -445,6 +476,8 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 	conf.RecursionDepth = parseOpts.HTTP.RecursionDepth
 	conf.RecursionStrategy = parseOpts.HTTP.RecursionStrategy
 	conf.AutoCalibration = parseOpts.General.AutoCalibration
+	conf.AutoCalibrationPerHost = parseOpts.General.AutoCalibrationPerHost
+	conf.AutoCalibrationStrategy = parseOpts.General.AutoCalibrationStrategy
 	conf.Threads = parseOpts.General.Threads
 	conf.Timeout = parseOpts.HTTP.Timeout
 	conf.MaxTime = parseOpts.General.MaxTime
@@ -454,6 +487,34 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 	conf.Json = parseOpts.General.Json
 	conf.Http2 = parseOpts.HTTP.Http2
 
+	// Check that fmode and mmode have sane values
+	valid_opmodes := []string{"and", "or"}
+	fmode_found := false
+	mmode_found := false
+	for _, v := range valid_opmodes {
+		if v == parseOpts.Filter.Mode {
+			fmode_found = true
+		}
+		if v == parseOpts.Matcher.Mode {
+			mmode_found = true
+		}
+	}
+	if !fmode_found {
+		errmsg := fmt.Sprintf("Unrecognized value for parameter fmode: %s, valid values are: and, or", parseOpts.Filter.Mode)
+		errs.Add(fmt.Errorf(errmsg))
+	}
+	if !mmode_found {
+		errmsg := fmt.Sprintf("Unrecognized value for parameter mmode: %s, valid values are: and, or", parseOpts.Matcher.Mode)
+		errs.Add(fmt.Errorf(errmsg))
+	}
+	conf.FilterMode = parseOpts.Filter.Mode
+	conf.MatcherMode = parseOpts.Matcher.Mode
+
+	if conf.AutoCalibrationPerHost {
+		// AutoCalibrationPerHost implies AutoCalibration
+		conf.AutoCalibration = true
+	}
+
 	// Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
 	if len(conf.Data) > 0 &&
 		conf.Method == "GET" &&
@@ -491,11 +552,12 @@ func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel con
 	if parseOpts.General.Verbose && parseOpts.General.Json {
 		errs.Add(fmt.Errorf("Cannot have -json and -v"))
 	}
-
 	return &conf, errs.ErrorOrNil()
 }
 
 func parseRawRequest(parseOpts *ConfigOptions, conf *Config) error {
+	conf.RequestFile = parseOpts.Input.Request
+	conf.RequestProto = parseOpts.Input.RequestProto
 	file, err := os.Open(parseOpts.Input.Request)
 	if err != nil {
 		return fmt.Errorf("could not open request file: %s", err)
@@ -550,13 +612,14 @@ func parseRawRequest(parseOpts *ConfigOptions, conf *Config) error {
 	}
 
 	// Set the request body
-	b, err := ioutil.ReadAll(r)
+	b, err := io.ReadAll(r)
 	if err != nil {
 		return fmt.Errorf("could not read request body: %s", err)
 	}
 	conf.Data = string(b)
 
 	// Remove newline (typically added by the editor) at the end of the file
+	//nolint:gosimple // we specifically want to remove just a single newline, not all of them
 	if strings.HasSuffix(conf.Data, "\r\n") {
 		conf.Data = conf.Data[:len(conf.Data)-2]
 	} else if strings.HasSuffix(conf.Data, "\n") {
@@ -629,7 +692,7 @@ func templatePresent(template string, conf *Config) bool {
 
 func ReadConfig(configFile string) (*ConfigOptions, error) {
 	conf := NewConfigOptions()
-	configData, err := ioutil.ReadFile(configFile)
+	configData, err := os.ReadFile(configFile)
 	if err == nil {
 		err = toml.Unmarshal(configData, conf)
 	}
@@ -637,10 +700,14 @@ func ReadConfig(configFile string) (*ConfigOptions, error) {
 }
 
 func ReadDefaultConfig() (*ConfigOptions, error) {
-	userhome, err := os.UserHomeDir()
-	if err != nil {
-		return NewConfigOptions(), err
+	// Try to create configuration directory, ignore the potential error
+	_ = CheckOrCreateConfigDir()
+	conffile := filepath.Join(CONFIGDIR, "ffufrc")
+	if !FileExists(conffile) {
+		userhome, err := os.UserHomeDir()
+		if err == nil {
+			conffile = filepath.Join(userhome, ".ffufrc")
+		}
 	}
-	defaultconf := filepath.Join(userhome, ".ffufrc")
-	return ReadConfig(defaultconf)
+	return ReadConfig(conffile)
 }
diff --git a/pkg/ffuf/optionsparser_test.go b/pkg/ffuf/optionsparser_test.go
index 2e9913b..2ed741f 100644
--- a/pkg/ffuf/optionsparser_test.go
+++ b/pkg/ffuf/optionsparser_test.go
@@ -1,6 +1,7 @@
 package ffuf
 
 import (
+	"strings"
 	"testing"
 )
 
@@ -83,3 +84,97 @@ func TestTemplatePresent(t *testing.T) {
 		t.Errorf("Expected-bad config (Header key) failed validation")
 	}
 }
+
+func TestProxyParsing(t *testing.T) {
+	configOptions := NewConfigOptions()
+	errorString := "Bad proxy url (-x) format. Expected http, https or socks5 url"
+
+	// http should work
+	configOptions.HTTP.ProxyURL = "http://127.0.0.1:8080"
+	_, err := ConfigFromOptions(configOptions, nil, nil)
+	if strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected http proxy string to work")
+	}
+
+	// https should work
+	configOptions.HTTP.ProxyURL = "https://127.0.0.1"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected https proxy string to work")
+	}
+
+	// socks5 should work
+	configOptions.HTTP.ProxyURL = "socks5://127.0.0.1"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected socks5 proxy string to work")
+	}
+
+	// garbage data should FAIL
+	configOptions.HTTP.ProxyURL = "Y0 y0 it's GREASE"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if !strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected garbage proxy string to fail")
+	}
+
+	// Opaque URLs with the right scheme should FAIL
+	configOptions.HTTP.ProxyURL = "http:sixhours@dungeon"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if !strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected opaque proxy string to fail")
+	}
+
+	// Unsupported protocols should FAIL
+	configOptions.HTTP.ProxyURL = "imap://127.0.0.1"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if !strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected proxy string with unsupported protocol to fail")
+	}
+}
+
+func TestReplayProxyParsing(t *testing.T) {
+	configOptions := NewConfigOptions()
+	errorString := "Bad replay-proxy url (-replay-proxy) format. Expected http, https or socks5 url"
+
+	// http should work
+	configOptions.HTTP.ReplayProxyURL = "http://127.0.0.1:8080"
+	_, err := ConfigFromOptions(configOptions, nil, nil)
+	if strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected http replay proxy string to work")
+	}
+
+	// https should work
+	configOptions.HTTP.ReplayProxyURL = "https://127.0.0.1"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected https proxy string to work")
+	}
+
+	// socks5 should work
+	configOptions.HTTP.ReplayProxyURL = "socks5://127.0.0.1"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected socks5 proxy string to work")
+	}
+
+	// garbage data should FAIL
+	configOptions.HTTP.ReplayProxyURL = "Y0 y0 it's GREASE"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if !strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected garbage proxy string to fail")
+	}
+
+	// Opaque URLs with the right scheme should FAIL
+	configOptions.HTTP.ReplayProxyURL = "http:sixhours@dungeon"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if !strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected opaque proxy string to fail")
+	}
+
+	// Unsupported protocols should FAIL
+	configOptions.HTTP.ReplayProxyURL = "imap://127.0.0.1"
+	_, err = ConfigFromOptions(configOptions, nil, nil)
+	if !strings.Contains(err.Error(), errorString) {
+		t.Errorf("Expected proxy string with unsupported protocol to fail")
+	}
+}
diff --git a/pkg/ffuf/rate.go b/pkg/ffuf/rate.go
index 7c70992..48a6940 100644
--- a/pkg/ffuf/rate.go
+++ b/pkg/ffuf/rate.go
@@ -7,100 +7,76 @@ import (
 )
 
 type RateThrottle struct {
-	rateCounter       *ring.Ring
-	RateAdjustment    float64
-	RateAdjustmentPos int
-	Config            *Config
-	RateMutex         sync.Mutex
-	lastAdjustment    time.Time
+	rateCounter    *ring.Ring
+	Config         *Config
+	RateMutex      sync.Mutex
+	RateLimiter    *time.Ticker
+	lastAdjustment time.Time
 }
 
 func NewRateThrottle(conf *Config) *RateThrottle {
-	return &RateThrottle{
-		rateCounter:       ring.New(conf.Threads),
-		RateAdjustment:    0,
-		RateAdjustmentPos: 0,
-		Config:            conf,
-		lastAdjustment:    time.Now(),
+	r := &RateThrottle{
+		Config:         conf,
+		lastAdjustment: time.Now(),
 	}
+	if conf.Rate > 0 {
+		r.rateCounter = ring.New(int(conf.Rate * 5))
+	} else {
+		r.rateCounter = ring.New(conf.Threads * 5)
+	}
+	if conf.Rate > 0 {
+		ratemicros := 1000000 / conf.Rate
+		r.RateLimiter = time.NewTicker(time.Microsecond * time.Duration(ratemicros))
+	} else {
+		// Million rps is probably a decent hardcoded upper speed limit
+		r.RateLimiter = time.NewTicker(time.Microsecond * 1)
+	}
+	return r
 }
 
-//CurrentRate calculates requests/second value from circular list of rate
+// CurrentRate calculates the current requests/second value from the circular list of rate ticks
 func (r *RateThrottle) CurrentRate() int64 {
 	n := r.rateCounter.Len()
-	var total int64
-	total = 0
+	lowest := int64(0)
+	highest := int64(0)
 	r.rateCounter.Do(func(r interface{}) {
 		switch val := r.(type) {
 		case int64:
-			total += val
+			if lowest == 0 || val < lowest {
+				lowest = val
+			}
+			if val > highest {
+				highest = val
+			}
 		default:
-			// circular list entry was nil, happens when < number_of_threads responses have been recorded.
+			// circular list entry was nil, happens when < number_of_threads * 5 responses have been recorded.
 			// the total number of entries is less than length of the list
 			n -= 1
 		}
 	})
-	if total > 0 {
-		avg := total / int64(n)
-		return time.Second.Nanoseconds() * int64(r.Config.Threads) / avg
-	}
-
-	return 0
-}
 
-//rateTick adds a new duration measurement tick to rate counter
-func (r *RateThrottle) Tick(start, end time.Time) {
-	if start.Before(r.lastAdjustment) {
-		// We don't want to store data for threads started pre-adjustment
-		return
+	earliest := time.UnixMicro(lowest)
+	latest := time.UnixMicro(highest)
+	elapsed := latest.Sub(earliest)
+	if n > 0 && elapsed.Milliseconds() > 1 {
+		return int64(1000 * int64(n) / elapsed.Milliseconds())
 	}
-	r.RateMutex.Lock()
-	defer r.RateMutex.Unlock()
-	dur := end.Sub(start).Nanoseconds()
-	r.rateCounter = r.rateCounter.Next()
-	r.RateAdjustmentPos += 1
-	r.rateCounter.Value = dur
+	return 0
 }
 
-func (r *RateThrottle) Throttle() {
-	if r.Config.Rate == 0 {
-		// No throttling
-		return
-	}
-	if r.RateAdjustment > 0.0 {
-		delayNS := float64(time.Second.Nanoseconds()) * r.RateAdjustment
-		time.Sleep(time.Nanosecond * time.Duration(delayNS))
-	}
+func (r *RateThrottle) ChangeRate(rate int) {
+	ratemicros := 1000000 / rate
+	r.RateLimiter.Stop()
+	r.RateLimiter = time.NewTicker(time.Microsecond * time.Duration(ratemicros))
+	r.Config.Rate = int64(rate)
+	// reset the rate counter
+	r.rateCounter = ring.New(rate * 5)
 }
 
-//Adjust changes the RateAdjustment value, which is multiplier of second to pause between requests in a thread
-func (r *RateThrottle) Adjust() {
-	if r.RateAdjustmentPos < r.Config.Threads {
-		// Do not adjust if we don't have enough data yet
-		return
-	}
+// Tick adds a new timestamp tick to the rate counter
+func (r *RateThrottle) Tick(start, end time.Time) {
 	r.RateMutex.Lock()
 	defer r.RateMutex.Unlock()
-	currentRate := r.CurrentRate()
-
-	if r.RateAdjustment == 0.0 {
-		if currentRate > r.Config.Rate {
-			// If we're adjusting the rate for the first time, start at a safe point (0.2sec)
-			r.RateAdjustment = 0.2
-			return
-		} else {
-			// NOOP
-			return
-		}
-	}
-	difference := float64(currentRate) / float64(r.Config.Rate)
-	if r.RateAdjustment < 0.00001 && difference < 0.9 {
-		// Reset the rate adjustment as throttling is not relevant at current speed
-		r.RateAdjustment = 0.0
-	} else {
-		r.RateAdjustment = r.RateAdjustment * difference
-	}
-	// Reset the counters
-	r.lastAdjustment = time.Now()
-	r.RateAdjustmentPos = 0
+	r.rateCounter = r.rateCounter.Next()
+	r.rateCounter.Value = end.UnixMicro()
 }
diff --git a/pkg/ffuf/request.go b/pkg/ffuf/request.go
index 8cce661..de93cbf 100644
--- a/pkg/ffuf/request.go
+++ b/pkg/ffuf/request.go
@@ -172,10 +172,12 @@ func injectKeyword(input string, keyword string, startOffset int, endOffset int)
 	prefix := inputslice[:startOffset]
 	suffix := inputslice[endOffset+1:]
 
-	inputslice = append(prefix, keywordslice...)
-	inputslice = append(inputslice, suffix...)
+	var outputslice []rune
+	outputslice = append(outputslice, prefix...)
+	outputslice = append(outputslice, keywordslice...)
+	outputslice = append(outputslice, suffix...)
 
-	return string(inputslice)
+	return string(outputslice)
 }
 
 // scrubTemplates removes all template (§) strings from the request struct
diff --git a/pkg/ffuf/request_test.go b/pkg/ffuf/request_test.go
index 7c55f78..414993d 100644
--- a/pkg/ffuf/request_test.go
+++ b/pkg/ffuf/request_test.go
@@ -215,6 +215,23 @@ func TestInjectKeyword(t *testing.T) {
 		t.Errorf("injectKeyword offset validation failed")
 	}
 
+	input = "id=§a§&sort=desc"
+	offsetTuple = templateLocations("§", input)
+	expected = "id=FUZZ&sort=desc"
+
+	result = injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1])
+	if result != expected {
+		t.Errorf("injectKeyword returned unexpected result: " + result)
+	}
+
+	input = "feature=aaa&thingie=bbb&array[§0§]=baz"
+	offsetTuple = templateLocations("§", input)
+	expected = "feature=aaa&thingie=bbb&array[FUZZ]=baz"
+
+	result = injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1])
+	if result != expected {
+		t.Errorf("injectKeyword returned unexpected result: " + result)
+	}
 }
 
 func TestScrubTemplates(t *testing.T) {
diff --git a/pkg/ffuf/response.go b/pkg/ffuf/response.go
index 58f9f8d..64427fa 100644
--- a/pkg/ffuf/response.go
+++ b/pkg/ffuf/response.go
@@ -19,6 +19,7 @@ type Response struct {
 	Request       *Request
 	Raw           string
 	ResultFile    string
+	ScraperData   map[string][]string
 	Time          time.Duration
 }
 
@@ -86,5 +87,6 @@ func NewResponse(httpresp *http.Response, req *Request) Response {
 	resp.Cancelled = false
 	resp.Raw = ""
 	resp.ResultFile = ""
+	resp.ScraperData = make(map[string][]string)
 	return resp
 }
diff --git a/pkg/ffuf/util.go b/pkg/ffuf/util.go
index 1064de6..c7f5e13 100644
--- a/pkg/ffuf/util.go
+++ b/pkg/ffuf/util.go
@@ -1,16 +1,18 @@
 package ffuf
 
 import (
+	"errors"
 	"fmt"
 	"math/rand"
+	"net/url"
 	"os"
 	"strings"
 )
 
-//used for random string generation in calibration function
+// used for random string generation in calibration function
 var chars = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
 
-//RandomString returns a random string of length of parameter n
+// RandomString returns a random string of length of parameter n
 func RandomString(n int) string {
 	s := make([]rune, n)
 	for i := range s {
@@ -19,7 +21,7 @@ func RandomString(n int) string {
 	return string(s)
 }
 
-//UniqStringSlice returns an unordered slice of unique strings. The duplicates are dropped
+// UniqStringSlice returns an unordered slice of unique strings. The duplicates are dropped
 func UniqStringSlice(inslice []string) []string {
 	found := map[string]bool{}
 
@@ -33,8 +35,8 @@ func UniqStringSlice(inslice []string) []string {
 	return ret
 }
 
-//FileExists checks if the filepath exists and is not a directory.
-//Returns false in case it's not possible to describe the named file.
+// FileExists checks if the filepath exists and is not a directory.
+// Returns false in case it's not possible to describe the named file.
 func FileExists(path string) bool {
 	md, err := os.Stat(path)
 	if err != nil {
@@ -44,7 +46,7 @@ func FileExists(path string) bool {
 	return !md.IsDir()
 }
 
-//RequestContainsKeyword checks if a keyword is present in any field of a request
+// RequestContainsKeyword checks if a keyword is present in any field of a request
 func RequestContainsKeyword(req Request, kw string) bool {
 	if strings.Contains(req.Host, kw) {
 		return true
@@ -66,7 +68,51 @@ func RequestContainsKeyword(req Request, kw string) bool {
 	return false
 }
 
-//Version returns the ffuf version string
+// HostURLFromRequest gets a host + path without the filename or last part of the URL path
+func HostURLFromRequest(req Request) string {
+	u, _ := url.Parse(req.Url)
+	u.Host = req.Host
+	pathparts := strings.Split(u.Path, "/")
+	trimpath := strings.TrimSpace(strings.Join(pathparts[:len(pathparts)-1], "/"))
+	return u.Host + trimpath
+}
+
+// Version returns the ffuf version string
 func Version() string {
 	return fmt.Sprintf("%s%s", VERSION, VERSION_APPENDIX)
 }
+
+func CheckOrCreateConfigDir() error {
+	var err error
+	err = createConfigDir(CONFIGDIR)
+	if err != nil {
+		return err
+	}
+	err = createConfigDir(HISTORYDIR)
+	if err != nil {
+		return err
+	}
+	err = createConfigDir(SCRAPERDIR)
+	return err
+}
+
+func createConfigDir(path string) error {
+	_, err := os.Stat(path)
+	if err != nil {
+		var pError *os.PathError
+		if errors.As(err, &pError) {
+			return os.MkdirAll(path, 0750)
+		}
+		return err
+	}
+	return nil
+}
+
+func StrInSlice(key string, slice []string) bool {
+	for _, v := range slice {
+		if v == key {
+			return true
+		}
+	}
+	return false
+}
diff --git a/pkg/ffuf/version.go b/pkg/ffuf/version.go
deleted file mode 100644
index b4fd473..0000000
--- a/pkg/ffuf/version.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package ffuf
-
-var (
-	//VERSION holds the current version number
-	VERSION = "1.4.1"
-	//VERSION_APPENDIX holds additional version definition
-	VERSION_APPENDIX = "-dev"
-)
diff --git a/pkg/filter/filter.go b/pkg/filter/filter.go
index 17234fd..8b7f29f 100644
--- a/pkg/filter/filter.go
+++ b/pkg/filter/filter.go
@@ -1,14 +1,57 @@
 package filter
 
 import (
-	"flag"
 	"fmt"
-	"strconv"
-	"strings"
+	"sync"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
+// MatcherManager handles both filters and matchers.
+type MatcherManager struct {
+	IsCalibrated     bool
+	Mutex            sync.Mutex
+	Matchers         map[string]ffuf.FilterProvider
+	Filters          map[string]ffuf.FilterProvider
+	PerDomainFilters map[string]*PerDomainFilter
+}
+
+type PerDomainFilter struct {
+	IsCalibrated bool
+	Filters      map[string]ffuf.FilterProvider
+}
+
+func NewPerDomainFilter(globfilters map[string]ffuf.FilterProvider) *PerDomainFilter {
+	return &PerDomainFilter{IsCalibrated: false, Filters: globfilters}
+}
+
+func (p *PerDomainFilter) SetCalibrated(value bool) {
+	p.IsCalibrated = value
+}
+
+func NewMatcherManager() ffuf.MatcherManager {
+	return &MatcherManager{
+		IsCalibrated:     false,
+		Matchers:         make(map[string]ffuf.FilterProvider),
+		Filters:          make(map[string]ffuf.FilterProvider),
+		PerDomainFilters: make(map[string]*PerDomainFilter),
+	}
+}
+
+func (f *MatcherManager) SetCalibrated(value bool) {
+	f.IsCalibrated = value
+}
+
+func (f *MatcherManager) SetCalibratedForHost(host string, value bool) {
+	if f.PerDomainFilters[host] != nil {
+		f.PerDomainFilters[host].IsCalibrated = value
+	} else {
+		newFilter := NewPerDomainFilter(f.Filters)
+		newFilter.IsCalibrated = true
+		f.PerDomainFilters[host] = newFilter
+	}
+}
+
 func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) {
 	if name == "status" {
 		return NewStatusFilter(value)
@@ -31,195 +74,102 @@ func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) {
 	return nil, fmt.Errorf("Could not create filter with name %s", name)
 }
 
-//AddFilter adds a new filter to Config
-func AddFilter(conf *ffuf.Config, name string, option string) error {
+//AddFilter adds a new filter to MatcherManager
+func (f *MatcherManager) AddFilter(name string, option string, replace bool) error {
+	f.Mutex.Lock()
+	defer f.Mutex.Unlock()
+	newf, err := NewFilterByName(name, option)
+	if err == nil {
+		// valid filter create or append
+		if f.Filters[name] == nil || replace {
+			f.Filters[name] = newf
+		} else {
+			newoption := f.Filters[name].Repr() + "," + option
+			newerf, err := NewFilterByName(name, newoption)
+			if err == nil {
+				f.Filters[name] = newerf
+			}
+		}
+	}
+	return err
+}
+
+//AddPerDomainFilter adds a new filter to PerDomainFilter configuration
+func (f *MatcherManager) AddPerDomainFilter(domain string, name string, option string) error {
+	f.Mutex.Lock()
+	defer f.Mutex.Unlock()
+	var pdFilters *PerDomainFilter
+	if filter, ok := f.PerDomainFilters[domain]; ok {
+		pdFilters = filter
+	} else {
+		pdFilters = NewPerDomainFilter(f.Filters)
+	}
 	newf, err := NewFilterByName(name, option)
 	if err == nil {
 		// valid filter create or append
-		if conf.Filters[name] == nil {
-			conf.Filters[name] = newf
+		if pdFilters.Filters[name] == nil {
+			pdFilters.Filters[name] = newf
 		} else {
-			newoption := conf.Filters[name].Repr() + "," + option
+			newoption := pdFilters.Filters[name].Repr() + "," + option
 			newerf, err := NewFilterByName(name, newoption)
 			if err == nil {
-				conf.Filters[name] = newerf
+				pdFilters.Filters[name] = newerf
 			}
 		}
 	}
+	f.PerDomainFilters[domain] = pdFilters
 	return err
 }
 
 //RemoveFilter removes a filter of a given type
-func RemoveFilter(conf *ffuf.Config, name string) {
-	delete(conf.Filters, name)
+func (f *MatcherManager) RemoveFilter(name string) {
+	f.Mutex.Lock()
+	defer f.Mutex.Unlock()
+	delete(f.Filters, name)
 }
 
 //AddMatcher adds a new matcher to Config
-func AddMatcher(conf *ffuf.Config, name string, option string) error {
+func (f *MatcherManager) AddMatcher(name string, option string) error {
+	f.Mutex.Lock()
+	defer f.Mutex.Unlock()
 	newf, err := NewFilterByName(name, option)
 	if err == nil {
-		conf.Matchers[name] = newf
+		// valid filter create or append
+		if f.Matchers[name] == nil {
+			f.Matchers[name] = newf
+		} else {
+			newoption := f.Matchers[name].Repr() + "," + option
+			newerf, err := NewFilterByName(name, newoption)
+			if err == nil {
+				f.Matchers[name] = newerf
+			}
+		}
 	}
 	return err
 }
 
-//CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and acting accordingly
-func CalibrateIfNeeded(j *ffuf.Job) error {
-	var err error
-	if !j.Config.AutoCalibration {
-		return nil
-	}
-	// Handle the calibration
-	responses, err := j.CalibrateResponses()
-	if err != nil {
-		return err
-	}
-	if len(responses) > 0 {
-		err = calibrateFilters(j, responses)
-	}
-	return err
+func (f *MatcherManager) GetFilters() map[string]ffuf.FilterProvider {
+	return f.Filters
 }
 
-func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) error {
-	sizeCalib := make([]string, 0)
-	wordCalib := make([]string, 0)
-	lineCalib := make([]string, 0)
-	for _, r := range responses {
-		if r.ContentLength > 0 {
-			// Only add if we have an actual size of responses
-			sizeCalib = append(sizeCalib, strconv.FormatInt(r.ContentLength, 10))
-		}
-		if r.ContentWords > 0 {
-			// Only add if we have an actual word length of response
-			wordCalib = append(wordCalib, strconv.FormatInt(r.ContentWords, 10))
-		}
-		if r.ContentLines > 1 {
-			// Only add if we have an actual word length of response
-			lineCalib = append(lineCalib, strconv.FormatInt(r.ContentLines, 10))
-		}
-	}
-
-	//Remove duplicates
-	sizeCalib = ffuf.UniqStringSlice(sizeCalib)
-	wordCalib = ffuf.UniqStringSlice(wordCalib)
-	lineCalib = ffuf.UniqStringSlice(lineCalib)
+func (f *MatcherManager) GetMatchers() map[string]ffuf.FilterProvider {
+	return f.Matchers
+}
 
-	if len(sizeCalib) > 0 {
-		err := AddFilter(j.Config, "size", strings.Join(sizeCalib, ","))
-		if err != nil {
-			return err
-		}
-	}
-	if len(wordCalib) > 0 {
-		err := AddFilter(j.Config, "word", strings.Join(wordCalib, ","))
-		if err != nil {
-			return err
-		}
-	}
-	if len(lineCalib) > 0 {
-		err := AddFilter(j.Config, "line", strings.Join(lineCalib, ","))
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error {
-	errs := ffuf.NewMultierror()
-	// If any other matcher is set, ignore -mc default value
-	matcherSet := false
-	statusSet := false
-	warningIgnoreBody := false
-	flag.Visit(func(f *flag.Flag) {
-		if f.Name == "mc" {
-			statusSet = true
-		}
-		if f.Name == "ms" {
-			matcherSet = true
-			warningIgnoreBody = true
-		}
-		if f.Name == "ml" {
-			matcherSet = true
-			warningIgnoreBody = true
-		}
-		if f.Name == "mr" {
-			matcherSet = true
-		}
-		if f.Name == "mt" {
-			matcherSet = true
-		}
-		if f.Name == "mw" {
-			matcherSet = true
-			warningIgnoreBody = true
-		}
-	})
-	if statusSet || !matcherSet {
-		if err := AddMatcher(conf, "status", parseOpts.Matcher.Status); err != nil {
-			errs.Add(err)
-		}
+func (f *MatcherManager) FiltersForDomain(domain string) map[string]ffuf.FilterProvider {
+	if f.PerDomainFilters[domain] == nil {
+		return f.Filters
 	}
+	return f.PerDomainFilters[domain].Filters
+}
 
-	if parseOpts.Filter.Status != "" {
-		if err := AddFilter(conf, "status", parseOpts.Filter.Status); err != nil {
-			errs.Add(err)
-		}
+func (f *MatcherManager) CalibratedForDomain(domain string) bool {
+	if f.PerDomainFilters[domain] != nil {
+		return f.PerDomainFilters[domain].IsCalibrated
 	}
-	if parseOpts.Filter.Size != "" {
-		warningIgnoreBody = true
-		if err := AddFilter(conf, "size", parseOpts.Filter.Size); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Filter.Regexp != "" {
-		if err := AddFilter(conf, "regexp", parseOpts.Filter.Regexp); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Filter.Words != "" {
-		warningIgnoreBody = true
-		if err := AddFilter(conf, "word", parseOpts.Filter.Words); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Filter.Lines != "" {
-		warningIgnoreBody = true
-		if err := AddFilter(conf, "line", parseOpts.Filter.Lines); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Filter.Time != "" {
-		if err := AddFilter(conf, "time", parseOpts.Filter.Time); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Matcher.Size != "" {
-		if err := AddMatcher(conf, "size", parseOpts.Matcher.Size); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Matcher.Regexp != "" {
-		if err := AddMatcher(conf, "regexp", parseOpts.Matcher.Regexp); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Matcher.Words != "" {
-		if err := AddMatcher(conf, "word", parseOpts.Matcher.Words); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Matcher.Lines != "" {
-		if err := AddMatcher(conf, "line", parseOpts.Matcher.Lines); err != nil {
-			errs.Add(err)
-		}
-	}
-	if parseOpts.Matcher.Time != "" {
-		if err := AddFilter(conf, "time", parseOpts.Matcher.Time); err != nil {
-			errs.Add(err)
-		}
-	}
-	if conf.IgnoreBody && warningIgnoreBody {
-		fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n")
-	}
-	return errs.ErrorOrNil()
+	return false
+}
+
+func (f *MatcherManager) Calibrated() bool {
+	return f.IsCalibrated
 }
diff --git a/pkg/filter/lines.go b/pkg/filter/lines.go
index ced350b..a4df44b 100644
--- a/pkg/filter/lines.go
+++ b/pkg/filter/lines.go
@@ -6,7 +6,7 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type LineFilter struct {
diff --git a/pkg/filter/lines_test.go b/pkg/filter/lines_test.go
index 92c36d4..5d119b1 100644
--- a/pkg/filter/lines_test.go
+++ b/pkg/filter/lines_test.go
@@ -4,7 +4,7 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestNewLineFilter(t *testing.T) {
diff --git a/pkg/filter/regex.go b/pkg/filter/regex.go
index 8f57025..47fbb9d 100644
--- a/pkg/filter/regex.go
+++ b/pkg/filter/regex.go
@@ -6,7 +6,7 @@ import (
 	"regexp"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type RegexpFilter struct {
diff --git a/pkg/filter/regexp_test.go b/pkg/filter/regexp_test.go
index 20033b4..e4e9efa 100644
--- a/pkg/filter/regexp_test.go
+++ b/pkg/filter/regexp_test.go
@@ -4,7 +4,7 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestNewRegexpFilter(t *testing.T) {
diff --git a/pkg/filter/size.go b/pkg/filter/size.go
index c940d4e..7db3e76 100644
--- a/pkg/filter/size.go
+++ b/pkg/filter/size.go
@@ -6,7 +6,7 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type SizeFilter struct {
diff --git a/pkg/filter/size_test.go b/pkg/filter/size_test.go
index b5d7d3d..18935e7 100644
--- a/pkg/filter/size_test.go
+++ b/pkg/filter/size_test.go
@@ -4,7 +4,7 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestNewSizeFilter(t *testing.T) {
diff --git a/pkg/filter/status.go b/pkg/filter/status.go
index 0beb5b9..59382b1 100644
--- a/pkg/filter/status.go
+++ b/pkg/filter/status.go
@@ -6,7 +6,7 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 const AllStatuses = 0
diff --git a/pkg/filter/status_test.go b/pkg/filter/status_test.go
index 9006a5a..aaa1b52 100644
--- a/pkg/filter/status_test.go
+++ b/pkg/filter/status_test.go
@@ -4,7 +4,7 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestNewStatusFilter(t *testing.T) {
diff --git a/pkg/filter/time.go b/pkg/filter/time.go
index 1041708..9d27087 100755
--- a/pkg/filter/time.go
+++ b/pkg/filter/time.go
@@ -6,7 +6,7 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type TimeFilter struct {
diff --git a/pkg/filter/time_test.go b/pkg/filter/time_test.go
index 03e1b8a..88b277a 100755
--- a/pkg/filter/time_test.go
+++ b/pkg/filter/time_test.go
@@ -4,7 +4,7 @@ import (
 	"testing"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestNewTimeFilter(t *testing.T) {
diff --git a/pkg/filter/words.go b/pkg/filter/words.go
index 685e534..8c59234 100644
--- a/pkg/filter/words.go
+++ b/pkg/filter/words.go
@@ -6,7 +6,7 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type WordFilter struct {
diff --git a/pkg/filter/words_test.go b/pkg/filter/words_test.go
index 3c760be..5054985 100644
--- a/pkg/filter/words_test.go
+++ b/pkg/filter/words_test.go
@@ -4,7 +4,7 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestNewWordFilter(t *testing.T) {
diff --git a/pkg/input/command.go b/pkg/input/command.go
index c6aa059..d230a08 100644
--- a/pkg/input/command.go
+++ b/pkg/input/command.go
@@ -6,7 +6,7 @@ import (
 	"os/exec"
 	"strconv"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type CommandInput struct {
@@ -34,32 +34,37 @@ func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandI
 	return &cmd, nil
 }
 
-//Keyword returns the keyword assigned to this InternalInputProvider
+// Keyword returns the keyword assigned to this InternalInputProvider
 func (c *CommandInput) Keyword() string {
 	return c.keyword
 }
 
-//Position will return the current position in the input list
+// Position will return the current position in the input list
 func (c *CommandInput) Position() int {
 	return c.count
 }
 
-//ResetPosition will reset the current position of the InternalInputProvider
+// SetPosition will set the current position of the inputprovider
+func (c *CommandInput) SetPosition(pos int) {
+	c.count = pos
+}
+
+// ResetPosition will reset the current position of the InternalInputProvider
 func (c *CommandInput) ResetPosition() {
 	c.count = 0
 }
 
-//IncrementPosition increments the current position in the inputprovider
+// IncrementPosition increments the current position in the inputprovider
 func (c *CommandInput) IncrementPosition() {
 	c.count += 1
 }
 
-//Next will increment the cursor position, and return a boolean telling if there's iterations left
+// Next will increment the cursor position, and return a boolean telling if there's iterations left
 func (c *CommandInput) Next() bool {
 	return c.count < c.config.InputNum
 }
 
-//Value returns the input from command stdoutput
+// Value returns the input from command stdoutput
 func (c *CommandInput) Value() []byte {
 	var stdout bytes.Buffer
 	os.Setenv("FFUF_NUM", strconv.Itoa(c.count))
@@ -72,7 +77,7 @@ func (c *CommandInput) Value() []byte {
 	return stdout.Bytes()
 }
 
-//Total returns the size of wordlist
+// Total returns the size of wordlist
 func (c *CommandInput) Total() int {
 	return c.config.InputNum
 }
diff --git a/pkg/input/input.go b/pkg/input/input.go
index f5502c2..993246a 100644
--- a/pkg/input/input.go
+++ b/pkg/input/input.go
@@ -3,7 +3,7 @@ package input
 import (
 	"fmt"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type MainInputProvider struct {
@@ -62,12 +62,21 @@ func (i *MainInputProvider) ActivateKeywords(kws []string) {
 	}
 }
 
-//Position will return the current position of progress
+// Position will return the current position of progress
 func (i *MainInputProvider) Position() int {
 	return i.position
 }
 
-//Keywords returns a slice of all keywords in the inputprovider
+// SetPosition will set the MainInputProvider to a specific position
+func (i *MainInputProvider) SetPosition(pos int) {
+	if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" {
+		i.setclusterbombPosition(pos)
+	} else {
+		i.setpitchforkPosition(pos)
+	}
+}
+
+// Keywords returns a slice of all keywords in the inputprovider
 func (i *MainInputProvider) Keywords() []string {
 	kws := make([]string, 0)
 	for _, p := range i.Providers {
@@ -76,7 +85,7 @@ func (i *MainInputProvider) Keywords() []string {
 	return kws
 }
 
-//Next will increment the cursor position, and return a boolean telling if there's inputs left
+// Next will increment the cursor position, and return a boolean telling if there's inputs left
 func (i *MainInputProvider) Next() bool {
 	if i.position >= i.Total() {
 		return false
@@ -85,7 +94,7 @@ func (i *MainInputProvider) Next() bool {
 	return true
 }
 
-//Value returns a map of inputs for keywords
+// Value returns a map of inputs for keywords
 func (i *MainInputProvider) Value() map[string][]byte {
 	retval := make(map[string][]byte)
 	if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" {
@@ -97,7 +106,7 @@ func (i *MainInputProvider) Value() map[string][]byte {
 	return retval
 }
 
-//Reset resets all the inputproviders and counters
+// Reset resets all the inputproviders and counters
 func (i *MainInputProvider) Reset() {
 	for _, p := range i.Providers {
 		p.ResetPosition()
@@ -106,8 +115,8 @@ func (i *MainInputProvider) Reset() {
 	i.msbIterator = 0
 }
 
-//pitchforkValue returns a map of keyword:value pairs including all inputs.
-//This mode will iterate through wordlists in lockstep.
+// pitchforkValue returns a map of keyword:value pairs including all inputs.
+// This mode will iterate through wordlists in lockstep.
 func (i *MainInputProvider) pitchforkValue() map[string][]byte {
 	values := make(map[string][]byte)
 	for _, p := range i.Providers {
@@ -125,8 +134,14 @@ func (i *MainInputProvider) pitchforkValue() map[string][]byte {
 	return values
 }
 
-//clusterbombValue returns map of keyword:value pairs including all inputs.
-//this mode will iterate through all possible combinations.
+func (i *MainInputProvider) setpitchforkPosition(pos int) {
+	for _, p := range i.Providers {
+		p.SetPosition(pos)
+	}
+}
+
+// clusterbombValue returns map of keyword:value pairs including all inputs.
+// this mode will iterate through all possible combinations.
 func (i *MainInputProvider) clusterbombValue() map[string][]byte {
 	values := make(map[string][]byte)
 	// Should we signal the next InputProvider in the slice to increment
@@ -163,6 +178,18 @@ func (i *MainInputProvider) clusterbombValue() map[string][]byte {
 	return values
 }
 
+func (i *MainInputProvider) setclusterbombPosition(pos int) {
+	i.Reset()
+	if pos > i.Total() {
+		// noop
+		return
+	}
+	for i.position < pos-1 {
+		i.Next()
+		i.Value()
+	}
+}
+
 func (i *MainInputProvider) clusterbombIteratorReset() {
 	index := 0
 	for _, p := range i.Providers {
@@ -179,7 +206,7 @@ func (i *MainInputProvider) clusterbombIteratorReset() {
 	}
 }
 
-//Total returns the amount of input combinations available
+// Total returns the amount of input combinations available
 func (i *MainInputProvider) Total() int {
 	count := 0
 	if i.Config.InputMode == "pitchfork" {
@@ -204,7 +231,7 @@ func (i *MainInputProvider) Total() int {
 	return count
 }
 
-//sliceContains is a helper function that returns true if a string is included in a string slice
+// sliceContains is a helper function that returns true if a string is included in a string slice
 func sliceContains(sslice []string, str string) bool {
 	for _, v := range sslice {
 		if v == str {
diff --git a/pkg/input/wordlist.go b/pkg/input/wordlist.go
index f22dfd9..64db39e 100644
--- a/pkg/input/wordlist.go
+++ b/pkg/input/wordlist.go
@@ -6,7 +6,7 @@ import (
 	"regexp"
 	"strings"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type WordlistInput struct {
@@ -42,57 +42,62 @@ func NewWordlistInput(keyword string, value string, conf *ffuf.Config) (*Wordlis
 	return &wl, err
 }
 
-//Position will return the current position in the input list
+// Position will return the current position in the input list
 func (w *WordlistInput) Position() int {
 	return w.position
 }
 
-//ResetPosition resets the position back to beginning of the wordlist.
+// SetPosition sets the current position of the inputprovider
+func (w *WordlistInput) SetPosition(pos int) {
+	w.position = pos
+}
+
+// ResetPosition resets the position back to beginning of the wordlist.
 func (w *WordlistInput) ResetPosition() {
 	w.position = 0
 }
 
-//Keyword returns the keyword assigned to this InternalInputProvider
+// Keyword returns the keyword assigned to this InternalInputProvider
 func (w *WordlistInput) Keyword() string {
 	return w.keyword
 }
 
-//Next will return a boolean telling if there's words left in the list
+// Next will return a boolean telling if there's words left in the list
 func (w *WordlistInput) Next() bool {
 	return w.position < len(w.data)
 }
 
-//IncrementPosition will increment the current position in the inputprovider data slice
+// IncrementPosition will increment the current position in the inputprovider data slice
 func (w *WordlistInput) IncrementPosition() {
 	w.position += 1
 }
 
-//Value returns the value from wordlist at current cursor position
+// Value returns the value from wordlist at current cursor position
 func (w *WordlistInput) Value() []byte {
 	return w.data[w.position]
 }
 
-//Total returns the size of wordlist
+// Total returns the size of wordlist
 func (w *WordlistInput) Total() int {
 	return len(w.data)
 }
 
-//Active returns boolean if the inputprovider is active
+// Active returns boolean if the inputprovider is active
 func (w *WordlistInput) Active() bool {
 	return w.active
 }
 
-//Enable sets the inputprovider as active
+// Enable sets the inputprovider as active
 func (w *WordlistInput) Enable() {
 	w.active = true
 }
 
-//Disable disables the inputprovider
+// Disable disables the inputprovider
 func (w *WordlistInput) Disable() {
 	w.active = false
 }
 
-//validFile checks that the wordlist file exists and can be read
+// validFile checks that the wordlist file exists and can be read
 func (w *WordlistInput) validFile(path string) (bool, error) {
 	_, err := os.Stat(path)
 	if err != nil {
@@ -106,7 +111,7 @@ func (w *WordlistInput) validFile(path string) (bool, error) {
 	return true, nil
 }
 
-//readFile reads the file line by line to a byte slice
+// readFile reads the file line by line to a byte slice
 func (w *WordlistInput) readFile(path string) error {
 	var file *os.File
 	var err error
diff --git a/pkg/interactive/termhandler.go b/pkg/interactive/termhandler.go
index 15a6c3a..77f5f6d 100644
--- a/pkg/interactive/termhandler.go
+++ b/pkg/interactive/termhandler.go
@@ -7,8 +7,7 @@ import (
 	"strings"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
-	"github.com/ffuf/ffuf/pkg/filter"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type interactive struct {
@@ -81,7 +80,7 @@ func (i *interactive) handleInput(in []byte) {
 			} else if len(args) > 2 {
 				i.Job.Output.Error("Too many arguments for \"fc\"")
 			} else {
-				i.updateFilter("status", args[1])
+				i.updateFilter("status", args[1], true)
 				i.Job.Output.Info("New status code filter value set")
 			}
 		case "afc":
@@ -99,7 +98,7 @@ func (i *interactive) handleInput(in []byte) {
 			} else if len(args) > 2 {
 				i.Job.Output.Error("Too many arguments for \"fl\"")
 			} else {
-				i.updateFilter("line", args[1])
+				i.updateFilter("line", args[1], true)
 				i.Job.Output.Info("New line count filter value set")
 			}
 		case "afl":
@@ -117,7 +116,7 @@ func (i *interactive) handleInput(in []byte) {
 			} else if len(args) > 2 {
 				i.Job.Output.Error("Too many arguments for \"fw\"")
 			} else {
-				i.updateFilter("word", args[1])
+				i.updateFilter("word", args[1], true)
 				i.Job.Output.Info("New word count filter value set")
 			}
 		case "afw":
@@ -135,7 +134,7 @@ func (i *interactive) handleInput(in []byte) {
 			} else if len(args) > 2 {
 				i.Job.Output.Error("Too many arguments for \"fs\"")
 			} else {
-				i.updateFilter("size", args[1])
+				i.updateFilter("size", args[1], true)
 				i.Job.Output.Info("New response size filter value set")
 			}
 		case "afs":
@@ -153,7 +152,7 @@ func (i *interactive) handleInput(in []byte) {
 			} else if len(args) > 2 {
 				i.Job.Output.Error("Too many arguments for \"ft\"")
 			} else {
-				i.updateFilter("time", args[1])
+				i.updateFilter("time", args[1], true)
 				i.Job.Output.Info("New response time filter value set")
 			}
 		case "aft":
@@ -178,6 +177,20 @@ func (i *interactive) handleInput(in []byte) {
 		case "queueskip":
 			i.Job.SkipQueue()
 			i.Job.Output.Info("Skipping to the next queued job")
+		case "rate":
+			if len(args) < 2 {
+				i.Job.Output.Error("Please define the new rate")
+			} else if len(args) > 2 {
+				i.Job.Output.Error("Too many arguments for \"rate\"")
+			} else {
+				newrate, err := strconv.Atoi(args[1])
+				if err != nil {
+					i.Job.Output.Error(fmt.Sprintf("Could not adjust rate: %s", err))
+				} else {
+					i.Job.Rate.ChangeRate(newrate)
+				}
+			}
+
 		default:
 			if i.paused {
 				i.Job.Output.Warning(fmt.Sprintf("Unknown command: \"%s\". Enter \"help\" for a list of available commands", args[0]))
@@ -192,19 +205,10 @@ func (i *interactive) handleInput(in []byte) {
 	}
 }
 
-func (i *interactive) updateFilter(name, value string) {
-	if value == "none" {
-		filter.RemoveFilter(i.Job.Config, name)
-	} else {
-		newFc, err := filter.NewFilterByName(name, value)
-		if err != nil {
-			i.Job.Output.Error(fmt.Sprintf("Error while setting new filter value: %s", err))
-			return
-		} else {
-			i.Job.Config.Filters[name] = newFc
-		}
-
-		results := make([]ffuf.Result, 0)
+func (i *interactive) refreshResults() {
+	results := make([]ffuf.Result, 0)
+	filters := i.Job.Config.MatcherManager.GetFilters()
+	for _, filter := range filters {
 		for _, res := range i.Job.Output.GetCurrentResults() {
 			fakeResp := &ffuf.Response{
 				StatusCode:    res.StatusCode,
@@ -212,22 +216,26 @@ func (i *interactive) updateFilter(name, value string) {
 				ContentWords:  res.ContentWords,
 				ContentLength: res.ContentLength,
 			}
-			filterOut, _ := newFc.Filter(fakeResp)
+			filterOut, _ := filter.Filter(fakeResp)
 			if !filterOut {
 				results = append(results, res)
 			}
 		}
-		i.Job.Output.SetCurrentResults(results)
 	}
+	i.Job.Output.SetCurrentResults(results)
 }
 
-func (i *interactive) appendFilter(name, value string) {
-	if oldFc, found := i.Job.Config.Filters[name]; found {
-		oldVal := oldFc.Repr()
-		i.updateFilter(name, strings.Join([]string{oldVal, value}, ","))
+func (i *interactive) updateFilter(name, value string, replace bool) {
+	if value == "none" {
+		i.Job.Config.MatcherManager.RemoveFilter(name)
 	} else {
-		i.updateFilter(name, value)
+		_ = i.Job.Config.MatcherManager.AddFilter(name, value, replace)
 	}
+	i.refreshResults()
+}
+
// appendFilter adds value to the named filter, keeping any existing values
// (non-replacing variant of updateFilter).
func (i *interactive) appendFilter(name, value string) {
	i.updateFilter(name, value, false)
}
 
 func (i *interactive) printQueue() {
@@ -270,7 +278,7 @@ func (i *interactive) printPrompt() {
 
 func (i *interactive) printHelp() {
 	var fc, fl, fs, ft, fw string
-	for name, filter := range i.Job.Config.Filters {
+	for name, filter := range i.Job.Config.MatcherManager.GetFilters() {
 		switch name {
 		case "status":
 			fc = "(active: " + filter.Repr() + ")"
@@ -284,26 +292,28 @@ func (i *interactive) printHelp() {
 			ft = "(active: " + filter.Repr() + ")"
 		}
 	}
+	rate := fmt.Sprintf("(active: %d)", i.Job.Config.Rate)
 	help := `
 available commands:
- afc [value]             - append to status code filter %s
- fc  [value]             - (re)configure status code filter %s
- afl [value]             - append to line count filter %s
- fl  [value]             - (re)configure line count filter %s
- afw [value]             - append to word count filter %s
- fw  [value]             - (re)configure word count filter %s
- afs [value]             - append to size filter %s
- fs  [value]             - (re)configure size filter %s
- aft [value]             - append to time filter %s
- ft  [value]			 - (re)configure time filter %s
- queueshow              - show job queue
- queuedel [number]      - delete a job in the queue
- queueskip              - advance to the next queued job
- restart                - restart and resume the current ffuf job
- resume                 - resume current ffuf job (or: ENTER) 
- show                   - show results for the current job
- savejson [filename]    - save current matches to a file
- help                   - you are looking at it
+ afc  [value]             - append to status code filter %s
+ fc   [value]             - (re)configure status code filter %s
+ afl  [value]             - append to line count filter %s
+ fl   [value]             - (re)configure line count filter %s
+ afw  [value]             - append to word count filter %s
+ fw   [value]             - (re)configure word count filter %s
+ afs  [value]             - append to size filter %s
+ fs   [value]             - (re)configure size filter %s
+ aft  [value]             - append to time filter %s
+ ft   [value]             - (re)configure time filter %s
+ rate [value]             - adjust rate of requests per second %s
+ queueshow                - show job queue
+ queuedel [number]        - delete a job in the queue
+ queueskip                - advance to the next queued job
+ restart                  - restart and resume the current ffuf job
+ resume                   - resume current ffuf job (or: ENTER) 
+ show                     - show results for the current job
+ savejson [filename]      - save current matches to a file
+ help                     - you are looking at it
 `
-	i.Job.Output.Raw(fmt.Sprintf(help, fc, fc, fl, fl, fw, fw, fs, fs, ft, ft))
+	i.Job.Output.Raw(fmt.Sprintf(help, fc, fc, fl, fl, fw, fw, fs, fs, ft, ft, rate))
 }
diff --git a/pkg/output/file_csv.go b/pkg/output/file_csv.go
index 3679708..8a7a767 100644
--- a/pkg/output/file_csv.go
+++ b/pkg/output/file_csv.go
@@ -6,7 +6,7 @@ import (
 	"os"
 	"strconv"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "duration", "resultfile"}
diff --git a/pkg/output/file_csv_test.go b/pkg/output/file_csv_test.go
index a858ece..e1dbf71 100644
--- a/pkg/output/file_csv_test.go
+++ b/pkg/output/file_csv_test.go
@@ -5,7 +5,7 @@ import (
 	"testing"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func TestToCSV(t *testing.T) {
diff --git a/pkg/output/file_html.go b/pkg/output/file_html.go
index 325a4dd..5a130d9 100644
--- a/pkg/output/file_html.go
+++ b/pkg/output/file_html.go
@@ -1,18 +1,35 @@
 package output
 
 import (
+	"html"
 	"html/template"
 	"os"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
// htmlResult is a flattened, template-friendly view of ffuf.Result used by
// the HTML (and Markdown) report writers: input values are converted from
// []byte to string and scraper data is pre-rendered into a single HTML
// fragment string.
type htmlResult struct {
	Input            map[string]string
	Position         int
	StatusCode       int64
	ContentLength    int64
	ContentWords     int64
	ContentLines     int64
	ContentType      string
	RedirectLocation string
	ScraperData      string // pre-rendered HTML fragment built by the report writer
	Duration         time.Duration
	ResultFile       string
	Url              string
	Host             string
}

// htmlFileOutput is the root object handed to the HTML report template.
type htmlFileOutput struct {
	CommandLine string
	Time        string
	Keys        []string
	Results     []htmlResult
}
 
 const (
@@ -65,7 +82,7 @@ const (
    <table id="ffufreport">
         <thead>
         <div style="display:none">
-|result_raw|StatusCode|Input|Position|ContentLength|ContentWords|ContentLines|
+|result_raw|StatusCode|Input|Position|ContentLength|ContentWords|ContentLines|ContentType|Duration|Resultfile|ScraperData|
         </div>
           <tr>
               <th>Status</th>
@@ -78,8 +95,9 @@ const (
               <th>Words</th>
 			  <th>Lines</th>
 			  <th>Type</th>
-        <th>Duration</th>
+              <th>Duration</th>
 			  <th>Resultfile</th>
+              <th>Scraper data</th>
           </tr>
         </thead>
 
@@ -100,8 +118,9 @@ const (
                     <td>{{ $result.ContentWords }}</td>
 					<td>{{ $result.ContentLines }}</td>
 					<td>{{ $result.ContentType }}</td>
-          <td>{{ $result.Duration }}</td>
+					<td>{{ $result.Duration }}</td>
                     <td>{{ $result.ResultFile }}</td>
+					<td>{{ $result.ScraperData }}</td>
                 </tr>
             {{ end }}
         </tbody>
@@ -187,11 +206,49 @@ func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) erro
 	for _, inputprovider := range config.InputProviders {
 		keywords = append(keywords, inputprovider.Keyword)
 	}
+	htmlResults := make([]htmlResult, 0)
 
+	for _, r := range results {
+		strinput := make(map[string]string)
+		for k, v := range r.Input {
+			strinput[k] = string(v)
+		}
+		strscraper := ""
+		for k, v := range r.ScraperData {
+			if len(v) > 0 {
+				strscraper = strscraper + "<p><b>" + html.EscapeString(k) + ":</b><br />"
+				firstval := true
+				for _, val := range v {
+					if !firstval {
+						strscraper += "<br />"
+					}
+					strscraper += html.EscapeString(val)
+					firstval = false
+				}
+				strscraper += "</p>"
+			}
+		}
+		hres := htmlResult{
+			Input:            strinput,
+			Position:         r.Position,
+			StatusCode:       r.StatusCode,
+			ContentLength:    r.ContentLength,
+			ContentWords:     r.ContentWords,
+			ContentLines:     r.ContentLines,
+			ContentType:      r.ContentType,
+			RedirectLocation: r.RedirectLocation,
+			ScraperData:      strscraper,
+			Duration:         r.Duration,
+			ResultFile:       r.ResultFile,
+			Url:              r.Url,
+			Host:             r.Host,
+		}
+		htmlResults = append(htmlResults, hres)
+	}
 	outHTML := htmlFileOutput{
 		CommandLine: config.CommandLine,
 		Time:        ti.Format(time.RFC3339),
-		Results:     results,
+		Results:     htmlResults,
 		Keys:        keywords,
 	}
 
diff --git a/pkg/output/file_json.go b/pkg/output/file_json.go
index 61f5cc9..c0cb29c 100644
--- a/pkg/output/file_json.go
+++ b/pkg/output/file_json.go
@@ -2,10 +2,10 @@ package output
 
 import (
 	"encoding/json"
-	"io/ioutil"
+	"os"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 type ejsonFileOutput struct {
@@ -16,18 +16,19 @@ type ejsonFileOutput struct {
 }
 
 type JsonResult struct {
-	Input            map[string]string `json:"input"`
-	Position         int               `json:"position"`
-	StatusCode       int64             `json:"status"`
-	ContentLength    int64             `json:"length"`
-	ContentWords     int64             `json:"words"`
-	ContentLines     int64             `json:"lines"`
-	ContentType      string            `json:"content-type"`
-	RedirectLocation string            `json:"redirectlocation"`
-	Duration         time.Duration     `json:"duration"`
-	ResultFile       string            `json:"resultfile"`
-	Url              string            `json:"url"`
-	Host             string            `json:"host"`
+	Input            map[string]string   `json:"input"`
+	Position         int                 `json:"position"`
+	StatusCode       int64               `json:"status"`
+	ContentLength    int64               `json:"length"`
+	ContentWords     int64               `json:"words"`
+	ContentLines     int64               `json:"lines"`
+	ContentType      string              `json:"content-type"`
+	RedirectLocation string              `json:"redirectlocation"`
+	ScraperData      map[string][]string `json:"scraper"`
+	Duration         time.Duration       `json:"duration"`
+	ResultFile       string              `json:"resultfile"`
+	Url              string              `json:"url"`
+	Host             string              `json:"host"`
 }
 
 type jsonFileOutput struct {
@@ -49,7 +50,7 @@ func writeEJSON(filename string, config *ffuf.Config, res []ffuf.Result) error {
 	if err != nil {
 		return err
 	}
-	err = ioutil.WriteFile(filename, outBytes, 0644)
+	err = os.WriteFile(filename, outBytes, 0644)
 	if err != nil {
 		return err
 	}
@@ -73,6 +74,7 @@ func writeJSON(filename string, config *ffuf.Config, res []ffuf.Result) error {
 			ContentLines:     r.ContentLines,
 			ContentType:      r.ContentType,
 			RedirectLocation: r.RedirectLocation,
+			ScraperData:      r.ScraperData,
 			Duration:         r.Duration,
 			ResultFile:       r.ResultFile,
 			Url:              r.Url,
@@ -89,7 +91,7 @@ func writeJSON(filename string, config *ffuf.Config, res []ffuf.Result) error {
 	if err != nil {
 		return err
 	}
-	err = ioutil.WriteFile(filename, outBytes, 0644)
+	err = os.WriteFile(filename, outBytes, 0644)
 	if err != nil {
 		return err
 	}
diff --git a/pkg/output/file_md.go b/pkg/output/file_md.go
index a9186aa..b0e8a37 100644
--- a/pkg/output/file_md.go
+++ b/pkg/output/file_md.go
@@ -5,7 +5,7 @@ import (
 	"os"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 const (
@@ -14,13 +14,13 @@ const (
   Command line : ` + "`{{.CommandLine}}`" + `
   Time: ` + "{{ .Time }}" + `
 
-  {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile |
-  {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- |
-  {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} |
+  {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile | ScraperData
+  {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- | :------------ |
+  {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} | {{ .ScraperData }} |
   {{end}}` // The template format is not pretty but follows the markdown guide
 )
 
-func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error {
+func writeMarkdown(filename string, config *ffuf.Config, results []ffuf.Result) error {
 	ti := time.Now()
 
 	keywords := make([]string, 0)
@@ -28,10 +28,50 @@ func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) erro
 		keywords = append(keywords, inputprovider.Keyword)
 	}
 
+	htmlResults := make([]htmlResult, 0)
+
+	for _, r := range results {
+		strinput := make(map[string]string)
+		for k, v := range r.Input {
+			strinput[k] = string(v)
+		}
+		strscraper := ""
+		for k, v := range r.ScraperData {
+			if len(v) > 0 {
+				strscraper = strscraper + "<p><b>" + k + ":</b><br />"
+				firstval := true
+				for _, val := range v {
+					if !firstval {
+						strscraper += "<br />"
+					}
+					strscraper += val
+					firstval = false
+				}
+				strscraper += "</p>"
+			}
+		}
+		hres := htmlResult{
+			Input:            strinput,
+			Position:         r.Position,
+			StatusCode:       r.StatusCode,
+			ContentLength:    r.ContentLength,
+			ContentWords:     r.ContentWords,
+			ContentLines:     r.ContentLines,
+			ContentType:      r.ContentType,
+			RedirectLocation: r.RedirectLocation,
+			ScraperData:      strscraper,
+			Duration:         r.Duration,
+			ResultFile:       r.ResultFile,
+			Url:              r.Url,
+			Host:             r.Host,
+		}
+		htmlResults = append(htmlResults, hres)
+	}
+
 	outMD := htmlFileOutput{
 		CommandLine: config.CommandLine,
 		Time:        ti.Format(time.RFC3339),
-		Results:     res,
+		Results:     htmlResults,
 		Keys:        keywords,
 	}
 
diff --git a/pkg/output/output.go b/pkg/output/output.go
index a1e0149..a87f096 100644
--- a/pkg/output/output.go
+++ b/pkg/output/output.go
@@ -1,7 +1,7 @@
 package output
 
 import (
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func NewOutputProviderByName(name string, conf *ffuf.Config) ffuf.OutputProvider {
diff --git a/pkg/output/stdout.go b/pkg/output/stdout.go
index 588decb..0b11c0a 100644
--- a/pkg/output/stdout.go
+++ b/pkg/output/stdout.go
@@ -4,14 +4,14 @@ import (
 	"crypto/md5"
 	"encoding/json"
 	"fmt"
-	"io/ioutil"
 	"os"
 	"path"
+	"sort"
 	"strconv"
 	"strings"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 const (
@@ -28,6 +28,7 @@ const (
 
 type Stdoutput struct {
 	config         *ffuf.Config
+	fuzzkeywords   []string
 	Results        []ffuf.Result
 	CurrentResults []ffuf.Result
 }
@@ -37,6 +38,11 @@ func NewStdoutput(conf *ffuf.Config) *Stdoutput {
 	outp.config = conf
 	outp.Results = make([]ffuf.Result, 0)
 	outp.CurrentResults = make([]ffuf.Result, 0)
+	outp.fuzzkeywords = make([]string, 0)
+	for _, ip := range conf.InputProviders {
+		outp.fuzzkeywords = append(outp.fuzzkeywords, ip.Keyword)
+	}
+	sort.Strings(outp.fuzzkeywords)
 	return &outp
 }
 
@@ -124,11 +130,11 @@ func (s *Stdoutput) Banner() {
 	}
 
 	// Print matchers
-	for _, f := range s.config.Matchers {
+	for _, f := range s.config.MatcherManager.GetMatchers() {
 		printOption([]byte("Matcher"), []byte(f.ReprVerbose()))
 	}
 	// Print filters
-	for _, f := range s.config.Filters {
+	for _, f := range s.config.MatcherManager.GetFilters() {
 		printOption([]byte("Filter"), []byte(f.ReprVerbose()))
 	}
 	fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP)
@@ -324,6 +330,7 @@ func (s *Stdoutput) Result(resp ffuf.Response) {
 		ContentLines:     resp.ContentLines,
 		ContentType:      resp.ContentType,
 		RedirectLocation: resp.GetRedirectLocation(false),
+		ScraperData:      resp.ScraperData,
 		Url:              resp.Request.Url,
 		Duration:         resp.Time,
 		ResultFile:       resp.ResultFile,
@@ -352,7 +359,7 @@ func (s *Stdoutput) writeResultToFile(resp ffuf.Response) string {
 	fileName = fmt.Sprintf("%x", md5.Sum([]byte(fileContent)))
 
 	filePath = path.Join(s.config.OutputDirectory, fileName)
-	err := ioutil.WriteFile(filePath, []byte(fileContent), 0640)
+	err := os.WriteFile(filePath, []byte(fileContent), 0640)
 	if err != nil {
 		s.Error(err.Error())
 	}
@@ -361,11 +368,11 @@ func (s *Stdoutput) writeResultToFile(resp ffuf.Response) string {
 
 func (s *Stdoutput) PrintResult(res ffuf.Result) {
 	switch {
-	case s.config.Quiet:
-		s.resultQuiet(res)
 	case s.config.Json:
 		s.resultJson(res)
-	case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0:
+	case s.config.Quiet:
+		s.resultQuiet(res)
+	case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 || len(res.ScraperData) > 0:
 		// Print a multi-line result (when using multiple input keywords and wordlists)
 		s.resultMultiline(res)
 	default:
@@ -377,7 +384,7 @@ func (s *Stdoutput) prepareInputsOneLine(res ffuf.Result) string {
 	inputs := ""
 	if len(res.Input) > 1 {
 		for k, v := range res.Input {
-			if inSlice(k, s.config.CommandKeywords) {
+			if ffuf.StrInSlice(k, s.config.CommandKeywords) {
 				// If we're using external command for input, display the position instead of input
 				inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(res.Position))
 			} else {
@@ -386,7 +393,7 @@ func (s *Stdoutput) prepareInputsOneLine(res ffuf.Result) string {
 		}
 	} else {
 		for k, v := range res.Input {
-			if inSlice(k, s.config.CommandKeywords) {
+			if ffuf.StrInSlice(k, s.config.CommandKeywords) {
 				// If we're using external command for input, display the position instead of input
 				inputs = strconv.Itoa(res.Position)
 			} else {
@@ -416,13 +423,21 @@ func (s *Stdoutput) resultMultiline(res ffuf.Result) {
 	if res.ResultFile != "" {
 		reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, res.ResultFile)
 	}
-	for k, v := range res.Input {
-		if inSlice(k, s.config.CommandKeywords) {
+	for _, k := range s.fuzzkeywords {
+		if ffuf.StrInSlice(k, s.config.CommandKeywords) {
 			// If we're using external command for input, display the position instead of input
 			reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(res.Position))
 		} else {
 			// Wordlist input
-			reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, v)
+			reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, res.Input[k])
+		}
+	}
+	if len(res.ScraperData) > 0 {
+		reslines = fmt.Sprintf("%s%s| SCR |\n", reslines, TERMINAL_CLEAR_LINE)
+		for k, vslice := range res.ScraperData {
+			for _, v := range vslice {
+				reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, v)
+			}
 		}
 	}
 	fmt.Printf("%s\n%s\n", res_hdr, reslines)
@@ -466,12 +481,3 @@ func (s *Stdoutput) colorize(status int64) string {
// printOption writes a single banner line (" :: name : value") to stderr.
func printOption(name []byte, value []byte) {
	fmt.Fprintf(os.Stderr, " :: %-16s : %s\n", name, value)
}
-
-func inSlice(key string, slice []string) bool {
-	for _, v := range slice {
-		if v == key {
-			return true
-		}
-	}
-	return false
-}
diff --git a/pkg/runner/runner.go b/pkg/runner/runner.go
index 092d1e0..84abd1e 100644
--- a/pkg/runner/runner.go
+++ b/pkg/runner/runner.go
@@ -1,7 +1,7 @@
 package runner
 
 import (
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
 func NewRunnerByName(name string, conf *ffuf.Config, replay bool) ffuf.RunnerProvider {
diff --git a/pkg/runner/simple.go b/pkg/runner/simple.go
index 76ba525..8dd90b8 100644
--- a/pkg/runner/simple.go
+++ b/pkg/runner/simple.go
@@ -2,9 +2,10 @@ package runner
 
 import (
 	"bytes"
+	"compress/gzip"
 	"crypto/tls"
 	"fmt"
-	"io/ioutil"
+	"io"
 	"net"
 	"net/http"
 	"net/http/httptrace"
@@ -15,10 +16,10 @@ import (
 	"strings"
 	"time"
 
-	"github.com/ffuf/ffuf/pkg/ffuf"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
 )
 
// MAX_DOWNLOAD_SIZE caps how much of a response body is downloaded (5 MB).
const MAX_DOWNLOAD_SIZE = 5242880
 
 type SimpleRunner struct {
@@ -47,7 +48,7 @@ func NewSimpleRunner(conf *ffuf.Config, replay bool) ffuf.RunnerProvider {
 		CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse },
 		Timeout:       time.Duration(time.Duration(conf.Timeout) * time.Second),
 		Transport: &http.Transport{
-			ForceAttemptHTTP2: conf.Http2,
+			ForceAttemptHTTP2:   conf.Http2,
 			Proxy:               proxyURL,
 			MaxIdleConns:        1000,
 			MaxIdleConnsPerHost: 500,
@@ -131,7 +132,6 @@ func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) {
 	if len(r.config.OutputDirectory) > 0 {
 		rawreq, _ = httputil.DumpRequestOut(httpreq, true)
 	}
-
 	httpresp, err := r.client.Do(httpreq)
 	if err != nil {
 		return ffuf.Response{}, err
@@ -155,8 +155,18 @@ func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) {
 		resp.Request.Raw = string(rawreq)
 		resp.Raw = string(rawresp)
 	}
+	var bodyReader io.ReadCloser
+	if httpresp.Header.Get("Content-Encoding") == "gzip" {
+		bodyReader, err = gzip.NewReader(httpresp.Body)
+		if err != nil {
+			// fallback to raw data
+			bodyReader = httpresp.Body
+		}
+	} else {
+		bodyReader = httpresp.Body
+	}
 
-	if respbody, err := ioutil.ReadAll(httpresp.Body); err == nil {
+	if respbody, err := io.ReadAll(bodyReader); err == nil {
 		resp.ContentLength = int64(len(string(respbody)))
 		resp.Data = respbody
 	}
@@ -166,6 +176,31 @@ func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) {
 	resp.ContentWords = int64(wordsSize)
 	resp.ContentLines = int64(linesSize)
 	resp.Time = firstByteTime
-
 	return resp, nil
 }
+
// Dump builds the HTTP request the same way Execute would and returns its
// full on-the-wire representation (headers and body) without sending it.
func (r *SimpleRunner) Dump(req *ffuf.Request) ([]byte, error) {
	var httpreq *http.Request
	var err error
	data := bytes.NewReader(req.Data)
	httpreq, err = http.NewRequestWithContext(r.config.Context, req.Method, req.Url, data)
	if err != nil {
		return []byte{}, err
	}

	// set default User-Agent header if not present
	// NOTE(review): this mutates req.Headers, presumably mirroring Execute —
	// confirm the caller expects the request to be modified.
	if _, ok := req.Headers["User-Agent"]; !ok {
		req.Headers["User-Agent"] = fmt.Sprintf("%s v%s", "Fuzz Faster U Fool", ffuf.Version())
	}

	// Handle Go http.Request special cases: the Host header must be set on
	// the request struct itself, not only in the header map.
	if _, ok := req.Headers["Host"]; ok {
		httpreq.Host = req.Headers["Host"]
	}

	req.Host = httpreq.Host
	for k, v := range req.Headers {
		httpreq.Header.Set(k, v)
	}
	return httputil.DumpRequestOut(httpreq, true)
}
diff --git a/pkg/scraper/scraper.go b/pkg/scraper/scraper.go
new file mode 100644
index 0000000..eda4402
--- /dev/null
+++ b/pkg/scraper/scraper.go
@@ -0,0 +1,168 @@
+package scraper
+
+import (
+	"encoding/json"
+	"fmt"
+	"os"
+	"path/filepath"
+	"regexp"
+	"strings"
+
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
+
+	"github.com/PuerkitoBio/goquery"
+)
+
// ScraperRule is a single scraping rule: a regexp or goquery (CSS selector)
// expression run against a chosen part of the HTTP response.
type ScraperRule struct {
	Name string `json:"name"`
	// Rule is the expression itself: a regular expression or a CSS selector,
	// depending on Type.
	Rule string `json:"rule"`
	// Target selects what the rule runs against: "body", "headers", or any
	// other value for headers and body combined.
	Target       string `json:"target"`
	compiledRule *regexp.Regexp // compiled form of Rule, set by init() when Type == "regexp"
	// Type selects the rule engine: "regexp" or "query".
	Type string `json:"type"`
	// OnlyMatched limits the rule to responses that matched the matchers.
	OnlyMatched bool     `json:"onlymatched"`
	Action      []string `json:"action"`
}
+
// ScraperGroup is a named collection of scraper rules loaded from a single
// JSON file; a group is used when it is Active (and "all" groups are
// requested) or when it is requested by name.
type ScraperGroup struct {
	Rules  []*ScraperRule `json:"rules"`
	Name   string         `json:"groupname"`
	Active bool           `json:"active"`
}

// Scraper holds the flattened set of rules from all enabled groups.
type Scraper struct {
	Rules []*ScraperRule
}
+
+func readGroupFromFile(filename string) (ScraperGroup, error) {
+	data, err := os.ReadFile(filename)
+	if err != nil {
+		return ScraperGroup{Rules: make([]*ScraperRule, 0)}, err
+	}
+	sc := ScraperGroup{}
+	err = json.Unmarshal([]byte(data), &sc)
+	return sc, err
+}
+
+func FromDir(dirname string, activestr string) (ffuf.Scraper, ffuf.Multierror) {
+	scr := Scraper{Rules: make([]*ScraperRule, 0)}
+	errs := ffuf.NewMultierror()
+	activegrps := parseActiveGroups(activestr)
+	all_files, err := os.ReadDir(ffuf.SCRAPERDIR)
+	if err != nil {
+		errs.Add(err)
+		return &scr, errs
+	}
+	for _, filename := range all_files {
+		if filename.Type().IsRegular() && strings.HasSuffix(filename.Name(), ".json") {
+			sg, err := readGroupFromFile(filepath.Join(dirname, filename.Name()))
+			if err != nil {
+				cerr := fmt.Errorf("%s : %s", filepath.Join(dirname, filename.Name()), err)
+				errs.Add(cerr)
+				continue
+			}
+			if (sg.Active && isActive("all", activegrps)) || isActive(sg.Name, activegrps) {
+				for _, r := range sg.Rules {
+					err = r.init()
+					if err != nil {
+						cerr := fmt.Errorf("%s : %s", filepath.Join(dirname, filename.Name()), err)
+						errs.Add(cerr)
+						continue
+					}
+					scr.Rules = append(scr.Rules, r)
+				}
+			}
+		}
+	}
+	return &scr, errs
+}
+
// AppendFromFile reads a scraper rule group from a JSON file and appends its
// valid rules to the scraper. Rules whose initialization fails (e.g. an
// invalid regexp) are skipped.
// NOTE(review): the returned err is whatever the *last* r.init() call
// produced, so a trailing bad rule yields a non-nil error even though the
// other rules were appended — confirm callers expect this.
func (s *Scraper) AppendFromFile(path string) error {
	sg, err := readGroupFromFile(path)
	if err != nil {
		return err
	}

	for _, r := range sg.Rules {
		err = r.init()
		if err != nil {
			continue
		}
		s.Rules = append(s.Rules, r)
	}

	return err
}
+
+func (s *Scraper) Execute(resp *ffuf.Response, matched bool) []ffuf.ScraperResult {
+	res := make([]ffuf.ScraperResult, 0)
+	for _, rule := range s.Rules {
+		if !matched && rule.OnlyMatched {
+			// pass this rule as there was no match
+			continue
+		}
+		sourceData := ""
+		if rule.Target == "body" {
+			sourceData = string(resp.Data)
+		} else if rule.Target == "headers" {
+			sourceData = headerString(resp.Headers)
+		} else {
+			sourceData = headerString(resp.Headers) + string(resp.Data)
+		}
+		val := rule.Check(sourceData)
+		if len(val) > 0 {
+			res = append(res, ffuf.ScraperResult{
+				Name:    rule.Name,
+				Type:    rule.Type,
+				Action:  rule.Action,
+				Results: val,
+			})
+		}
+	}
+	return res
+}
+
+// init initializes the scraper rule, and returns an error in case there's an error in the syntax
+func (r *ScraperRule) init() error {
+	var err error
+	if r.Type == "regexp" {
+		r.compiledRule, err = regexp.Compile(r.Rule)
+		if err != nil {
+			return err
+		}
+	}
+	return err
+}
+
+func (r *ScraperRule) Check(data string) []string {
+	if r.Type == "regexp" {
+		return r.checkRegexp(data)
+	} else if r.Type == "query" {
+		return r.checkQuery(data)
+	}
+	return []string{}
+}
+
// checkQuery runs a goquery (CSS selector) rule against the data parsed as
// an HTML document and returns the text content of every matching node.
func (r *ScraperRule) checkQuery(data string) []string {
	val := make([]string, 0)
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(data))
	if err != nil {
		// Unparseable document: report no matches rather than failing.
		return []string{}
	}
	doc.Find(r.Rule).Each(func(i int, sel *goquery.Selection) {
		val = append(val, sel.Text())
	})
	return val
}
+
+func (r *ScraperRule) checkRegexp(data string) []string {
+	val := make([]string, 0)
+	if r.compiledRule != nil {
+		res := r.compiledRule.FindAllStringSubmatch(data, -1)
+		for _, grp := range res {
+			val = append(val, grp...)
+		}
+		return val
+	}
+	return []string{}
+}
diff --git a/pkg/scraper/util.go b/pkg/scraper/util.go
new file mode 100644
index 0000000..e93adb7
--- /dev/null
+++ b/pkg/scraper/util.go
@@ -0,0 +1,29 @@
+package scraper
+
+import (
+	"fmt"
+	"github.com/ffuf/ffuf/v2/pkg/ffuf"
+	"strings"
+)
+
// headerString flattens an HTTP header map into "Name: value\n" lines, one
// per value. Ordering follows Go's (randomized) map iteration order.
func headerString(headers map[string][]string) string {
	var sb strings.Builder
	for name, values := range headers {
		for _, value := range values {
			sb.WriteString(fmt.Sprintf("%s: %s\n", name, value))
		}
	}
	return sb.String()
}
+
// isActive reports whether the normalized (trimmed, lowercased) group name
// appears in the list of requested active groups.
func isActive(name string, activegroups []string) bool {
	return ffuf.StrInSlice(strings.ToLower(strings.TrimSpace(name)), activegroups)
}
+
// parseActiveGroups splits a comma-separated list of scraper group names
// into a slice of trimmed, lowercased names.
func parseActiveGroups(activestr string) []string {
	parts := strings.Split(activestr, ",")
	groups := make([]string, len(parts))
	for idx, part := range parts {
		groups[idx] = strings.ToLower(strings.TrimSpace(part))
	}
	return groups
}

More details

Full run details

Historical runs