Skip to content

Commit a44e39e

Browse files
devploit and claude
committed
Improve reliability, output quality, and fix request-file query string bug
## Error Handling (graceful degradation) - Replace all log.Fatalf calls with log.Printf + return so a single technique failure no longer kills the entire scan - Fix malformed error string log.Fatalf("{#err}") in response body close - Rate limit (HTTP 429) now returns ErrRateLimited instead of crashing, allowing remaining techniques to continue - loadFlagsFromRequestFile no longer crashes on invalid/empty files ## Concurrency Safety - Replace global _verbose and defaultCl with sync/atomic operations (atomicVerbose, atomicDefaultCl, atomicDefaultSc, atomicCalibTolerance) - Add thread-safe getter/setter functions for all shared state ## Request Robustness - Add requestWithRetry() with up to 2 retries and exponential backoff (500ms, 1s) for transient errors (timeouts, connection refused, EOF) - Add isTransientError() to classify retryable vs permanent errors - Check curl availability with exec.LookPath before HTTP versions technique - Rate limiting retries with backoff instead of crashing ## Input Validation - Add validateURI() checking scheme (http/https), host, and format - Called at start of requester() before any requests are made - Bounds checking on SplitAfterN in loadFlagsFromRequestFile - Proxy parsing falls back to empty proxy instead of crashing ## Auto-Calibration (multi-sample with tolerance) - runAutocalibrate() now sends 3 requests with different paths - Calculates average content-length and dynamic tolerance range - isCalibrationMatch() uses ±max(50, 2*deviation) instead of exact match - Reduces false positives on servers with dynamic content (CSRF tokens, etc.) ## Smart Output Filtering (non-verbose mode) - Filter results matching default response signature (same status + similar CL) - Per-technique dedup by status+CL group: show 3 examples + summary count - "... and N more with STATUS/CL bytes (use -v to see all)" message - Verbose mode (-v) remains unchanged, showing everything ## Progress Bar - Inline progress bar on stderr for each technique (e.g.
██████░░░ 48% (267/544)) - TTY detection: only renders on interactive terminals, invisible in pipes/redirects - Coordinates with printMutex to avoid overlapping with result output - Clears before each result print, redraws after ## UI Improvements - Compact two-column configuration banner (8 lines vs 16) - Dim labels + bold values, "-" for unset fields, flags on one line - Content-length colored by divergence from default response: green (>2x larger), cyan (1.2-2x), blue (similar), yellow (smaller), red (<50%) ## Bug Fix: --request-file query string (fixes #48) - loadFlagsFromRequestFile no longer strips query parameters from URL - Was: strings.Split(req.RequestURI, "?")[0] — now uses req.RequestURI directly - URLs like /upload?action=get&user_id=123 are now processed in full ## Tests - 17 new tests with httptest.Server covering all bypass techniques - Tests for: verb tampering, headers, endpaths, midpaths, double-encoding, path case switching, verb case switching, rate limiting, missing payloads, URI validation, calibration tolerance, transient errors, auto-calibration, request file query string preservation Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent a43243f commit a44e39e

7 files changed

Lines changed: 1401 additions & 166 deletions

File tree

cmd/api.go

Lines changed: 142 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ import (
44
"bufio"
55
"crypto/tls"
66
"fmt"
7-
"io"
87
"log"
98
"net"
109
"net/http"
@@ -17,6 +16,9 @@ import (
1716
"github.com/fatih/color"
1817
)
1918

19+
// ErrRateLimited is returned when the server responds with HTTP 429.
20+
var ErrRateLimited = fmt.Errorf("rate limited (HTTP 429)")
21+
2022
// parseFile reads a file given its filename and returns a list containing each of its lines.
2123
func parseFile(filename string) ([]string, error) {
2224
file, err := os.Open(filename)
@@ -51,8 +53,69 @@ type header struct {
5153
value string
5254
}
5355

54-
// request makes an HTTP request using headers `headers` and proxy `proxy`.
55-
func request(method, uri string, headers []header, proxy *url.URL, rateLimit bool, timeout int, redirect bool) (int, []byte, error) {
56+
// requestWithRetry makes an HTTP request with retry logic and exponential backoff.
57+
// It retries up to maxRetries times on transient errors (timeouts, connection errors).
58+
// On HTTP 429, it retries with backoff if rateLimit is false; returns ErrRateLimited if rateLimit is true.
59+
func requestWithRetry(method, uri string, headers []header, proxy *url.URL, rateLimit bool, timeout int, redirect bool) (int, []byte, error) {
60+
const maxRetries = 2
61+
var lastErr error
62+
63+
for attempt := 0; attempt <= maxRetries; attempt++ {
64+
if attempt > 0 {
65+
backoff := time.Duration(1<<uint(attempt)) * 500 * time.Millisecond
66+
time.Sleep(backoff)
67+
}
68+
69+
statusCode, resp, err := request(method, uri, headers, proxy, timeout, redirect)
70+
if err == nil {
71+
// Handle rate limiting
72+
if statusCode == 429 {
73+
if rateLimit {
74+
return statusCode, resp, ErrRateLimited
75+
}
76+
lastErr = fmt.Errorf("HTTP 429 rate limited on attempt %d", attempt+1)
77+
continue
78+
}
79+
return statusCode, resp, nil
80+
}
81+
82+
lastErr = err
83+
// Only retry on transient errors (timeouts, connection refused, etc.)
84+
if !isTransientError(err) {
85+
return 0, nil, err
86+
}
87+
if attempt < maxRetries {
88+
log.Printf("[!] Transient error (attempt %d/%d): %v", attempt+1, maxRetries+1, err)
89+
}
90+
}
91+
92+
return 0, nil, fmt.Errorf("request failed after %d attempts: %w", maxRetries+1, lastErr)
93+
}
94+
95+
// isTransientError returns true for errors that are likely transient and worth retrying.
// Matching is case-insensitive and substring-based against the error text.
func isTransientError(err error) bool {
	if err == nil {
		return false
	}
	// Lowercase the error text once; patterns are stored pre-lowered so the
	// per-iteration ToLower calls of the original are not repeated.
	errStr := strings.ToLower(err.Error())
	transientPatterns := []string{
		"timeout",
		"connection refused",
		"connection reset",
		"eof",
		"temporary failure",
		"no such host", // DNS can be transient
	}
	for _, pattern := range transientPatterns {
		if strings.Contains(errStr, pattern) {
			return true
		}
	}
	return false
}
116+
117+
// request makes a single HTTP request using headers `headers` and proxy `proxy`.
118+
func request(method, uri string, headers []header, proxy *url.URL, timeout int, redirect bool) (int, []byte, error) {
56119
if method == "" {
57120
method = "GET"
58121
}
@@ -89,7 +152,6 @@ func request(method, uri string, headers []header, proxy *url.URL, rateLimit boo
89152
}
90153
}
91154

92-
// Use raw URL parser
93155
parsedURL, err := url.Parse(uri)
94156
if err != nil || parsedURL == nil || parsedURL.Scheme == "" || parsedURL.Host == "" {
95157
return 0, nil, fmt.Errorf("invalid URL: %q", uri)
@@ -113,22 +175,17 @@ func request(method, uri string, headers []header, proxy *url.URL, rateLimit boo
113175
if err != nil {
114176
return 0, nil, err
115177
}
116-
defer func(Body io.ReadCloser) {
117-
err := Body.Close()
118-
if err != nil {
119-
log.Fatalf("{#err}")
178+
defer func() {
179+
if cerr := res.Body.Close(); cerr != nil {
180+
log.Printf("[!] Error closing response body: %v", cerr)
120181
}
121-
}(res.Body)
182+
}()
122183

123184
resp, err := httputil.DumpResponse(res, true)
124185
if err != nil {
125186
return 0, nil, err
126187
}
127188

128-
if rateLimit && res.StatusCode == 429 {
129-
log.Fatalf("Rate limit detected (HTTP 429). Exiting...")
130-
}
131-
132189
return res.StatusCode, resp, nil
133190
}
134191

@@ -137,62 +194,107 @@ func loadFlagsFromRequestFile(requestFile string, schema bool, verbose bool, tec
137194
// Read the content of the request file
138195
content, err := os.ReadFile(requestFile)
139196
if err != nil {
140-
log.Fatalf("Error reading request file: %v", err)
197+
log.Printf("[!] Error reading request file: %v", err)
198+
return
141199
}
142-
//Down HTTP/2 to HTTP/1.1
200+
143201
temp := strings.Split(string(content), "\n")
144-
fistLine := strings.Replace(temp[0], "HTTP/2", "HTTP/1.1", 1)
145-
content = []byte(strings.Join(append([]string{fistLine}, temp[1:]...), "\n"))
202+
if len(temp) == 0 {
203+
log.Printf("[!] Request file is empty: %s", requestFile)
204+
return
205+
}
206+
207+
// Down HTTP/2 to HTTP/1.1
208+
firstLine := strings.Replace(temp[0], "HTTP/2", "HTTP/1.1", 1)
209+
content = []byte(strings.Join(append([]string{firstLine}, temp[1:]...), "\n"))
146210

147211
reqReader := strings.NewReader(string(content))
148212
req, err := http.ReadRequest(bufio.NewReader(reqReader))
149213
if err != nil {
150-
log.Fatalf("Error parsing request: %v", err)
214+
log.Printf("[!] Error parsing request file: %v", err)
215+
return
151216
}
217+
152218
if strings.HasPrefix(req.RequestURI, "http://") {
153-
req.RequestURI = "/" + strings.SplitAfterN(req.RequestURI, "/", 4)[3]
219+
parts := strings.SplitAfterN(req.RequestURI, "/", 4)
220+
if len(parts) >= 4 {
221+
req.RequestURI = "/" + parts[3]
222+
}
154223
}
155224

156225
httpSchema := "https://"
157-
158226
if schema {
159227
httpSchema = "http://"
160228
}
161229

162-
uri := httpSchema + req.Host + strings.Split(req.RequestURI, "?")[0]
230+
uri := httpSchema + req.Host + req.RequestURI
163231

164-
// Extract headers from the request and assign them to the req_headers slice
232+
// Extract headers from the request
165233
var reqHeaders []string
166-
// Append req.Header to reqHeaders
167234
for k, v := range req.Header {
168235
reqHeaders = append(reqHeaders, k+": "+strings.Join(v, ""))
169236
}
170237
httpMethod := req.Method
171-
// Assign the extracted values to the corresponding flag variables
172238
requester(uri, proxy, userAgent, reqHeaders, bypassIP, folder, httpMethod, verbose, techniques, nobanner, rateLimit, timeout, redirect, randomAgent)
173239
}
174240

175-
func runAutocalibrate(options RequestOptions) int {
176-
calibrationURI := options.uri
177-
if !strings.HasSuffix(calibrationURI, "/") {
178-
calibrationURI += "/"
241+
// calibrationTolerance defines the acceptable variance in content-length between calibration samples.
242+
const calibrationTolerance = 50
243+
244+
func runAutocalibrate(options RequestOptions) (int, int) {
245+
calibrationPaths := []string{"calibration_test_123456", "calib_nonexist_789xyz", "zz_calibrate_000"}
246+
var samples []int
247+
248+
baseURI := options.uri
249+
if !strings.HasSuffix(baseURI, "/") {
250+
baseURI += "/"
179251
}
180-
calibrationURI += "calibration_test_123456"
181252

182-
statusCode, response, err := request("GET", calibrationURI, options.headers, options.proxy, options.rateLimit, options.timeout, options.redirect)
183-
if err != nil {
184-
log.Printf("[!] Error during calibration request: %v\n", err)
185-
return 0
253+
var lastStatusCode int
254+
for _, path := range calibrationPaths {
255+
calibrationURI := baseURI + path
256+
statusCode, response, err := requestWithRetry("GET", calibrationURI, options.headers, options.proxy, options.rateLimit, options.timeout, options.redirect)
257+
if err != nil {
258+
log.Printf("[!] Error during calibration request (%s): %v\n", path, err)
259+
continue
260+
}
261+
lastStatusCode = statusCode
262+
samples = append(samples, len(response))
263+
}
264+
265+
if len(samples) == 0 {
266+
log.Printf("[!] All calibration requests failed, disabling auto-calibration filtering")
267+
return 0, 0
268+
}
269+
270+
// Calculate average and max deviation
271+
sum := 0
272+
for _, s := range samples {
273+
sum += s
186274
}
275+
avgCl := sum / len(samples)
187276

188-
// Save default response
189-
defaultSc := statusCode
190-
defaultCl := len(response)
277+
maxDeviation := 0
278+
for _, s := range samples {
279+
dev := s - avgCl
280+
if dev < 0 {
281+
dev = -dev
282+
}
283+
if dev > maxDeviation {
284+
maxDeviation = dev
285+
}
286+
}
287+
288+
// Use tolerance = max(calibrationTolerance, maxDeviation*2) to handle dynamic content
289+
tolerance := calibrationTolerance
290+
if maxDeviation*2 > tolerance {
291+
tolerance = maxDeviation * 2
292+
}
191293

192294
fmt.Println(color.MagentaString("\n━━━━━━━━━━━━━━━ AUTO-CALIBRATION RESULTS ━━━━━━━━━━━━━━━"))
193-
fmt.Printf("[βœ”] Calibration URI: %s\n", calibrationURI)
194-
fmt.Printf("[βœ”] Status Code: %d\n", defaultSc)
195-
fmt.Printf("[βœ”] Content Length: %d bytes\n", defaultCl)
295+
fmt.Printf("[βœ”] Calibration samples: %d\n", len(samples))
296+
fmt.Printf("[βœ”] Status Code: %d\n", lastStatusCode)
297+
fmt.Printf("[βœ”] Avg Content Length: %d bytes (tolerance: Β±%d)\n", avgCl, tolerance)
196298

197-
return defaultCl
299+
return avgCl, tolerance
198300
}

0 commit comments

Comments
(0)