diff --git a/README.md b/README.md
index e24af9b2..98993b72 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,6 @@
  - Fast And fully configurable web crawling
  - **Standard** and **Headless** mode
- - **Active** and **Passive** mode
  - **JavaScript** parsing / crawling
  - Customizable **automatic form filling**
  - **Scope control** - Preconfigured field / Regex
@@ -157,10 +156,6 @@ HEADLESS:
    -cwu, -chrome-ws-url string  use chrome browser instance launched elsewhere with the debugger listening at this URL
    -xhr, -xhr-extraction        extract xhr request url,method in jsonl output
 
-PASSIVE:
-   -ps, -passive                   enable passive sources to discover target endpoints
-   -pss, -passive-source string[]  passive source to use for url discovery (waybackarchive,commoncrawl,alienvault)
-
 SCOPE:
    -cs, -crawl-scope string[]       in scope url regex to be followed by crawler
    -cos, -crawl-out-scope string[]  out of scope url regex to be excluded by crawler
diff --git a/cmd/katana/main.go b/cmd/katana/main.go
index 338b4068..1be5933d 100644
--- a/cmd/katana/main.go
+++ b/cmd/katana/main.go
@@ -134,10 +134,6 @@ pipelines offering both headless and non-headless crawling.`)
 		flagSet.StringVarP(&options.ChromeWSUrl, "chrome-ws-url", "cwu", "", "use chrome browser instance launched elsewhere with the debugger listening at this URL"),
 		flagSet.BoolVarP(&options.XhrExtraction, "xhr-extraction", "xhr", false, "extract xhr request url,method in jsonl output"),
 	)
-	flagSet.CreateGroup("passive", "Passive",
-		flagSet.BoolVarP(&options.Passive, "passive", "ps", false, "enable passive sources to discover target endpoints"),
-		flagSet.StringSliceVarP(&options.PassiveSource, "passive-source", "pss", nil, "passive source to use for url discovery (waybackarchive,commoncrawl,alienvault)", goflags.NormalizedStringSliceOptions),
-	)
 	flagSet.CreateGroup("scope", "Scope",
 		flagSet.StringSliceVarP(&options.Scope, "crawl-scope", "cs", nil, "in scope url regex to be followed by crawler", goflags.FileCommaSeparatedStringSliceOptions),
diff --git a/internal/runner/options.go b/internal/runner/options.go
index eb7f1738..61805e16 100644
--- a/internal/runner/options.go
+++ b/internal/runner/options.go
@@ -32,10 +32,6 @@ func validateOptions(options *types.Options) error {
 		gologger.Info().Msgf("Automatic form fill (-aff) has been disabled for headless navigation.")
 	}
 
-	if options.Headless && options.Passive {
-		return errorutil.New("headless mode (-headless) and passive mode (-passive) cannot be used together")
-	}
-
 	if (options.HeadlessOptionalArguments != nil || options.HeadlessNoSandbox || options.SystemChromePath != "") && !options.Headless {
 		return errorutil.New("headless mode (-hl) is required if -ho, -nos or -scp are set")
 	}
diff --git a/internal/runner/runner.go b/internal/runner/runner.go
index 1b91133e..9fcd09fe 100644
--- a/internal/runner/runner.go
+++ b/internal/runner/runner.go
@@ -9,7 +9,6 @@ import (
 	"github.com/projectdiscovery/katana/pkg/engine"
 	"github.com/projectdiscovery/katana/pkg/engine/hybrid"
 	"github.com/projectdiscovery/katana/pkg/engine/parser"
-	"github.com/projectdiscovery/katana/pkg/engine/passive"
 	"github.com/projectdiscovery/katana/pkg/engine/standard"
 	"github.com/projectdiscovery/katana/pkg/types"
 	"github.com/projectdiscovery/mapcidr"
@@ -98,8 +97,6 @@ func New(options *types.Options) (*Runner, error) {
 	switch {
 	case options.Headless:
 		crawler, err = hybrid.New(crawlerOptions)
-	case options.Passive:
-		crawler, err = passive.New(crawlerOptions)
 	default:
 		crawler, err = standard.New(crawlerOptions)
 	}
diff --git a/pkg/engine/common/base.go b/pkg/engine/common/base.go
index 3fff4da7..55576a0c 100644
--- a/pkg/engine/common/base.go
+++ b/pkg/engine/common/base.go
@@ -72,7 +72,7 @@ func (s *Shared) Enqueue(queue *queue.Queue, navigationRequests ...*navigation.R
 			// if the user requested anyway out of scope items
 			// they are sent to output without visiting
 			if s.Options.Options.DisplayOutScope {
-				s.Output(nr, nil, nil, ErrOutOfScope)
+				s.Output(nr, nil, ErrOutOfScope)
 			}
 			continue
 		}
@@ -95,18 +95,17 @@ func (s *Shared) ValidateScope(URL string, root string) bool {
 	return err == nil && scopeValidated
 }
 
-func (s *Shared) Output(navigationRequest *navigation.Request, navigationResponse *navigation.Response, passiveReference *navigation.PassiveReference, err error) {
+func (s *Shared) Output(navigationRequest *navigation.Request, navigationResponse *navigation.Response, err error) {
 	var errData string
 	if err != nil {
 		errData = err.Error()
 	}
 	// Write the found result to output
 	result := &output.Result{
-		Timestamp:        time.Now(),
-		Request:          navigationRequest,
-		Response:         navigationResponse,
-		PassiveReference: passiveReference,
-		Error:            errData,
+		Timestamp: time.Now(),
+		Request:   navigationRequest,
+		Response:  navigationResponse,
+		Error:     errData,
 	}
 
 	outputErr := s.Options.OutputWriter.Write(result)
@@ -230,7 +229,7 @@ func (s *Shared) Do(crawlSession *CrawlSession, doRequest DoRequestFunc) error {
 		resp, err := doRequest(crawlSession, req)
 
 		if inScope {
-			s.Output(req, resp, nil, err)
+			s.Output(req, resp, err)
 		}
 
 		if err != nil {
diff --git a/pkg/engine/passive/doc.go b/pkg/engine/passive/doc.go
deleted file mode 100644
index 915dd7b4..00000000
--- a/pkg/engine/passive/doc.go
+++ /dev/null
@@ -1,3 +0,0 @@
-// Package passive implements the functionality for a non-headless crawler.
-// It uses net/http for making requests and goquery for scraping web page HTML.
-package passive
diff --git a/pkg/engine/passive/httpclient/httpclient.go b/pkg/engine/passive/httpclient/httpclient.go
deleted file mode 100644
index e3c671da..00000000
--- a/pkg/engine/passive/httpclient/httpclient.go
+++ /dev/null
@@ -1,124 +0,0 @@
-package httpclient
-
-import (
-	"bytes"
-	"context"
-	"crypto/tls"
-	"fmt"
-	"io"
-	"net"
-	"net/http"
-	"net/url"
-	"time"
-
-	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/useragent"
-)
-
-type HttpClient struct {
-	Client *http.Client
-}
-
-type BasicAuth struct {
-	Username string
-	Password string
-}
-
-func NewHttpClient(timeout int) *HttpClient {
-	Transport := &http.Transport{
-		MaxIdleConns:        100,
-		MaxIdleConnsPerHost: 100,
-		TLSClientConfig: &tls.Config{
-			InsecureSkipVerify: true,
-		},
-		Dial: (&net.Dialer{
-			Timeout: time.Duration(timeout) * time.Second,
-		}).Dial,
-	}
-
-	client := &http.Client{
-		Transport: Transport,
-		Timeout:   time.Duration(timeout) * time.Second,
-	}
-
-	httpClient := &HttpClient{Client: client}
-
-	return httpClient
-}
-
-func (hc *HttpClient) Get(ctx context.Context, getURL, cookies string, headers map[string]string) (*http.Response, error) {
-	return hc.HTTPRequest(ctx, http.MethodGet, getURL, cookies, headers, nil, BasicAuth{})
-}
-
-func (hc *HttpClient) SimpleGet(ctx context.Context, getURL string) (*http.Response, error) {
-	return hc.HTTPRequest(ctx, http.MethodGet, getURL, "", map[string]string{}, nil, BasicAuth{})
-}
-
-func (hc *HttpClient) Post(ctx context.Context, postURL, cookies string, headers map[string]string, body io.Reader) (*http.Response, error) {
-	return hc.HTTPRequest(ctx, http.MethodPost, postURL, cookies, headers, body, BasicAuth{})
-}
-
-func (hc *HttpClient) SimplePost(ctx context.Context, postURL, contentType string, body io.Reader) (*http.Response, error) {
-	return hc.HTTPRequest(ctx, http.MethodPost, postURL, "", map[string]string{"Content-Type": contentType}, body, BasicAuth{})
-}
-
-func (hc *HttpClient) HTTPRequest(ctx context.Context, method, requestURL, cookies string, headers map[string]string, body io.Reader, basicAuth BasicAuth) (*http.Response, error) {
-	req, err := http.NewRequestWithContext(ctx, method, requestURL, body)
-	if err != nil {
-		return nil, err
-	}
-
-	userAgent := useragent.PickRandom()
-	req.Header.Set("User-Agent", userAgent.String())
-	req.Header.Set("Accept", "*/*")
-	req.Header.Set("Accept-Language", "en")
-	req.Header.Set("Connection", "close")
-
-	if basicAuth.Username != "" || basicAuth.Password != "" {
-		req.SetBasicAuth(basicAuth.Username, basicAuth.Password)
-	}
-
-	if cookies != "" {
-		req.Header.Set("Cookie", cookies)
-	}
-
-	for key, value := range headers {
-		req.Header.Set(key, value)
-	}
-
-	return httpRequestWrapper(hc.Client, req)
-}
-
-func (hc *HttpClient) DiscardHTTPResponse(response *http.Response) {
-	if response != nil {
-		_, err := io.Copy(io.Discard, response.Body)
-		if err != nil {
-			gologger.Warning().Msgf("Could not discard response body: %s\n", err)
-			return
-		}
-		response.Body.Close()
-	}
-}
-
-func (hc *HttpClient) Close() {
-	hc.Client.CloseIdleConnections()
-}
-
-func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
-	response, err := client.Do(request)
-	if err != nil {
-		return nil, err
-	}
-
-	if response.StatusCode != http.StatusOK {
-		requestURL, _ := url.QueryUnescape(request.URL.String())
-
-		gologger.Debug().MsgFunc(func() string {
-			buffer := new(bytes.Buffer)
-			_, _ = buffer.ReadFrom(response.Body)
-			return fmt.Sprintf("Response for failed request against %s:\n%s", requestURL, buffer.String())
-		})
-		return response, fmt.Errorf("unexpected status code %d received from %s", response.StatusCode, requestURL)
-	}
-	return response, nil
-}
diff --git a/pkg/engine/passive/passive.go b/pkg/engine/passive/passive.go
deleted file mode 100644
index 0a6188f7..00000000
--- a/pkg/engine/passive/passive.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package passive
-
-import (
-	"context"
-	"fmt"
-	"net/http"
-	"strings"
-	"sync"
-	"time"
-
-	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/katana/pkg/engine/common"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/httpclient"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/source"
-	"github.com/projectdiscovery/katana/pkg/navigation"
-	"github.com/projectdiscovery/katana/pkg/types"
-	"github.com/projectdiscovery/katana/pkg/utils"
-	errorutil "github.com/projectdiscovery/utils/errors"
-	urlutil "github.com/projectdiscovery/utils/url"
-	"golang.org/x/exp/maps"
-)
-
-// Crawler is a passive crawler instance
-type Crawler struct {
-	*common.Shared
-	sources    []source.Source
-	httpClient *httpclient.HttpClient
-}
-
-// New returns a new passive crawler instance
-func New(options *types.CrawlerOptions) (*Crawler, error) {
-	shared, err := common.NewShared(options)
-	if err != nil {
-		return nil, errorutil.NewWithErr(err).WithTag("passive")
-	}
-
-	sources := make(map[string]source.Source, len(Sources))
-	if len(options.Options.PassiveSource) > 0 {
-		for _, source := range options.Options.PassiveSource {
-			if s, ok := Sources[source]; ok {
-				sources[source] = s
-			}
-		}
-	} else {
-		sources = Sources
-	}
-
-	if len(sources) == 0 {
-		gologger.Fatal().Msg("No sources selected for this search")
-	}
-
-	gologger.Debug().Msgf(fmt.Sprintf("Selected source(s) for this crawl: %s", strings.Join(maps.Keys(sources), ", ")))
-
-	httpClient := httpclient.NewHttpClient(options.Options.Timeout)
-	return &Crawler{Shared: shared, sources: maps.Values(sources), httpClient: httpClient}, nil
-}
-
-// Close closes the crawler process
-func (c *Crawler) Close() error {
-	return nil
-}
-
-// Crawl crawls a URL with the specified options
-func (c *Crawler) Crawl(rootURL string) error {
-	gologger.Info().Msgf("Enumerating passive endpoints for %s", rootURL)
-
-	rootUrlParsed, _ := urlutil.ParseURL(rootURL, true)
-	results := make(chan source.Result)
-	var timeTaken time.Duration
-	go func() {
-		defer func(startTime time.Time) {
-			timeTaken = time.Since(startTime)
-			close(results)
-		}(time.Now())
-
-		ctx := context.Background()
-		wg := &sync.WaitGroup{}
-		for _, s := range c.sources {
-			wg.Add(1)
-			go func(source source.Source) {
-				for result := range source.Run(ctx, c.Shared, rootURL) {
-					results <- result
-				}
-				wg.Done()
-			}(s)
-		}
-		wg.Wait()
-	}()
-
-	seenURLs := make(map[string]struct{})
-	sourceStats := make(map[string]int)
-	for result := range results {
-		if _, found := seenURLs[result.Value]; found {
-			continue
-		}
-
-		if !utils.IsURL(result.Value) {
-			gologger.Debug().Msgf("`%v` not a url. skipping", result.Value)
-			continue
-		}
-
-		if ok, err := c.Options.ValidateScope(result.Value, rootUrlParsed.Hostname()); err != nil || !ok {
-			gologger.Debug().Msgf("`%v` not in scope. skipping", result.Value)
-			continue
-		}
-
-		if !c.Options.ExtensionsValidator.ValidatePath(result.Value) {
-			gologger.Debug().Msgf("`%v` not allowed extension. skipping", result.Value)
skipping", result.Value) - continue - } - - seenURLs[result.Value] = struct{}{} - sourceStats[result.Source]++ - - passiveURL, _ := urlutil.Parse(result.Value) - req := &navigation.Request{ - Method: http.MethodGet, - URL: result.Value, - } - resp := &navigation.Response{ - StatusCode: http.StatusOK, - RootHostname: passiveURL.Hostname(), - Resp: &http.Response{ - StatusCode: http.StatusOK, - Request: &http.Request{ - Method: http.MethodGet, - URL: passiveURL.URL, - }, - }, - } - passiveReference := &navigation.PassiveReference{ - Source: result.Source, - Reference: result.Reference, - } - c.Output(req, resp, passiveReference, nil) - } - - var stats []string - for source, count := range sourceStats { - stats = append(stats, fmt.Sprintf("%s: %d", source, count)) - } - - gologger.Info().Msgf("Found %d endpoints for %s in %s (%s)", len(seenURLs), rootURL, timeTaken.String(), strings.Join(stats, ", ")) - return nil -} diff --git a/pkg/engine/passive/regexp/regexp.go b/pkg/engine/passive/regexp/regexp.go deleted file mode 100644 index 785d7f90..00000000 --- a/pkg/engine/passive/regexp/regexp.go +++ /dev/null @@ -1,16 +0,0 @@ -package regexp - -import ( - "regexp" - "strings" -) - -var re, _ = regexp.Compile(`(?:http|https)?://(?:www\.)?[a-zA-Z0-9./?=_%:-]*`) - -func Extract(text string) []string { - matches := re.FindAllString(text, -1) - for i, match := range matches { - matches[i] = strings.ToLower(match) - } - return matches -} diff --git a/pkg/engine/passive/registry.go b/pkg/engine/passive/registry.go deleted file mode 100644 index b3a7f1d6..00000000 --- a/pkg/engine/passive/registry.go +++ /dev/null @@ -1,14 +0,0 @@ -package passive - -import ( - "github.com/projectdiscovery/katana/pkg/engine/passive/source" - "github.com/projectdiscovery/katana/pkg/engine/passive/source/alienvault" - "github.com/projectdiscovery/katana/pkg/engine/passive/source/commoncrawl" - "github.com/projectdiscovery/katana/pkg/engine/passive/source/waybackarchive" -) - -var Sources = map[string]source.Source{ - "waybackarchive": &waybackarchive.Source{}, - "commoncrawl": &commoncrawl.Source{}, - "alienvault": &alienvault.Source{}, -} diff --git a/pkg/engine/passive/source/alienvault/alienvault.go b/pkg/engine/passive/source/alienvault/alienvault.go deleted file mode 100644 index 8a4e25d3..00000000 --- a/pkg/engine/passive/source/alienvault/alienvault.go +++ /dev/null @@ -1,81 +0,0 @@ -package alienvault - -import ( - "context" - "encoding/json" - "fmt" - - "github.com/projectdiscovery/katana/pkg/engine/common" - "github.com/projectdiscovery/katana/pkg/engine/passive/httpclient" - "github.com/projectdiscovery/katana/pkg/engine/passive/source" - urlutil "github.com/projectdiscovery/utils/url" -) - -type alienvaultResponse struct { - URLList []url `json:"url_list"` - HasNext bool `json:"has_next"` -} - -type url struct { - URL string `json:"url"` -} - -type Source struct { -} - -func (s *Source) Run(ctx context.Context, sharedCtx *common.Shared, rootUrl string) <-chan source.Result { - results := make(chan source.Result) - - go func() { - defer close(results) - - if parsedRootUrl, err := urlutil.Parse(rootUrl); err == nil { - rootUrl = parsedRootUrl.Hostname() - } - - httpClient := httpclient.NewHttpClient(sharedCtx.Options.Options.Timeout) - page := 1 - for { - apiURL := fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/url_list?page=%d", rootUrl, page) - resp, err := httpClient.SimpleGet(ctx, apiURL) - if err != nil && resp == nil { - results <- source.Result{Source: s.Name(), Error: err} - 
-				httpClient.DiscardHTTPResponse(resp)
-				return
-			}
-
-			var response alienvaultResponse
-			// Get the response body and decode
-			err = json.NewDecoder(resp.Body).Decode(&response)
-			if err != nil {
-				results <- source.Result{Source: s.Name(), Error: err}
-				resp.Body.Close()
-				return
-			}
-			resp.Body.Close()
-
-			for _, record := range response.URLList {
-				results <- source.Result{Source: s.Name(), Value: record.URL, Reference: apiURL}
-			}
-
-			if !response.HasNext {
-				break
-			}
-			page++
-		}
-	}()
-
-	return results
-}
-
-func (s *Source) Name() string {
-	return "alienvault"
-}
-
-func (s *Source) NeedsKey() bool {
-	return false
-}
-
-func (s *Source) AddApiKeys(_ []string) {
-	// no key needed
-}
diff --git a/pkg/engine/passive/source/commoncrawl/commoncrawl.go b/pkg/engine/passive/source/commoncrawl/commoncrawl.go
deleted file mode 100644
index 9ea8a8dc..00000000
--- a/pkg/engine/passive/source/commoncrawl/commoncrawl.go
+++ /dev/null
@@ -1,136 +0,0 @@
-// Package commoncrawl logic
-package commoncrawl
-
-import (
-	"bufio"
-	"context"
-	"fmt"
-	"net/url"
-	"strconv"
-	"strings"
-	"time"
-
-	jsoniter "github.com/json-iterator/go"
-
-	"github.com/projectdiscovery/katana/pkg/engine/common"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/httpclient"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/regexp"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/source"
-)
-
-const (
-	indexURL     = "https://index.commoncrawl.org/collinfo.json"
-	maxYearsBack = 5
-)
-
-var year = time.Now().Year()
-
-type indexResponse struct {
-	ID     string `json:"id"`
-	APIURL string `json:"cdx-api"`
-}
-
-type Source struct {
-}
-
-func (s *Source) Run(ctx context.Context, sharedCtx *common.Shared, rootUrl string) <-chan source.Result {
-	results := make(chan source.Result)
-
-	go func() {
-		defer close(results)
-
-		httpClient := httpclient.NewHttpClient(sharedCtx.Options.Options.Timeout)
-		resp, err := httpClient.SimpleGet(ctx, indexURL)
-		if err != nil {
-			results <- source.Result{Source: s.Name(), Error: err}
-			httpClient.DiscardHTTPResponse(resp)
-			return
-		}
-
-		var indexes []indexResponse
-		err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
-		if err != nil {
-			results <- source.Result{Source: s.Name(), Error: err}
-			resp.Body.Close()
-			return
-		}
-		resp.Body.Close()
-
-		years := make([]string, 0)
-		for i := 0; i < maxYearsBack; i++ {
-			years = append(years, strconv.Itoa(year-i))
-		}
-
-		searchIndexes := make(map[string]string)
-		for _, year := range years {
-			for _, index := range indexes {
-				if strings.Contains(index.ID, year) {
-					if _, ok := searchIndexes[year]; !ok {
-						searchIndexes[year] = index.APIURL
-						break
-					}
-				}
-			}
-		}
-
-		for _, apiURL := range searchIndexes {
-			further := s.getURLs(ctx, apiURL, rootUrl, httpClient, results)
-			if !further {
-				break
-			}
-		}
-	}()
-
-	return results
-}
-
-func (s *Source) Name() string {
-	return "commoncrawl"
-}
-
-func (s *Source) NeedsKey() bool {
-	return false
-}
-
-func (s *Source) AddApiKeys(_ []string) {
-	// no key needed
-}
-
-func (s *Source) getURLs(ctx context.Context, searchURL, rootURL string, httpClient *httpclient.HttpClient, results chan source.Result) bool {
-	for {
-		select {
-		case <-ctx.Done():
-			return false
-		default:
-			var headers = map[string]string{"Host": "index.commoncrawl.org"}
-			currentSearchURL := fmt.Sprintf("%s?url=*.%s", searchURL, rootURL)
-			resp, err := httpClient.Get(ctx, currentSearchURL, "", headers)
-			if err != nil {
-				results <- source.Result{Source: s.Name(), Error: err}
-				httpClient.DiscardHTTPResponse(resp)
-				return false
-			}
-
-			scanner := bufio.NewScanner(resp.Body)
-
-			for scanner.Scan() {
-				line := scanner.Text()
-				if line == "" {
-					continue
-				}
-				line, _ = url.QueryUnescape(line)
-				for _, extractedURL := range regexp.Extract(line) {
-					// fix for triple encoded URL
-					extractedURL = strings.ToLower(extractedURL)
-					extractedURL = strings.TrimPrefix(extractedURL, "25")
-					extractedURL = strings.TrimPrefix(extractedURL, "2f")
-					if extractedURL != "" {
-						results <- source.Result{Source: s.Name(), Value: extractedURL, Reference: currentSearchURL}
-					}
-				}
-			}
-			resp.Body.Close()
-			return true
-		}
-	}
-}
diff --git a/pkg/engine/passive/source/source.go b/pkg/engine/passive/source/source.go
deleted file mode 100644
index db5bc897..00000000
--- a/pkg/engine/passive/source/source.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package source
-
-import (
-	"context"
-
-	"github.com/projectdiscovery/katana/pkg/engine/common"
-)
-
-type Source interface {
-	Run(context.Context, *common.Shared, string) <-chan Result
-	Name() string
-	NeedsKey() bool
-	AddApiKeys([]string)
-}
-
-type Result struct {
-	Source    string
-	Value     string
-	Reference string
-	Error     error
-}
diff --git a/pkg/engine/passive/source/waybackarchive/waybackarchive.go b/pkg/engine/passive/source/waybackarchive/waybackarchive.go
deleted file mode 100644
index db023d75..00000000
--- a/pkg/engine/passive/source/waybackarchive/waybackarchive.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package waybackarchive
-
-import (
-	"bufio"
-	"context"
-	"fmt"
-	"net/url"
-	"strings"
-
-	"github.com/projectdiscovery/katana/pkg/engine/common"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/httpclient"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/regexp"
-	"github.com/projectdiscovery/katana/pkg/engine/passive/source"
-)
-
-type Source struct {
-}
-
-func (s *Source) Run(ctx context.Context, sharedCtx *common.Shared, rootUrl string) <-chan source.Result {
-	results := make(chan source.Result)
-	go func() {
-		defer close(results)
-
-		httpClient := httpclient.NewHttpClient(sharedCtx.Options.Options.Timeout)
-		searchURL := fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=txt&fl=original&collapse=urlkey", rootUrl)
-		resp, err := httpClient.Get(ctx, searchURL, "", nil)
-		if err != nil {
-			results <- source.Result{Source: s.Name(), Error: err}
-			return
-		}
-		defer resp.Body.Close()
-
-		scanner := bufio.NewScanner(resp.Body)
-		for scanner.Scan() {
-			line := scanner.Text()
-			if line == "" {
-				continue
-			}
-			line, _ = url.QueryUnescape(line)
-			for _, extractedURL := range regexp.Extract(line) {
-				// fix for triple encoded URL
-				extractedURL = strings.ToLower(extractedURL)
-				extractedURL = strings.TrimPrefix(extractedURL, "25")
-				extractedURL = strings.TrimPrefix(extractedURL, "2f")
-
-				results <- source.Result{Source: s.Name(), Value: extractedURL, Reference: searchURL}
-			}
-
-		}
-	}()
-
-	return results
-}
-
-func (s *Source) Name() string {
-	return "waybackarchive"
-}
-
-func (s *Source) NeedsKey() bool {
-	return false
-}
-
-func (s *Source) AddApiKeys(_ []string) {
-	// no key needed
-}
diff --git a/pkg/navigation/response.go b/pkg/navigation/response.go
index 2ecbd5a5..666b4fc7 100644
--- a/pkg/navigation/response.go
+++ b/pkg/navigation/response.go
@@ -42,11 +42,6 @@ type Response struct {
 	StoredResponsePath string `json:"stored_response_path,omitempty"`
 }
 
-type PassiveReference struct {
-	Source    string `json:"source"`
-	Reference string `json:"reference"`
-}
-
 func (n Response) AbsoluteURL(path string) string {
 	if strings.HasPrefix(path, "#") {
 		return ""
diff --git a/pkg/output/result.go b/pkg/output/result.go
index 90d04c3b..9c93922d 100644
--- a/pkg/output/result.go
+++ b/pkg/output/result.go
@@ -8,11 +8,10 @@ import (
 
 // Result of the crawling
 type Result struct {
-	Timestamp        time.Time                    `json:"timestamp,omitempty"`
-	Request          *navigation.Request          `json:"request,omitempty"`
-	Response         *navigation.Response         `json:"response,omitempty"`
-	PassiveReference *navigation.PassiveReference `json:"passive,omitempty"`
-	Error            string                       `json:"error,omitempty"`
+	Timestamp time.Time            `json:"timestamp,omitempty"`
+	Request   *navigation.Request  `json:"request,omitempty"`
+	Response  *navigation.Response `json:"response,omitempty"`
+	Error     string               `json:"error,omitempty"`
 }
 
 // HasResponse checks if the result has a valid response
diff --git a/pkg/types/options.go b/pkg/types/options.go
index 6b8eefb1..43e6e8f3 100644
--- a/pkg/types/options.go
+++ b/pkg/types/options.go
@@ -131,10 +131,6 @@ type Options struct {
 	HeadlessNoIncognito bool
 	// XhrExtraction extract xhr requests
 	XhrExtraction bool
-	// Passive enables passive crawling
-	Passive bool
-	// PassiveSource is the list of sources for passive crawling
-	PassiveSource goflags.StringSlice
 	// HealthCheck determines if a self-healthcheck should be performed
 	HealthCheck bool
 	// ErrorLogFile specifies a file to write with the errors of all requests
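
Note for users of the removed mode: the passive engine did nothing katana-specific beyond querying public archives and filtering the returned URLs through the usual scope and extension validation. Similar endpoint discovery can be reproduced outside katana. Below is a minimal, hypothetical sketch (standard library only, not katana code) of the Wayback Machine lookup that the deleted `waybackarchive` source performed; the CDX endpoint string is copied verbatim from the removed file, while the program name and CLI shape are invented for illustration.

```go
// passive_wayback.go: hypothetical helper, not part of katana.
// Prints archived URLs for a domain, one per line, the same raw
// CDX output the removed waybackarchive source consumed.
package main

import (
	"bufio"
	"fmt"
	"net/http"
	"net/url"
	"os"
	"time"
)

func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "usage: passive_wayback <domain>")
		os.Exit(1)
	}

	// Endpoint taken verbatim from the deleted waybackarchive source.
	searchURL := fmt.Sprintf(
		"http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=txt&fl=original&collapse=urlkey",
		os.Args[1],
	)

	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Get(searchURL)
	if err != nil {
		fmt.Fprintln(os.Stderr, "request failed:", err)
		os.Exit(1)
	}
	defer resp.Body.Close()

	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if line == "" {
			continue
		}
		// The removed source also query-unescaped each line before
		// extracting URLs from it.
		if unescaped, err := url.QueryUnescape(line); err == nil {
			line = unescaped
		}
		fmt.Println(line)
	}
}
```

The resulting list can then be filtered and fed back into katana or any other tool as a plain URL list, for example `go run passive_wayback.go example.com | katana` (assuming katana's stdin list input, which this diff leaves unchanged).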