Commit
Update: Refactor code using analysis by SonarQube
Nicolas-Moliterno committed Nov 13, 2024
1 parent b5d799d commit 568bb61
Showing 2 changed files with 37 additions and 29 deletions.
64 changes: 36 additions & 28 deletions scrapy/commands/check.py
@@ -68,43 +68,51 @@ def add_options(self, parser: argparse.ArgumentParser) -> None:
         )
 
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         # load contracts
         contracts = build_component_list(self.settings.getwithbase("SPIDER_CONTRACTS"))
         conman = ContractsManager(load_object(c) for c in contracts)
         runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
         result = TextTestResult(runner.stream, runner.descriptions, runner.verbosity)
 
-        # contract requests
-        contract_reqs = defaultdict(list)
-
         assert self.crawler_process
         spider_loader = self.crawler_process.spider_loader
 
         with set_environ(SCRAPY_CHECK="true"):
-            for spidername in args or spider_loader.list():
-                spidercls = spider_loader.load(spidername)
-                spidercls.start_requests = lambda s: conman.from_spider(s, result)  # type: ignore[assignment,method-assign,return-value]
-
-                tested_methods = conman.tested_methods_from_spidercls(spidercls)
-                if opts.list:
-                    for method in tested_methods:
-                        contract_reqs[spidercls.name].append(method)
-                elif tested_methods:
-                    self.crawler_process.crawl(spidercls)
-
-            # start checks
+            contract_reqs = self._collect_contract_requests(args, spider_loader, conman, result, opts)
+
             if opts.list:
-                for spider, methods in sorted(contract_reqs.items()):
-                    if not methods and not opts.verbose:
-                        continue
-                    print(spider)
-                    for method in sorted(methods):
-                        print(f"  * {method}")
+                self._list_contract_methods(contract_reqs, opts.verbose)
             else:
-                start = time.time()
-                self.crawler_process.start()
-                stop = time.time()
-
-                result.printErrors()
-                result.printSummary(start, stop)
-                self.exitcode = int(not result.wasSuccessful())
+                self._run_contract_tests(result)
+
+    def _collect_contract_requests(self, args, spider_loader, conman, result, opts):
+        """Collects tested methods for each spider and configures crawling processes."""
+        contract_reqs = defaultdict(list)
+        for spidername in args or spider_loader.list():
+            spidercls = spider_loader.load(spidername)
+            spidercls.start_requests = lambda s: conman.from_spider(s, result)  # type: ignore[assignment,method-assign,return-value]
+
+            tested_methods = conman.tested_methods_from_spidercls(spidercls)
+            if opts.list:
+                contract_reqs[spidercls.name].extend(tested_methods)
+            elif tested_methods:
+                self.crawler_process.crawl(spidercls)
+        return contract_reqs
+
+    def _list_contract_methods(self, contract_reqs, verbose):
+        """Prints tested methods for each spider if listing mode is active."""
+        for spider, methods in sorted(contract_reqs.items()):
+            if not methods and not verbose:
+                continue
+            print(spider)
+            for method in sorted(methods):
+                print(f"  * {method}")
+
+    def _run_contract_tests(self, result):
+        """Starts the crawling process to check contracts and displays a summary of the results."""
+        start = time.time()
+        self.crawler_process.start()
+        stop = time.time()
+
+        result.printErrors()
+        result.printSummary(start, stop)
+        self.exitcode = int(not result.wasSuccessful())
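For context on what these helpers collect: scrapy check exercises spider contracts, which are docstring annotations on callback methods describing the requests a callback receives and the items it should produce. Below is a minimal sketch of a spider carrying such contracts; the spider name, URL, selectors, and field names are hypothetical, while @url, @returns, and @scrapes are Scrapy's built-in contracts.

import scrapy


class QuotesSpider(scrapy.Spider):
    # Hypothetical spider, used only to illustrate contract annotations.
    name = "quotes"

    def parse(self, response):
        """Parse a listing page of quotes.

        @url http://quotes.toscrape.com/
        @returns items 1 16
        @returns requests 0 0
        @scrapes text author
        """
        for quote in response.css("div.quote"):
            yield {
                "text": quote.css("span.text::text").get(),
                "author": quote.css("small.author::text").get(),
            }

Running scrapy check quotes builds test requests from these annotations via conman.from_spider (the lambda patched onto start_requests above), while scrapy check -l takes the opts.list branch and prints the method names gathered by _collect_contract_requests.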
2 changes: 1 addition & 1 deletion scrapy/commands/parse.py
@@ -397,7 +397,7 @@ def process_request_cb_kwargs(self, opts: argparse.Namespace) -> None:
 
     def run(self, args: list[str], opts: argparse.Namespace) -> None:
         # parse arguments
-        if not len(args) == 1 or not is_url(args[0]):
+        if len(args) != 1 or not is_url(args[0]):
             raise UsageError()
         else:
             url = args[0]
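The parse.py change is a typical SonarQube readability finding: not len(args) == 1 and len(args) != 1 are equivalent, but the direct comparison avoids a double mental negation. A small sketch of the same validation pattern, assuming a stand-alone helper (is_url is Scrapy's real scrapy.utils.url.is_url; the function name, error type, and sample argument are hypothetical):

from scrapy.utils.url import is_url


def require_single_url(args: list[str]) -> str:
    # Exactly one argument, and it must be a URL -- stated directly
    # with `!=` instead of negating an equality test.
    if len(args) != 1 or not is_url(args[0]):
        raise ValueError("expected exactly one URL argument")
    return args[0]


print(require_single_url(["http://example.com/"]))  # http://example.com/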
