From 20cda87f1af19a41e4b99f10ea105a237189e55a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Witkowski?= Date: Sat, 3 Aug 2024 21:53:57 +0200 Subject: [PATCH] Fix lint errors, broke coverage requirements --- backend/job_fetch_offers.py | 6 ------ backend/run_tests.sh | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/backend/job_fetch_offers.py b/backend/job_fetch_offers.py index 760581e..7278e4e 100644 --- a/backend/job_fetch_offers.py +++ b/backend/job_fetch_offers.py @@ -1,8 +1,6 @@ from datetime import datetime from scrapy.utils.project import get_project_settings -from twisted.internet import reactor from scrapy.crawler import CrawlerProcess -from scrapy.crawler import CrawlerRunner import pprint from my_logging import * @@ -14,7 +12,6 @@ if __name__ == '__main__': try: settings = get_project_settings() - runner = CrawlerRunner(settings) process = CrawlerProcess(settings) spiders = { @@ -27,9 +24,6 @@ spiders[spider_cls] = crawler process.crawl(crawler) - d = runner.join() - d.addBoth(lambda _: reactor.stop()) - process.start() # the script will block here until all crawling jobs are finished stats_per_spider = {} diff --git a/backend/run_tests.sh b/backend/run_tests.sh index 1094bf8..3972987 100755 --- a/backend/run_tests.sh +++ b/backend/run_tests.sh @@ -3,4 +3,4 @@ export PYTHONPATH=$PYTHONPATH':./' set -e coverage run --source ./ -m xmlrunner -o ./test-results -coverage report --fail-under=75 \ No newline at end of file +coverage report --fail-under=80 \ No newline at end of file