From 4ab327bf7a1758ee00e79a12e789e9bde7bd367c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Witkowski?=
Date: Sat, 3 Aug 2024 21:51:01 +0200
Subject: [PATCH 1/3] Dev: added linter (pylint)

---
 .github/workflows/cd-backend.yaml | 4 ++++
 backend/job_fetch_offers.py       | 9 ++++++---
 backend/mailer.py                 | 3 +--
 backend/requirements.txt          | 1 +
 backend/run_lint.sh               | 5 +++++
 backend/run_tests.sh              | 9 +++------
 6 files changed, 20 insertions(+), 11 deletions(-)
 create mode 100755 backend/run_lint.sh

diff --git a/.github/workflows/cd-backend.yaml b/.github/workflows/cd-backend.yaml
index 1c947be..99fb918 100644
--- a/.github/workflows/cd-backend.yaml
+++ b/.github/workflows/cd-backend.yaml
@@ -36,6 +36,10 @@ jobs:
         run: |
           pip3 install --quiet -r requirements.txt -r tests/requirements.txt
 
+      - name: 'Static analysis (Lint)'
+        working-directory: ./backend
+        run: ./run_lint.sh
+
       - name: 'Run tests and check coverage'
         working-directory: ./backend
         run: |
diff --git a/backend/job_fetch_offers.py b/backend/job_fetch_offers.py
index 7cde5c0..760581e 100644
--- a/backend/job_fetch_offers.py
+++ b/backend/job_fetch_offers.py
@@ -1,6 +1,7 @@
 from datetime import datetime
 from scrapy.utils.project import get_project_settings
 from twisted.internet import reactor
+from scrapy.crawler import CrawlerProcess
 from scrapy.crawler import CrawlerRunner
 import pprint
 
@@ -14,6 +15,7 @@
     try:
         settings = get_project_settings()
         runner = CrawlerRunner(settings)
+        process = CrawlerProcess(settings)
 
         spiders = {
             SoaringDeSpider.SoaringDeSpider: None,
@@ -21,13 +23,14 @@
             #PlaneCheckComSpider.PlaneCheckComSpider: None
         }
         for spider_cls in spiders.keys():
-            crawler = runner.create_crawler(spider_cls)
+            crawler = process.create_crawler(spider_cls)
             spiders[spider_cls] = crawler
-            runner.crawl(crawler)
+            process.crawl(crawler)
 
         d = runner.join()
         d.addBoth(lambda _: reactor.stop())
-        reactor.run() # the script will block here until all crawling jobs are finished
+
+        process.start() # the script will block here until all crawling jobs are finished
 
         stats_per_spider = {}
diff --git a/backend/mailer.py b/backend/mailer.py
index c61b5ba..f3414d8 100644
--- a/backend/mailer.py
+++ b/backend/mailer.py
@@ -9,8 +9,7 @@ def send_mail(text=""):
     if not SEND_RESULT_MAIL:
         return
     msg = email.mime.text.MIMEText(text)
-    # TODO put your mail address here
-    # me = u'ralf.thaenert@googlemail.com'
+    me = 'dev@aerooffers.pl'
     msg['Subject'] = 'Aircraft Offers Crawling Result'
     msg['From'] = SMTP_USER
     msg['To'] = me
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 991b895..0b15cf3 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -1,6 +1,7 @@
 coverage==7.6.0
 Twisted==24.3.0
 psycopg2-binary==2.9.9
+pylint==3.2.6
 SQLAlchemy==2.0.31
 price-parser==0.3.4
 Scrapy==2.11.2
diff --git a/backend/run_lint.sh b/backend/run_lint.sh
new file mode 100755
index 0000000..1dbb35a
--- /dev/null
+++ b/backend/run_lint.sh
@@ -0,0 +1,5 @@
+export PYTHONPATH=$PYTHONPATH':./'
+
+set -e
+
+pylint --fail-on=E --errors-only ./
\ No newline at end of file
diff --git a/backend/run_tests.sh b/backend/run_tests.sh
index 2cef050..1094bf8 100755
--- a/backend/run_tests.sh
+++ b/backend/run_tests.sh
@@ -1,9 +1,6 @@
 export PYTHONPATH=$PYTHONPATH':./'
 
-coverage run --source ./ -m xmlrunner -o ./test-results
+set -e
 
-if [[ $? -ne 0 ]]; then
-  exit 1
-else
-  coverage report --fail-under=75
-fi
\ No newline at end of file
+coverage run --source ./ -m xmlrunner -o ./test-results
+coverage report --fail-under=75
\ No newline at end of file
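Note on the crawl refactor in patch 1: CrawlerProcess manages the Twisted reactor itself, so once `process.start()` is used, the `runner.join()` / `reactor.stop()` / `reactor.run()` plumbing left over from the CrawlerRunner approach is dead code; patch 2 below removes it. The resulting pattern, as a minimal runnable sketch (the spider is a stand-in, not one of the project's spiders, and `get_project_settings()` assumes the script runs inside the Scrapy project):

    # crawl_sketch.py -- illustrative only; mirrors the post-patch job structure
    from scrapy import Spider
    from scrapy.crawler import CrawlerProcess
    from scrapy.utils.project import get_project_settings

    class DemoSpider(Spider):                      # stand-in for SoaringDeSpider etc.
        name = "demo"
        start_urls = ["https://example.com"]

        def parse(self, response):
            yield {"title": response.css("title::text").get()}

    process = CrawlerProcess(get_project_settings())
    crawler = process.create_crawler(DemoSpider)   # keep the Crawler to read its stats
    process.crawl(crawler)
    process.start()                                # blocks until all crawling jobs finish
    print(crawler.stats.get_stats())               # per-crawler stats, as the job collects
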
From 20cda87f1af19a41e4b99f10ea105a237189e55a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Witkowski?=
Date: Sat, 3 Aug 2024 21:53:57 +0200
Subject: [PATCH 2/3] Fix lint errors, broke coverage requirements

---
 backend/job_fetch_offers.py | 6 ------
 backend/run_tests.sh        | 2 +-
 2 files changed, 1 insertion(+), 7 deletions(-)

diff --git a/backend/job_fetch_offers.py b/backend/job_fetch_offers.py
index 760581e..7278e4e 100644
--- a/backend/job_fetch_offers.py
+++ b/backend/job_fetch_offers.py
@@ -1,8 +1,6 @@
 from datetime import datetime
 from scrapy.utils.project import get_project_settings
-from twisted.internet import reactor
 from scrapy.crawler import CrawlerProcess
-from scrapy.crawler import CrawlerRunner
 import pprint
 
 from my_logging import *
@@ -14,7 +12,6 @@
 if __name__ == '__main__':
     try:
         settings = get_project_settings()
-        runner = CrawlerRunner(settings)
         process = CrawlerProcess(settings)
 
         spiders = {
@@ -27,9 +24,6 @@
             spiders[spider_cls] = crawler
             process.crawl(crawler)
 
-        d = runner.join()
-        d.addBoth(lambda _: reactor.stop())
-
         process.start() # the script will block here until all crawling jobs are finished
 
         stats_per_spider = {}
diff --git a/backend/run_tests.sh b/backend/run_tests.sh
index 1094bf8..3972987 100755
--- a/backend/run_tests.sh
+++ b/backend/run_tests.sh
@@ -3,4 +3,4 @@ export PYTHONPATH=$PYTHONPATH':./'
 set -e
 
 coverage run --source ./ -m xmlrunner -o ./test-results
-coverage report --fail-under=75
\ No newline at end of file
+coverage report --fail-under=80
\ No newline at end of file

From 61ba09ddfe3fe6a9649f63832e0b395e1c1ff5d4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Witkowski?=
Date: Sat, 3 Aug 2024 21:57:16 +0200
Subject: [PATCH 3/3] Exclude test files from coverage

---
 backend/run_tests.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/run_tests.sh b/backend/run_tests.sh
index 3972987..ef38f17 100755
--- a/backend/run_tests.sh
+++ b/backend/run_tests.sh
@@ -2,5 +2,5 @@ export PYTHONPATH=$PYTHONPATH':./'
 
 set -e
 
-coverage run --source ./ -m xmlrunner -o ./test-results
-coverage report --fail-under=80
\ No newline at end of file
+coverage run --source ./ --omit="tests/*" -m xmlrunner -o ./test-results
+coverage report --fail-under=65
\ No newline at end of file
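
Note on the coverage churn across the series: the threshold moves from 75 to 80, then down to 65 once `--omit="tests/*"` stops counting the test files themselves (which run fully and so read as near-100% covered) toward the total. If these flags keep changing, one alternative — a sketch, not part of this series — is to move them into a `.coveragerc` next to run_tests.sh, so the script shrinks to `coverage run -m xmlrunner -o ./test-results` followed by `coverage report`:

    # .coveragerc -- hypothetical equivalent of the final command-line flags
    [run]
    source = .
    omit = tests/*

    [report]
    fail_under = 65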