From e6b792a4b323e4902961af9bd080176504548937 Mon Sep 17 00:00:00 2001
From: Audel Rouhi
Date: Sun, 23 Apr 2023 13:51:42 -0500
Subject: [PATCH] Removed debug prints and don't try a second time if it fails.

---
 webarchiver/webarchiver.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/webarchiver/webarchiver.py b/webarchiver/webarchiver.py
index 5c63a75..4d1665a 100644
--- a/webarchiver/webarchiver.py
+++ b/webarchiver/webarchiver.py
@@ -701,7 +701,14 @@ def screenshot_urls(self, parallel_urls):
         self.launch_browser()
         for url in parallel_urls:
             self.set_zoom_level(self.zoom_level)
-            self.full_page_screenshot(url=f'{url}', zoom_percentage=self.zoom_level)
+            try:
+                self.full_page_screenshot(url=f'{url}', zoom_percentage=self.zoom_level)
+            except Exception as e:
+                print(f"Unable to capture screenshot\nError: {e}\nTrying again...")
+                try:
+                    self.full_page_screenshot(url=f'{url}', zoom_percentage=self.zoom_level)
+                except Exception as e:
+                    print(f"Unable to capture screenshot\nError: {e}")
             self.url_count = self.url_count + 1
             percentage = '%.3f' % ((self.url_count / len(self.urls)) * 100)
             urls_processed = '{0: <25}'.format(f"URLs Processed: {self.url_count}")
@@ -791,7 +798,6 @@ def webarchiver(argv):
         processes = len(archive.urls)
         archive.set_processes(processes=processes)
         parallel_urls = list(archive.chunks(archive.urls, processes))
-        print(f"PARALLEL URLS: {parallel_urls}")
         archive.screenshot_urls_in_parallel(parallel_urls=parallel_urls)
 
     if scrape_flag:
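
The retry-once behavior added in the first hunk can be read as the sketch below: attempt the screenshot call, and on failure report the error and try exactly one more time. This is an illustrative refactor under stated assumptions, not part of the patch; retry_once, capture, and attempts are hypothetical names that do not exist in webarchiver.

from typing import Callable


def retry_once(capture: Callable[[str], None], url: str, attempts: int = 2) -> bool:
    """Call capture(url) up to `attempts` times; return True on the first success."""
    for attempt in range(1, attempts + 1):
        try:
            capture(url)
            return True
        except Exception as error:  # broad catch mirrors the patch's behavior
            print(f"Unable to capture screenshot\nError: {error}")
            if attempt < attempts:
                print("Trying again...")
    return False

With such a helper, the loop body in screenshot_urls would reduce to a single call, e.g. retry_once(lambda u: self.full_page_screenshot(url=u, zoom_percentage=self.zoom_level), url).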