From d98d496bd69b8f7159cfc5985c77cb3b22e6a770 Mon Sep 17 00:00:00 2001
From: DJObleezy
Date: Thu, 17 Apr 2025 10:05:51 -0700
Subject: [PATCH] Limit worker data fetching to 10 pages

Updated the `get_all_worker_rows` method in the `MiningDashboardService`
class to cap the number of pages fetched at 10. The per-page log line now
reports the current page and the maximum, and a summary log message after
the loop reports whether collection stopped at the page cap or exhausted
all available pages, improving visibility during data collection.
---
 data_service.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/data_service.py b/data_service.py
index b61d6c5..ac52e3f 100644
--- a/data_service.py
+++ b/data_service.py
@@ -476,15 +476,18 @@ class MiningDashboardService:
     def get_all_worker_rows(self):
         """
         Iterate through wpage parameter values to collect all worker table rows.
+        Limited to 10 pages to balance between showing enough workers and maintaining performance.
 
         Returns:
             list: A list of BeautifulSoup row elements containing worker data.
         """
         all_rows = []
         page_num = 0
-        while True:
+        max_pages = 10  # Limit to 10 pages of worker data
+
+        while page_num < max_pages:  # Only fetch up to max_pages
             url = f"https://ocean.xyz/stats/{self.wallet}?wpage={page_num}#workers-fulltable"
-            logging.info(f"Fetching worker data from: {url}")
+            logging.info(f"Fetching worker data from: {url} (page {page_num+1} of max {max_pages})")
             response = self.session.get(url, timeout=15)
             if not response.ok:
                 logging.error(f"Error fetching page {page_num}: status code {response.status_code}")
@@ -505,6 +508,11 @@ class MiningDashboardService:
             all_rows.extend(rows)
             page_num += 1
 
+        if page_num >= max_pages:
+            logging.info(f"Reached maximum page limit ({max_pages}). Collected {len(all_rows)} worker rows total.")
+        else:
+            logging.info(f"Completed fetching all available worker data. Collected {len(all_rows)} worker rows from {page_num} pages.")
+
         return all_rows
 
     def get_worker_data(self):
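
Note for reviewers: the hunks above elide the middle of the loop body (the
response parsing and the break conditions), so here is a minimal,
self-contained sketch of how the patched method behaves end to end. The
BeautifulSoup call, the `#workers-fulltable tbody tr` selector, the
empty-page break, and the standalone-function signature are assumptions
drawn from the docstring and surrounding context, not the file's actual
code.

    # Sketch only: mirrors the patched get_all_worker_rows control flow.
    # The soup parsing and the empty-page break are assumed details; the
    # real loop body between the fetch and all_rows.extend(rows) is not
    # shown in the hunks above.
    import logging

    import requests
    from bs4 import BeautifulSoup

    def get_all_worker_rows(session: requests.Session, wallet: str) -> list:
        """Collect worker table rows across paginated stats pages (max 10)."""
        all_rows = []
        page_num = 0
        max_pages = 10  # cap mirrored from the patch

        while page_num < max_pages:
            url = f"https://ocean.xyz/stats/{wallet}?wpage={page_num}#workers-fulltable"
            logging.info(f"Fetching worker data from: {url} (page {page_num+1} of max {max_pages})")
            response = session.get(url, timeout=15)
            if not response.ok:
                logging.error(f"Error fetching page {page_num}: status code {response.status_code}")
                break

            # Assumed parsing step: extract rows from the workers table and
            # stop paging once a page comes back empty.
            soup = BeautifulSoup(response.text, "html.parser")
            rows = soup.select("#workers-fulltable tbody tr")
            if not rows:
                break

            all_rows.extend(rows)
            page_num += 1

        if page_num >= max_pages:
            logging.info(f"Reached maximum page limit ({max_pages}). Collected {len(all_rows)} worker rows total.")
        else:
            logging.info(f"Completed fetching all available worker data. Collected {len(all_rows)} worker rows from {page_num} pages.")

        return all_rows

One property worth noting: because page_num only increments after a
successful, non-empty page, the post-loop check `page_num >= max_pages`
distinguishes "hit the cap" from "ran out of data or errored", which is
what routes the two summary log lines.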