Limit worker data fetching to 10 pages

Updated the `get_all_worker_rows` method in the `MiningDashboardService` class to restrict the number of pages fetched to a maximum of 10. Enhanced logging to provide clearer information about the current page and maximum limit, and added a log message for when the maximum page limit is reached to improve visibility during data collection.
Author: DJObleezy
Date: 2025-04-17 10:05:51 -07:00
parent 0bb90b1aca
commit d98d496bd6


@@ -476,15 +476,18 @@ class MiningDashboardService:
     def get_all_worker_rows(self):
         """
         Iterate through wpage parameter values to collect all worker table rows.
+        Limited to 10 pages to balance between showing enough workers and maintaining performance.
 
         Returns:
             list: A list of BeautifulSoup row elements containing worker data.
         """
         all_rows = []
         page_num = 0
-        while True:
+        max_pages = 10  # Limit to 10 pages of worker data
+        while page_num < max_pages:  # Only fetch up to max_pages
             url = f"https://ocean.xyz/stats/{self.wallet}?wpage={page_num}#workers-fulltable"
-            logging.info(f"Fetching worker data from: {url}")
+            logging.info(f"Fetching worker data from: {url} (page {page_num+1} of max {max_pages})")
             response = self.session.get(url, timeout=15)
             if not response.ok:
                 logging.error(f"Error fetching page {page_num}: status code {response.status_code}")
@@ -505,6 +508,11 @@ class MiningDashboardService:
             all_rows.extend(rows)
             page_num += 1
 
+        if page_num >= max_pages:
+            logging.info(f"Reached maximum page limit ({max_pages}). Collected {len(all_rows)} worker rows total.")
+        else:
+            logging.info(f"Completed fetching all available worker data. Collected {len(all_rows)} worker rows from {page_num} pages.")
+
         return all_rows
 
     def get_worker_data(self):
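
For context, here is a minimal sketch of how `get_all_worker_rows` reads once both hunks are applied. The code between the two hunks (roughly file lines 480-505) is not part of this diff, so the BeautifulSoup parsing step and the break-on-empty-page behaviour shown below are assumptions for illustration; the `workers-tablerows` selector and the `__init__` are hypothetical stand-ins, not the repository's actual code.

```python
# Sketch only: fills in the elided parsing step with assumed BeautifulSoup calls.
import logging

import requests
from bs4 import BeautifulSoup


class MiningDashboardService:
    def __init__(self, wallet):
        self.wallet = wallet
        self.session = requests.Session()

    def get_all_worker_rows(self):
        """Collect worker table rows across paginated stats pages (capped at 10 pages)."""
        all_rows = []
        page_num = 0
        max_pages = 10  # Limit to 10 pages of worker data
        while page_num < max_pages:  # Only fetch up to max_pages
            url = f"https://ocean.xyz/stats/{self.wallet}?wpage={page_num}#workers-fulltable"
            logging.info(f"Fetching worker data from: {url} (page {page_num+1} of max {max_pages})")
            response = self.session.get(url, timeout=15)
            if not response.ok:
                logging.error(f"Error fetching page {page_num}: status code {response.status_code}")
                break

            # Assumed parsing step: the real selector is not shown in this diff.
            soup = BeautifulSoup(response.text, "html.parser")
            table = soup.find("tbody", id="workers-tablerows")  # hypothetical id
            rows = table.find_all("tr") if table else []
            if not rows:
                break  # no more worker rows on this page; stop paging

            all_rows.extend(rows)
            page_num += 1

        if page_num >= max_pages:
            logging.info(f"Reached maximum page limit ({max_pages}). Collected {len(all_rows)} worker rows total.")
        else:
            logging.info(f"Completed fetching all available worker data. Collected {len(all_rows)} worker rows from {page_num} pages.")
        return all_rows
```

The trade-off is explicit in the loop condition: a wallet with more than ten pages of workers will have the remainder skipped, in exchange for a bounded number of HTTP requests per refresh.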