Automatic Scraping of dataset options #9

Merged
dylan merged 36 commits from feat/automatic-scraping-datasets into main 2026-03-14 21:58:49 +00:00
2 changed files with 1 additions and 5 deletions
Showing only changes of commit 12cbc24074 - Show all commits

View File

@@ -19,7 +19,7 @@ from server.exceptions import NotAuthorisedException, NonExistentDatasetExceptio
from server.db.database import PostgresConnector from server.db.database import PostgresConnector
from server.core.auth import AuthManager from server.core.auth import AuthManager
from server.core.datasets import DatasetManager from server.core.datasets import DatasetManager
from server.utils import get_request_filters, split_limit, get_env from server.utils import get_request_filters, get_env
from server.queue.tasks import process_dataset, fetch_and_process_dataset from server.queue.tasks import process_dataset, fetch_and_process_dataset
from server.connectors.registry import get_available_connectors, get_connector_metadata from server.connectors.registry import get_available_connectors, get_connector_metadata

View File

@@ -50,10 +50,6 @@ def get_request_filters() -> dict:
return filters return filters
def split_limit(limit: int, n: int) -> list[int]:
    """Split *limit* into *n* near-equal integer parts.

    The parts always sum exactly to *limit*; the first ``limit % n``
    parts are one unit larger than the rest, so no two parts differ
    by more than 1.
    """
    quotient = limit // n
    extra = limit % n
    parts: list[int] = []
    for index in range(n):
        # The first `extra` slots absorb the remainder, one unit each.
        if index < extra:
            parts.append(quotient + 1)
        else:
            parts.append(quotient)
    return parts
def get_env(name: str) -> str: def get_env(name: str) -> str:
value = os.getenv(name) value = os.getenv(name)
if not value: if not value: