diff --git a/lumigator/backend/backend/api/routes/experiments.py b/lumigator/backend/backend/api/routes/experiments.py index 233fc6e23..68aadf517 100644 --- a/lumigator/backend/backend/api/routes/experiments.py +++ b/lumigator/backend/backend/api/routes/experiments.py @@ -21,9 +21,10 @@ def experiment_exception_mappings() -> dict[type[ServiceError], HTTPStatus]: @router.post("/", status_code=status.HTTP_201_CREATED) -def create_experiment_id(service: ExperimentServiceDep, request: ExperimentCreate) -> GetExperimentResponse: +async def create_experiment_id(service: ExperimentServiceDep, request: ExperimentCreate) -> GetExperimentResponse: """Create an experiment ID.""" - return GetExperimentResponse.model_validate(service.create_experiment(request).model_dump()) + experiment = await service.create_experiment(request) + return GetExperimentResponse.model_validate(experiment.model_dump()) @router.get("/{experiment_id}") @@ -45,6 +46,6 @@ async def list_experiments( @router.delete("/{experiment_id}") -def delete_experiment(service: ExperimentServiceDep, experiment_id: str) -> None: +async def delete_experiment(service: ExperimentServiceDep, experiment_id: str) -> None: """Delete an experiment by ID.""" - service.delete_experiment(experiment_id) + await service.delete_experiment(experiment_id) diff --git a/lumigator/backend/backend/api/routes/workflows.py b/lumigator/backend/backend/api/routes/workflows.py index 18be40a46..8f530cea8 100644 --- a/lumigator/backend/backend/api/routes/workflows.py +++ b/lumigator/backend/backend/api/routes/workflows.py @@ -44,13 +44,14 @@ async def get_workflow(service: WorkflowServiceDep, workflow_id: str) -> Workflo # get the logs @router.get("/{workflow_id}/logs") -def get_workflow_logs(service: WorkflowServiceDep, workflow_id: str) -> JobLogsResponse: +async def get_workflow_logs(service: WorkflowServiceDep, workflow_id: str) -> JobLogsResponse: """Get the logs for a workflow.""" - return 
JobLogsResponse.model_validate(service.get_workflow_logs(workflow_id).model_dump()) + logs = await service.get_workflow_logs(workflow_id) + return JobLogsResponse.model_validate(logs.model_dump()) @router.get("/{workflow_id}/result/download") -def get_workflow_result_download( +async def get_workflow_result_download( service: WorkflowServiceDep, workflow_id: str, ) -> str: @@ -60,12 +61,14 @@ def get_workflow_result_download( service: Workflow service dependency workflow_id: ID of the workflow whose results will be returned """ - return service.get_workflow_result_download(workflow_id) + return await service.get_workflow_result_download(workflow_id) # delete a workflow @router.delete("/{workflow_id}") -def delete_workflow(service: WorkflowServiceDep, workflow_id: str, force: bool = False) -> WorkflowDetailsResponse: +async def delete_workflow( + service: WorkflowServiceDep, workflow_id: str, force: bool = False +) -> WorkflowDetailsResponse: """Delete a workflow by ID. Args: @@ -73,4 +76,5 @@ def delete_workflow(service: WorkflowServiceDep, workflow_id: str, force: bool = workflow_id: ID of the workflow to delete force: If True, force deletion even if the workflow is active or has dependencies """ - return WorkflowDetailsResponse.model_validate(service.delete_workflow(workflow_id, force=force).model_dump()) + result = await service.delete_workflow(workflow_id, force=force) + return WorkflowDetailsResponse.model_validate(result.model_dump()) diff --git a/lumigator/backend/backend/services/datasets.py b/lumigator/backend/backend/services/datasets.py index 55d89c8f1..2ff5512fb 100644 --- a/lumigator/backend/backend/services/datasets.py +++ b/lumigator/backend/backend/services/datasets.py @@ -140,7 +140,6 @@ def _save_dataset_to_s3(self, temp_fname, record): # Upload to S3 dataset_key = self._get_s3_key(record.id, record.filename) dataset_path = self._get_s3_path(dataset_key) - # Deprecated!!! 
dataset_hf.save_to_disk(dataset_path, storage_options=self.s3_filesystem.storage_options) # Use the converted HF format files to rebuild the CSV and store it as 'dataset.csv'. diff --git a/lumigator/backend/backend/services/experiments.py b/lumigator/backend/backend/services/experiments.py index 3bf1fd126..852d5ed99 100644 --- a/lumigator/backend/backend/services/experiments.py +++ b/lumigator/backend/backend/services/experiments.py @@ -25,8 +25,8 @@ def __init__( self._dataset_service = dataset_service self._tracking_session = tracking_session - def create_experiment(self, request: ExperimentCreate) -> GetExperimentResponse: - experiment = self._tracking_session.create_experiment( + async def create_experiment(self, request: ExperimentCreate) -> GetExperimentResponse: + experiment = await self._tracking_session.create_experiment( request.name, request.description, request.task_definition, @@ -50,5 +50,5 @@ async def list_experiments(self, skip: int, limit: int) -> ListingResponse[GetEx items=[GetExperimentResponse.model_validate(x) for x in records], ) - def delete_experiment(self, experiment_id: str): - self._tracking_session.delete_experiment(experiment_id) + async def delete_experiment(self, experiment_id: str): + await self._tracking_session.delete_experiment(experiment_id) diff --git a/lumigator/backend/backend/services/workflows.py b/lumigator/backend/backend/services/workflows.py index b4c60f02a..7ebc3dd87 100644 --- a/lumigator/backend/backend/services/workflows.py +++ b/lumigator/backend/backend/services/workflows.py @@ -22,6 +22,7 @@ WorkflowStatus, ) from pydantic_core._pydantic_core import ValidationError +from typing_extensions import deprecated from backend.repositories.jobs import JobRepository from backend.services.datasets import DatasetService @@ -92,12 +93,12 @@ async def _handle_workflow_failure(self, workflow_id: str): loguru.logger.error("Workflow failed: {} ... updating status and stopping jobs", workflow_id) # Mark the workflow as failed. 
- self._tracking_client.update_workflow_status(workflow_id, WorkflowStatus.FAILED) + await self._tracking_client.update_workflow_status(workflow_id, WorkflowStatus.FAILED) # Get the list of jobs in the workflow to stop any that are still running. stop_tasks = [ self._job_service.stop_job(UUID(ray_job_id)) - for job in self._tracking_client.list_jobs(workflow_id) + for job in await self._tracking_client.list_jobs(workflow_id) if (ray_job_id := job.data.params.get("ray_job_id")) ] # Wait for all stop tasks to complete concurrently @@ -147,8 +148,8 @@ async def _run_inference_eval_pipeline( return # Track the workflow status as running and add the inference job. - self._tracking_client.update_workflow_status(workflow.id, WorkflowStatus.RUNNING) - inference_run_id = self._tracking_client.create_job( + await self._tracking_client.update_workflow_status(workflow.id, WorkflowStatus.RUNNING) + inference_run_id = await self._tracking_client.create_job( request.experiment_id, workflow.id, "inference", inference_job.id ) @@ -228,7 +229,7 @@ async def _run_inference_eval_pipeline( metrics=inf_output.metrics, ray_job_id=str(inference_job.id), ) - self._tracking_client.update_job(inference_run_id, inference_job_output) + await self._tracking_client.update_job(inference_run_id, inference_job_output) except Exception as e: loguru.logger.error( "Workflow pipeline error: Workflow {}. Inference job: {}. Cannot update DB with with result data: {}", @@ -272,7 +273,7 @@ async def _run_inference_eval_pipeline( return # Track the evaluation job. 
- eval_run_id = self._tracking_client.create_job( + eval_run_id = await self._tracking_client.create_job( request.experiment_id, workflow.id, "evaluation", evaluation_job.id ) @@ -323,9 +324,9 @@ async def _run_inference_eval_pipeline( parameters={"eval_output_s3_path": f"{settings.S3_BUCKET}/{result_key}"}, ray_job_id=str(evaluation_job.id), ) - self._tracking_client.update_job(eval_run_id, outputs) - self._tracking_client.update_workflow_status(workflow.id, WorkflowStatus.SUCCEEDED) - self._tracking_client.get_workflow(workflow.id) + await self._tracking_client.update_job(eval_run_id, outputs) + await self._tracking_client.update_workflow_status(workflow.id, WorkflowStatus.SUCCEEDED) + await self._tracking_client.get_workflow(workflow.id) except Exception as e: loguru.logger.error( "Workflow pipeline error: Workflow {}. Evaluation job: {} Error validating results: {}", @@ -336,13 +337,13 @@ async def _run_inference_eval_pipeline( await self._handle_workflow_failure(workflow.id) return - def get_workflow_result_download(self, workflow_id: str) -> str: + async def get_workflow_result_download(self, workflow_id: str) -> str: """Return workflow results file URL for downloading. 
Args: workflow_id: ID of the workflow whose results will be returned """ - workflow_details = self.get_workflow(workflow_id) + workflow_details = await self.get_workflow(workflow_id) if workflow_details.artifacts_download_url: return workflow_details.artifacts_download_url else: @@ -391,7 +392,7 @@ async def create_workflow(self, request: WorkflowCreateRequest) -> WorkflowRespo ) request.system_prompt = default_system_prompt - workflow = self._tracking_client.create_workflow( + workflow = await self._tracking_client.create_workflow( experiment_id=request.experiment_id, description=request.description, name=request.name, @@ -406,17 +407,18 @@ async def create_workflow(self, request: WorkflowCreateRequest) -> WorkflowRespo return workflow - def delete_workflow(self, workflow_id: str, force: bool) -> WorkflowResponse: + async def delete_workflow( + self, workflow_id: str, force: bool + ) -> WorkflowResponse: """Delete a workflow by ID.""" # if the workflow is running, we should throw an error - workflow = self.get_workflow(workflow_id) + workflow = await self.get_workflow(workflow_id) if workflow.status == WorkflowStatus.RUNNING and not force: raise WorkflowValidationError("Cannot delete a running workflow") - return self._tracking_client.delete_workflow(workflow_id) + return await self._tracking_client.delete_workflow(workflow_id) - def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: + @deprecated("get_workflow_logs is deprecated, it will be removed in future versions.") + async def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: """Get the logs for a workflow.""" - job_list = self._tracking_client.list_jobs(workflow_id) + job_list = await self._tracking_client.list_jobs(workflow_id) # sort the jobs by created_at, with the oldest last job_list = sorted(job_list, key=lambda x: x.info.start_time) all_ray_job_ids = [run.data.params.get("ray_job_id") for run in job_list] diff --git a/lumigator/backend/backend/tests/conftest.py b/lumigator/backend/backend/tests/conftest.py index b6b167cc4..fbbf8dc64 
100644 --- a/lumigator/backend/backend/tests/conftest.py +++ b/lumigator/backend/backend/tests/conftest.py @@ -277,6 +277,7 @@ def boto_s3fs() -> Generator[S3FileSystem, None, None]: aws_endpoint_url = os.environ.get("AWS_ENDPOINT_URL", "http://localhost:9000") aws_default_region = os.environ.get("AWS_DEFAULT_REGION", "us-east-2") + # Mock the S3 'storage_options' property to match the real client. s3fs = S3FileSystem( key=aws_access_key_id, secret=aws_secret_access_key, @@ -284,7 +285,15 @@ def boto_s3fs() -> Generator[S3FileSystem, None, None]: client_kwargs={"region_name": aws_default_region}, ) - mock_s3fs = MagicMock(wraps=s3fs, storage_options={"endpoint_url": aws_endpoint_url}) + mock_s3fs = MagicMock( + wraps=s3fs, + storage_options={ + "client_kwargs": {"region_name": aws_default_region}, + "key": aws_access_key_id, + "secret": aws_secret_access_key, + "endpoint_url": aws_endpoint_url, + }, + ) yield mock_s3fs logger.info(f"intercepted s3fs calls: {str(mock_s3fs.mock_calls)}") diff --git a/lumigator/backend/backend/tracking/mlflow.py b/lumigator/backend/backend/tracking/mlflow.py index 2281404c4..cf08f3e17 100644 --- a/lumigator/backend/backend/tracking/mlflow.py +++ b/lumigator/backend/backend/tracking/mlflow.py @@ -33,7 +33,7 @@ def __init__(self, tracking_uri: str, s3_file_system: S3FileSystem): self._client = MlflowClient(tracking_uri=tracking_uri) self._s3_file_system = s3_file_system - def create_experiment( + async def create_experiment( self, name: str, description: str, @@ -79,7 +79,7 @@ def create_experiment( created_at=datetime.fromtimestamp(experiment.creation_time / 1000), ) - def delete_experiment(self, experiment_id: str) -> None: + async def delete_experiment(self, experiment_id: str) -> None: """Delete an experiment. Although Mflow has a delete_experiment method, We will use the functions of this class instead, so that we make sure we correctly clean up all the artifacts/runs/etc. associated with the experiment. 
@@ -87,7 +87,7 @@ def delete_experiment(self, experiment_id: str) -> None: workflow_ids = self._find_workflows(experiment_id) # delete all the workflows for workflow_id in workflow_ids: - self.delete_workflow(workflow_id) + await self.delete_workflow(workflow_id) # delete the experiment self._client.delete_experiment(experiment_id) @@ -164,7 +164,7 @@ async def _format_experiment(self, experiment: MlflowExperiment) -> GetExperimen workflows=workflows, ) - def update_experiment(self, experiment_id: str, new_name: str) -> None: + async def update_experiment(self, experiment_id: str, new_name: str) -> None: """Update the name of an experiment.""" raise NotImplementedError @@ -199,7 +199,7 @@ async def experiments_count(self): # this corresponds to creating a run in MLflow. # The run will have n number of nested runs, # which correspond to what we call "jobs" in our system - def create_workflow( + async def create_workflow( self, experiment_id: str, description: str, name: str, model: str, system_prompt: str ) -> WorkflowResponse: """Create a new workflow.""" @@ -256,7 +256,7 @@ async def get_workflow(self, workflow_id: str) -> WorkflowDetailsResponse | None system_prompt=workflow.data.tags.get("system_prompt"), status=WorkflowStatus(workflow.data.tags.get("status")), created_at=datetime.fromtimestamp(workflow.info.start_time / 1000), - jobs=[self.get_job(job_id) for job_id in all_job_ids], + jobs=[await self.get_job(job_id) for job_id in all_job_ids], metrics=self._compile_metrics(all_job_ids), parameters=self._compile_parameters(all_job_ids), ) @@ -302,7 +302,7 @@ async def get_workflow(self, workflow_id: str) -> WorkflowDetailsResponse | None workflow_details.artifacts_download_url = download_url return workflow_details - def update_workflow_status(self, workflow_id: str, status: WorkflowStatus) -> None: + async def update_workflow_status(self, workflow_id: str, status: WorkflowStatus) -> None: """Update the status of a workflow.""" self._client.set_tag(workflow_id, 
"status", status.value) @@ -328,7 +328,7 @@ def _get_ray_job_logs(self, ray_job_id: str): loguru.logger.error(f"Response text: {resp.text}") raise JobUpstreamError(ray_job_id, "JSON decode error in Ray response") from e - def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: + async def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: workflow_run = self._client.get_run(workflow_id) # get the jobs associated with the workflow all_jobs = self._client.search_runs( @@ -343,7 +343,7 @@ def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: # TODO: This is not a great solution but it matches the current API return JobLogsResponse(logs="\n================\n".join([log.logs for log in logs])) - def delete_workflow(self, workflow_id: str) -> WorkflowResponse: + async def delete_workflow(self, workflow_id: str) -> WorkflowResponse: """Delete a workflow.""" # first, get the workflow workflow = self._client.get_run(workflow_id) @@ -370,11 +370,11 @@ def delete_workflow(self, workflow_id: str) -> WorkflowResponse: created_at=datetime.fromtimestamp(workflow.info.start_time / 1000), ) - def list_workflows(self, experiment_id: str) -> list: + async def list_workflows(self, experiment_id: str) -> list: """List all workflows in an experiment.""" raise NotImplementedError - def create_job(self, experiment_id: str, workflow_id: str, name: str, job_id: str) -> str: + async def create_job(self, experiment_id: str, workflow_id: str, name: str, job_id: str) -> str: """Link a started job to an experiment and a workflow.""" run = self._client.create_run( experiment_id=experiment_id, @@ -384,14 +384,14 @@ def create_job(self, experiment_id: str, workflow_id: str, name: str, job_id: st self._client.log_param(run.info.run_id, "ray_job_id", job_id) return run.info.run_id - def update_job(self, job_id: str, data: RunOutputs): + async def update_job(self, job_id: str, data: RunOutputs): """Update the metrics and parameters of a job.""" for metric, value in 
data.metrics.items(): self._client.log_metric(job_id, metric, value) for parameter, value in data.parameters.items(): self._client.log_param(job_id, parameter, value) - def get_job(self, job_id: str): + async def get_job(self, job_id: str): """Get the results of a job.""" run = self._client.get_run(job_id) if run.info.lifecycle_stage == "deleted": @@ -404,11 +404,11 @@ def get_job(self, job_id: str): artifact_url="TODO", ) - def delete_job(self, job_id: str): + async def delete_job(self, job_id: str): """Delete a job.""" self._client.delete_run(job_id) - def list_jobs(self, workflow_id: str): + async def list_jobs(self, workflow_id: str): """List all jobs in a workflow.""" workflow_run = self._client.get_run(workflow_id) # get the jobs associated with the workflow diff --git a/lumigator/backend/backend/tracking/tracking_interface.py b/lumigator/backend/backend/tracking/tracking_interface.py index 4b142b1e8..95609d475 100644 --- a/lumigator/backend/backend/tracking/tracking_interface.py +++ b/lumigator/backend/backend/tracking/tracking_interface.py @@ -16,7 +16,7 @@ class TrackingClient(Protocol): """Interface for tracking clients.""" - def create_experiment( + async def create_experiment( self, name: str, description: str, @@ -31,11 +31,11 @@ async def get_experiment(self, experiment_id: str) -> GetExperimentResponse | No """Get an experiment.""" ... - def update_experiment(self, experiment_id: str, new_name: str) -> None: + async def update_experiment(self, experiment_id: str, new_name: str) -> None: """Update an experiment.""" ... - def delete_experiment(self, experiment_id: str) -> None: + async def delete_experiment(self, experiment_id: str) -> None: """Delete an experiment.""" ... @@ -47,7 +47,7 @@ async def experiments_count(self) -> int: """Count all experiments.""" ... 
- def create_workflow( + async def create_workflow( self, experiment_id: str, description: str, name: str, model: str, system_prompt: str ) -> WorkflowResponse: """Create a new workflow.""" @@ -58,7 +58,7 @@ async def get_workflow(self, workflow_id: str) -> WorkflowDetailsResponse | None ... @deprecated("get_workflow_logs is deprecated, it will be removed in future versions.") - def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: + async def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: """Get workflow logs. .. deprecated:: @@ -69,39 +69,39 @@ def get_workflow_logs(self, workflow_id: str) -> JobLogsResponse: ) ... - def update_workflow_status(self, workflow_id: str, status: WorkflowStatus) -> None: + async def update_workflow_status(self, workflow_id: str, status: WorkflowStatus) -> None: """Update a workflow.""" ... - def delete_workflow(self, workflow_id: str) -> WorkflowResponse: + async def delete_workflow(self, workflow_id: str) -> WorkflowResponse: """Delete a workflow.""" ... - def list_workflows(self, experiment_id: str) -> list: + async def list_workflows(self, experiment_id: str) -> list: """List all workflows for an experiment.""" ... - def create_job(self, experiment_id: str, workflow_id: str, name: str, job_id: str): + async def create_job(self, experiment_id: str, workflow_id: str, name: str, job_id: str): """Link a started job to an experiment and a workflow.""" ... - def update_workflow(self, workflow_id: str, data: RunOutputs): + async def update_workflow(self, workflow_id: str, data: RunOutputs): """Update the outputs of a workflow""" ... - def get_job(self, job_id: str) -> JobResults | None: + async def get_job(self, job_id: str) -> JobResults | None: """Get a job.""" ... - def update_job(self, job_id: str, data: RunOutputs): + async def update_job(self, job_id: str, data: RunOutputs): """Update a job.""" ... 
- def delete_job(self, job_id: str) -> None: + async def delete_job(self, job_id: str) -> None: """Delete a job.""" ... - def list_jobs(self, workflow_id: str) -> list: + async def list_jobs(self, workflow_id: str) -> list: """List all jobs for a workflow.""" ... diff --git a/lumigator/backend/pyproject.toml b/lumigator/backend/pyproject.toml index 4fcbbc9a4..c0dedbb8a 100644 --- a/lumigator/backend/pyproject.toml +++ b/lumigator/backend/pyproject.toml @@ -6,17 +6,14 @@ readme = "README.md" requires-python = ">=3.11" dependencies = [ "fastapi[standard]>=0.115.0", - "boto3==1.34.105", - "boto3-stubs[essential,s3]==1.34.105", "loguru==0.7.2", - "mypy-boto3==1.34.105", "pydantic>=2.10.0", "pydantic-settings==2.2.1", "requests>=2,<3", "sqlalchemy[asyncio]==2.0.28", "uvicorn[standard]==0.28.0", - "s3fs==2024.5.0", - "datasets==2.20.0", + "s3fs>=2024.12.0", + "datasets==3.4.1", "ray[client]==2.30.0", "psycopg2-binary==2.9.9", # For sqlalchemy @@ -32,7 +29,6 @@ dev = [ "pytest>=8.3.3", "pytest-asyncio>=0.25.3", "requests-mock>=1.12.1", - "moto[s3]>=5.0,<6", "debugpy>=1.8.11" ] diff --git a/lumigator/backend/uv.lock b/lumigator/backend/uv.lock index e46b87e65..2a9cb41d2 100644 --- a/lumigator/backend/uv.lock +++ b/lumigator/backend/uv.lock @@ -170,15 +170,12 @@ version = "0.1.3a0" source = { virtual = "." 
} dependencies = [ { name = "alembic" }, - { name = "boto3" }, - { name = "boto3-stubs", extra = ["essential", "s3"] }, { name = "cryptography" }, { name = "datasets" }, { name = "fastapi", extra = ["standard"] }, { name = "loguru" }, { name = "lumigator-schemas" }, { name = "mlflow" }, - { name = "mypy-boto3" }, { name = "psycopg2-binary" }, { name = "pydantic" }, { name = "pydantic-settings" }, @@ -193,7 +190,6 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "debugpy" }, - { name = "moto", extra = ["s3"] }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "requests-mock" }, @@ -202,22 +198,19 @@ dev = [ [package.metadata] requires-dist = [ { name = "alembic", specifier = ">=1.13.3" }, - { name = "boto3", specifier = "==1.34.105" }, - { name = "boto3-stubs", extras = ["essential", "s3"], specifier = "==1.34.105" }, { name = "cryptography", specifier = ">=43.0.0" }, - { name = "datasets", specifier = "==2.20.0" }, + { name = "datasets", specifier = "==3.4.1" }, { name = "fastapi", extras = ["standard"], specifier = ">=0.115.0" }, { name = "loguru", specifier = "==0.7.2" }, { name = "lumigator-schemas", editable = "../schemas" }, { name = "mlflow", specifier = ">=2.20.3" }, - { name = "mypy-boto3", specifier = "==1.34.105" }, { name = "psycopg2-binary", specifier = "==2.9.9" }, { name = "pydantic", specifier = ">=2.10.0" }, { name = "pydantic-settings", specifier = "==2.2.1" }, { name = "python-dotenv", specifier = ">=1.0.1" }, { name = "ray", extras = ["client"], specifier = "==2.30.0" }, { name = "requests", specifier = ">=2,<3" }, - { name = "s3fs", specifier = "==2024.5.0" }, + { name = "s3fs", specifier = ">=2024.12.0" }, { name = "sqlalchemy", extras = ["asyncio"], specifier = "==2.0.28" }, { name = "uvicorn", extras = ["standard"], specifier = "==0.28.0" }, ] @@ -225,7 +218,6 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "debugpy", specifier = ">=1.8.11" }, - { name = "moto", extras = ["s3"], specifier = 
">=5.0,<6" }, { name = "pytest", specifier = ">=8.3.3" }, { name = "pytest-asyncio", specifier = ">=0.25.3" }, { name = "requests-mock", specifier = ">=1.12.1" }, @@ -240,48 +232,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, ] -[[package]] -name = "boto3" -version = "1.34.105" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "botocore" }, - { name = "jmespath" }, - { name = "s3transfer" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/79/cc/39b8f568e56c94dc2e71402e44ddc9d3d4ca6ec56912b1a02d4c79813cb8/boto3-1.34.105.tar.gz", hash = "sha256:f2c11635be0de7b7c06eb606ece1add125e02d6ed521592294a0a21af09af135", size = 108305 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/34/54/6038f386e64cd4555461124524aa3b687afbedfddb44490bf070d35535e8/boto3-1.34.105-py3-none-any.whl", hash = "sha256:b633e8fbf7145bdb995ce68a27d096bb89fd393185b0e773418d81cd78db5a03", size = 139335 }, -] - -[[package]] -name = "boto3-stubs" -version = "1.34.105" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "botocore-stubs" }, - { name = "types-s3transfer" }, - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/56/11e7cf2e220d5469d718bc977e33f424cd877d727b04abb5efcdf37107dd/boto3_stubs-1.34.105.tar.gz", hash = "sha256:6aec0131447885a24db82279609a08130c5d0b0f738dfc70a59ff3f453c04a68", size = 88192 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/61/a5/cc47e3dc03a398655dcbebf0ebd2c2824fa6cd4469fc2acb104294eaecd9/boto3_stubs-1.34.105-py3-none-any.whl", hash = "sha256:ba03524668d5edb5e177d11df7937e769bd52e7d7adbee20762353f72b775eb5", size = 56356 }, -] - -[package.optional-dependencies] -essential = [ 
- { name = "mypy-boto3-cloudformation" }, - { name = "mypy-boto3-dynamodb" }, - { name = "mypy-boto3-ec2" }, - { name = "mypy-boto3-lambda" }, - { name = "mypy-boto3-rds" }, - { name = "mypy-boto3-s3" }, - { name = "mypy-boto3-sqs" }, -] -s3 = [ - { name = "mypy-boto3-s3" }, -] - [[package]] name = "botocore" version = "1.34.162" @@ -296,18 +246,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bc/47/e35f788047c91110f48703a6254e5c84e33111b3291f7b57a653ca00accf/botocore-1.34.162-py3-none-any.whl", hash = "sha256:2d918b02db88d27a75b48275e6fb2506e9adaaddbec1ffa6a8a0898b34e769be", size = 12468049 }, ] -[[package]] -name = "botocore-stubs" -version = "1.37.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "types-awscrt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/11/ab/c06cd0ccc508b334949563d3c0e0d07dcfb8d65b393891390604fb23984c/botocore_stubs-1.37.1.tar.gz", hash = "sha256:fad2ac52f815d71841c66b0d97f884376456db0cd5917d44f474ac29eef34a19", size = 41416 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/2e/999e44a7605db48b124812d67f7261c6c8b111547a4e25fbbc4058988c21/botocore_stubs-1.37.1-py3-none-any.whl", hash = "sha256:52119b15e1dc6f6f5d2ea714177384a37ba877fac4f14b1cf787230562a04643", size = 64259 }, -] - [[package]] name = "cachetools" version = "5.5.2" @@ -559,7 +497,7 @@ wheels = [ [[package]] name = "datasets" -version = "2.20.0" +version = "3.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -572,15 +510,14 @@ dependencies = [ { name = "packaging" }, { name = "pandas" }, { name = "pyarrow" }, - { name = "pyarrow-hotfix" }, { name = "pyyaml" }, { name = "requests" }, { name = "tqdm" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/59/b94bfb5f6225c4c931cd516390b3f006e232a036a48337f72889c6c9ab27/datasets-2.20.0.tar.gz", hash = "sha256:3c4dbcd27e0f642b9d41d20ff2efa721a5e04b32b2ca4009e0fc9139e324553f", 
size = 2225757 } +sdist = { url = "https://files.pythonhosted.org/packages/99/4b/40cda74a4e0e58450b0c85a737e134ab5df65e6f5c33c5e175db5d6a5227/datasets-3.4.1.tar.gz", hash = "sha256:e23968da79bc014ef9f7540eeb7771c6180eae82c86ebcfcc10535a03caf08b5", size = 566559 } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/2d/963b266bb8f88492d5ab4232d74292af8beb5b6fdae97902df9e284d4c32/datasets-2.20.0-py3-none-any.whl", hash = "sha256:76ac02e3bdfff824492e20678f0b6b1b6d080515957fe834b00c2ba8d6b18e5e", size = 547777 }, + { url = "https://files.pythonhosted.org/packages/16/44/5de560a2625d31801895fb2663693df210c6465960d61a99192caa9afd63/datasets-3.4.1-py3-none-any.whl", hash = "sha256:b91cf257bd64132fa9d953dd4768ab6d63205597301f132a74271cfcce8b5dd3", size = 487392 }, ] [[package]] @@ -818,11 +755,11 @@ wheels = [ [[package]] name = "fsspec" -version = "2024.5.0" +version = "2024.12.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/28/cbf337fddd6f22686b7c2639b80e006accd904db152fe333fd98f4cd8d1e/fsspec-2024.5.0.tar.gz", hash = "sha256:1d021b0b0f933e3b3029ed808eb400c08ba101ca2de4b3483fbc9ca23fcee94a", size = 400066 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/11/de70dee31455c546fbc88301971ec03c328f3d1138cfba14263f651e9551/fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f", size = 291600 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/a3/16e9fe32187e9c8bc7f9b7bcd9728529faa725231a0c96f2f98714ff2fc5/fsspec-2024.5.0-py3-none-any.whl", hash = "sha256:e0fdbc446d67e182f49a70b82cf7889028a63588fde6b222521f10937b2b670c", size = 316106 }, + { url = "https://files.pythonhosted.org/packages/de/86/5486b0188d08aa643e127774a99bac51ffa6cf343e3deb0583956dca5b22/fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2", size = 183862 }, ] [package.optional-dependencies] @@ 
-1452,32 +1389,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/f6/c55a49753098ae8469bb5c1b177298b65ac68f4a7334dd37b727cdcd72eb/mlflow_skinny-2.20.3-py3-none-any.whl", hash = "sha256:4cf9502bf8b7c4c971c90808560caeb2d57608354927f7b7b3150ca2c580c022", size = 6008656 }, ] -[[package]] -name = "moto" -version = "5.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "boto3" }, - { name = "botocore" }, - { name = "cryptography" }, - { name = "jinja2" }, - { name = "python-dateutil" }, - { name = "requests" }, - { name = "responses" }, - { name = "werkzeug" }, - { name = "xmltodict" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/fc/629a225bf615ed5e787aa36ef3c395ed9c702e6c14b51ee5bf174c5d7dd9/moto-5.1.0.tar.gz", hash = "sha256:879274a9d2213ca49706e3c8ea380d90953ec1ec642976f6315255394d36edc0", size = 6581832 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/d7/0dadf1e7ee7be7920dc0d8d3ee9e0fa9af13f2eedec7fffc4475a91697a6/moto-5.1.0-py3-none-any.whl", hash = "sha256:4fada00cedfba661aa58fe0b33b3ba9a0ef96d0e9937c9bed5163053898b4a27", size = 4725360 }, -] - -[package.optional-dependencies] -s3 = [ - { name = "py-partiql-parser" }, - { name = "pyyaml" }, -] - [[package]] name = "msgpack" version = "1.1.0" @@ -1589,103 +1500,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351 }, ] -[[package]] -name = "mypy-boto3" -version = "1.34.105" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "boto3" }, - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3a/d5/720940785599388dd135bf3ace3085398c91d387457385bc6b465b19abc2/mypy_boto3-1.34.105.tar.gz", hash = 
"sha256:652db2cd761ad77eed72f33aa506e4728d99fa5bdb941baaa258dfc12b15b63b", size = 21320 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/0b/d95e0ab2391dbf5356300e6fdefbe6d0ca21760d4e46e67fc34634b03602/mypy_boto3-1.34.105-py3-none-any.whl", hash = "sha256:5bb74a91e26e34c69d75f0d81467f101bc76eab864932bb35c68aacf0ecb6c7a", size = 21147 }, -] - -[[package]] -name = "mypy-boto3-cloudformation" -version = "1.34.111" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/59/c3/f48efbcc17fb03fb167993028889be0bfbb582720e3eaa719786c5c53085/mypy_boto3_cloudformation-1.34.111.tar.gz", hash = "sha256:a02e201d1a9d9a8fb4db5b942d5c537a4e8861c611f0d986126674ac557cb9e8", size = 57941 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/f3/c18601b0f21080c4b6183d924ae0052bce0d792ef97b1cbdccfb6d535313/mypy_boto3_cloudformation-1.34.111-py3-none-any.whl", hash = "sha256:526e928c504fa2880b1774aa10629a04fe0ec70ed2864ab3d3f7772386a1a925", size = 70105 }, -] - -[[package]] -name = "mypy-boto3-dynamodb" -version = "1.34.148" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/03/8b/79ce3b60d347b3c9a1571b587a36f7c0c042ece1d39377a07cefbf072a4d/mypy_boto3_dynamodb-1.34.148.tar.gz", hash = "sha256:c85489b92cbbbe4f6997070372022df914d4cb8eb707fdc73aa18ce6ba25c578", size = 50077 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/74/2a13f51997fd06b403e0109ded7976a451b76ef1569b8803b41c6b47139e/mypy_boto3_dynamodb-1.34.148-py3-none-any.whl", hash = "sha256:f1a7aabff5c6e926b9b272df87251c9d6dfceb4c1fb159fb5a2df52062cd7e87", size = 60366 }, -] - -[[package]] -name = "mypy-boto3-ec2" -version = "1.34.159" -source = { registry = "https://pypi.org/simple" } 
-dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/01/7f/3f1bb06b88168e6c27487e8b42d09d1093322cdfd47d1fbcfce206ef25f3/mypy_boto3_ec2-1.34.159.tar.gz", hash = "sha256:b9badb833dd01e2076c445b3b8609ec4842221620dc8f701dc146b8ceff05283", size = 408995 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/85/b27eeea58de39738e2954b455b95383c988974c6dda006da0e640087d431/mypy_boto3_ec2-1.34.159-py3-none-any.whl", hash = "sha256:d155c4295cd38750bf50adf9540951187f8f05800cd6e6b8fd2058ff0eeccfb4", size = 401497 }, -] - -[[package]] -name = "mypy-boto3-lambda" -version = "1.34.77" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/21/b3/49903a707edbe4f67ba5c4876288041e3841553c3c99b0172bc6682c18f5/mypy-boto3-lambda-1.34.77.tar.gz", hash = "sha256:7b81d2a5604fb592e92fe0b284ecd259de071703360a33b71c9b54df46d81c9c", size = 43125 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/f1/52da9a9148885ba3e3b9f0b49a2f6c638a1ef997397bdbf26e1547bb5cb6/mypy_boto3_lambda-1.34.77-py3-none-any.whl", hash = "sha256:e21022d2eef12aa731af80790410afdba9412b056339823252813bae2adbf553", size = 50206 }, -] - -[[package]] -name = "mypy-boto3-rds" -version = "1.34.152" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/37/c1/3e03c44ba84906be50786f324bc0488ee5bf36d60906e225611a7d3cfae6/mypy_boto3_rds-1.34.152.tar.gz", hash = "sha256:a3e25da87116e4b7ec4f1419a35fd3c7491f1cf631d9467cc835bc9c5c23fabe", size = 94343 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/52/78/ea038ad37f311c3e0ea9ddf82f9b209dbcbb7b4721a7c7667f550384da24/mypy_boto3_rds-1.34.152-py3-none-any.whl", hash = "sha256:71106812e6e6a89daa99f9b4534c580456336373e6dccb45b652e4d221c7beea", size = 101026 }, -] - -[[package]] -name = "mypy-boto3-s3" -version = "1.34.162" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/88/5a/432a718a7472b83191b3b58064472ca75f7b014eff0252985e45b2542463/mypy_boto3_s3-1.34.162.tar.gz", hash = "sha256:7e2fbda0fbd97a17a172a503bade7c4a2615d5ebf6fa532c274b8020bb3c6894", size = 75773 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/06/21/f4b6928de4ed0097ff0d69f114d9dff7a343e8a6ace3745866420d0bcaa4/mypy_boto3_s3-1.34.162-py3-none-any.whl", hash = "sha256:c7ab11369041a62c7d7f4c6dd1d3aab53470339df4b1e1da94df88914c25be29", size = 83945 }, -] - -[[package]] -name = "mypy-boto3-sqs" -version = "1.34.121" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/81/75/83be93078eb1e78f7f7d04e33c2c7e313c81420660d3fb7a28522487e07a/mypy_boto3_sqs-1.34.121.tar.gz", hash = "sha256:bdbc623235ffc8127cb8753f49323f74a919df552247b0b2caaf85cf9bb495b8", size = 22200 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/18/96a8c01ff1b9663810263a06f8b84bca8ecca18da75b23bedd9f01f1b1a6/mypy_boto3_sqs-1.34.121-py3-none-any.whl", hash = "sha256:e92aefacfa08e7094b79002576ef261e4075f5af9c25219fc47fb8452f53fc5f", size = 33040 }, -] - [[package]] name = "numpy" version = "2.2.3" @@ -1998,15 +1812,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7b/08/9c66c269b0d417a0af9fb969535f0371b8c538633535a7a6a5ca3f9231e2/psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = 
"sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab", size = 1163864 }, ] -[[package]] -name = "py-partiql-parser" -version = "0.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/a1/0a2867e48b232b4f82c4929ef7135f2a5d72c3886b957dccf63c70aa2fcb/py_partiql_parser-0.6.1.tar.gz", hash = "sha256:8583ff2a0e15560ef3bc3df109a7714d17f87d81d33e8c38b7fed4e58a63215d", size = 17120 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/97/84/0e410c20bbe9a504fc56e97908f13261c2b313d16cbb3b738556166f044a/py_partiql_parser-0.6.1-py2.py3-none-any.whl", hash = "sha256:ff6a48067bff23c37e9044021bf1d949c83e195490c17e020715e927fe5b2456", size = 23520 }, -] - [[package]] name = "pyarrow" version = "19.0.1" @@ -2042,15 +1847,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ed/bd/54907846383dcc7ee28772d7e646f6c34276a17da740002a5cefe90f04f7/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8", size = 42085744 }, ] -[[package]] -name = "pyarrow-hotfix" -version = "0.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/0a/71da7b0db0c7078d4cf34ecf0c70ded5ed29decc06612097474e0114f4cc/pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945", size = 9754 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/f4/9ec2222f5f5f8ea04f66f184caafd991a39c8782e31f5b0266f101cb68ca/pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178", size = 7888 }, -] - [[package]] name = "pyasn1" version = "0.6.1" @@ -2366,20 +2162,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/97/ec/889fbc557727da0c34a33850950310240f2040f3b1955175fdb2b36a8910/requests_mock-1.12.1-py2.py3-none-any.whl", hash = 
"sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", size = 27695 }, ] -[[package]] -name = "responses" -version = "0.25.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyyaml" }, - { name = "requests" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/97/63/759996eea0f17e8dc4c9ea9c60765292d28a7750bdbee073ad55d83caa57/responses-0.25.6.tar.gz", hash = "sha256:eae7ce61a9603004e76c05691e7c389e59652d91e94b419623c12bbfb8e331d8", size = 79145 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/c4/8d23584b3a3471ea6f5a18cfb035e11eeb9fa9b3112d901477c6ad10cc4e/responses-0.25.6-py3-none-any.whl", hash = "sha256:9cac8f21e1193bb150ec557875377e41ed56248aed94e4567ed644db564bacf1", size = 34730 }, -] - [[package]] name = "rich" version = "13.9.4" @@ -2481,28 +2263,16 @@ wheels = [ [[package]] name = "s3fs" -version = "2024.5.0" +version = "2024.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiobotocore" }, { name = "aiohttp" }, { name = "fsspec" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e6/37/bede54962c7946613f68c393f14731023c25ef5508c3c8cbe87dfc8c0fbb/s3fs-2024.5.0.tar.gz", hash = "sha256:b03471ae0d066b275b7dd0b0383cc5a93538ef40b2f6e730ce447bce849c1e32", size = 74910 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/da/8e924dcfe126e402f0c6d967368ceddab9519ee0292ea2312f212722cb1e/s3fs-2024.5.0-py3-none-any.whl", hash = "sha256:edccd9cc9f33a344a090e54b71e9f507e5b2d79369353b0e101237b20a720bc6", size = 29462 }, -] - -[[package]] -name = "s3transfer" -version = "0.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "botocore" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 
145287 } +sdist = { url = "https://files.pythonhosted.org/packages/96/88/e2fc4fc2a618126ac3cea9b16a4abc5a37dff2522067c9730b5d72d67ac3/s3fs-2024.12.0.tar.gz", hash = "sha256:1b0f3a8f5946cca5ba29871d6792ab1e4528ed762327d8aefafc81b73b99fd56", size = 76578 } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175 }, + { url = "https://files.pythonhosted.org/packages/f7/af/eaec1466887348d7f6cc9d3a668b30b62a4629fb187d0268146118ba3d5e/s3fs-2024.12.0-py3-none-any.whl", hash = "sha256:d8665549f9d1de083151582437a2f10d5f3b3227c1f8e67a2b0b730db813e005", size = 30196 }, ] [[package]] @@ -2721,24 +2491,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/cc/0a838ba5ca64dc832aa43f727bd586309846b0ffb2ce52422543e6075e8a/typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847", size = 44908 }, ] -[[package]] -name = "types-awscrt" -version = "0.23.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a3/53/7c69677327794fe91cc89a1362400b78f00b1a20364384da1e004c259d42/types_awscrt-0.23.10.tar.gz", hash = "sha256:965659260599b421564204b895467684104a2c0311bbacfd3c2423b8b0d3f3e9", size = 15455 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/89/ad/3d7c9a8b972048f3987355e3e48da56eb9f3ed8e151113c3c973b43ad91e/types_awscrt-0.23.10-py3-none-any.whl", hash = "sha256:7391bf502f6093221e68da8fb6a2af7ec67a98d376c58d5b76cc3938f449d121", size = 19426 }, -] - -[[package]] -name = "types-s3transfer" -version = "0.11.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/1a/6e812e46e6a446fa14a3ad2970948d96322a14b1306d50705cbad87b5ad5/types_s3transfer-0.11.3.tar.gz", hash = 
"sha256:48b90c1f9950dafd9816d0d1476e3323fbcd3202b74f2908b8a28c58e08e75b9", size = 14057 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/67/ee/19a69caff957a2e3df2d28652f58e5dfef9f7d0d6e0908848a0d0362f611/types_s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:953491bc834b171d4a477130e80aecd87dc27c4ba71b9d339c178deaad3fd517", size = 19487 }, -] - [[package]] name = "typing-extensions" version = "4.12.2" @@ -2990,15 +2742,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 }, ] -[[package]] -name = "xmltodict" -version = "0.14.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/50/05/51dcca9a9bf5e1bce52582683ce50980bcadbc4fa5143b9f2b19ab99958f/xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553", size = 51942 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/45/fc303eb433e8a2a271739c98e953728422fa61a3c1f36077a49e395c972e/xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac", size = 9981 }, -] - [[package]] name = "xxhash" version = "3.5.0" diff --git a/pyproject.toml b/pyproject.toml index 502980269..4faf68693 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,9 +31,7 @@ dev = [ "loguru>=0.7.2", "lumigator-schemas", "lumigator-sdk", - "mlflow>=2.20.0", "mlflow>=2.20.3", - "mypy-boto3-s3>=1.36.0", "pytest>=8.3.4", "python-dotenv>=1.0.1", "ray>=2.41.0", @@ -46,3 +44,8 @@ members = ["notebooks"] [tool.uv.sources] lumigator-sdk = { path = "lumigator/sdk" } lumigator-schemas = { path = "lumigator/schemas" } + +[tool.pytest.ini_options] +markers = [ + "integration: marks tests as integration tests (deselect with '-m \"not integration\"')", +] diff 
--git a/uv.lock b/uv.lock index 2ef037577..cffdf7c5e 100644 --- a/uv.lock +++ b/uv.lock @@ -452,7 +452,6 @@ dev = [ { name = "lumigator-schemas" }, { name = "lumigator-sdk" }, { name = "mlflow" }, - { name = "mypy-boto3-s3" }, { name = "pytest" }, { name = "python-dotenv" }, { name = "ray" }, @@ -484,9 +483,7 @@ dev = [ { name = "loguru", specifier = ">=0.7.2" }, { name = "lumigator-schemas", directory = "lumigator/schemas" }, { name = "lumigator-sdk", directory = "lumigator/sdk" }, - { name = "mlflow", specifier = ">=2.20.0" }, { name = "mlflow", specifier = ">=2.20.3" }, - { name = "mypy-boto3-s3", specifier = ">=1.36.0" }, { name = "pytest", specifier = ">=8.3.4" }, { name = "python-dotenv", specifier = ">=1.0.1" }, { name = "ray", specifier = ">=2.41.0" }, @@ -1695,18 +1692,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 }, ] -[[package]] -name = "mypy-boto3-s3" -version = "1.36.21" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/14/dc3737dfd3d105d9a4f11065798f525deebd1bc8093b58c4ff2aff344aff/mypy_boto3_s3-1.36.21.tar.gz", hash = "sha256:9c6143c0dabfbd98e6c741e7cc65a33c7f87b8c28eeb373a2bc3e2c923af8283", size = 73643 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/9a/4740755604974b32247b00d82d8e5f7f88e3f39200dbacaf83c212ab13ef/mypy_boto3_s3-1.36.21-py3-none-any.whl", hash = "sha256:bfda17f51efafc2cdcefad7a13f5ac35bd721291476d8558c2d3a21758442be5", size = 80240 }, -] - [[package]] name = "myst-parser" version = "4.0.1"