From da97ae69cca6814693e651c833dbf39996057f14 Mon Sep 17 00:00:00 2001
From: Mario Buikhuizen
Date: Thu, 21 Nov 2024 14:59:13 +0100
Subject: [PATCH] fix: nm.upload_pandas() fails with SSLError

Revert "feat: propagate requests.Session with cert file (#139)"

This reverts commit efdf843065602f70fb615038e759a2a524439361.
---
 nominal/core/_clientsbunch.py | 14 ++------------
 nominal/core/_multipart.py    | 10 ++++------
 nominal/core/client.py        | 30 +++++-------------------------
 nominal/core/dataset.py       | 11 +++--------
 4 files changed, 14 insertions(+), 51 deletions(-)

diff --git a/nominal/core/_clientsbunch.py b/nominal/core/_clientsbunch.py
index b6442c55..d6efeb1c 100644
--- a/nominal/core/_clientsbunch.py
+++ b/nominal/core/_clientsbunch.py
@@ -4,7 +4,6 @@
 from functools import partial
 from typing import Protocol
 
-import requests
 from conjure_python_client import RequestsClient, ServiceConfiguration
 from typing_extensions import Self
 
@@ -33,8 +32,6 @@
 @dataclass(frozen=True)
 class ClientsBunch:
     auth_header: str
-    requests_session: requests.Session
-    """The session should be used for requests to utilize the same cert as conjure calls."""
 
     assets: scout_assets.AssetService
     attachment: attachments_api.AttachmentService
@@ -59,13 +56,11 @@ class ClientsBunch:
     notebook: scout.NotebookService
 
     @classmethod
-    def from_config(cls, cfg: ServiceConfiguration, agent: str, token: str, trust_store_path: str) -> Self:
+    def from_config(cls, cfg: ServiceConfiguration, agent: str, token: str) -> Self:
         client_factory = partial(RequestsClient.create, user_agent=agent, service_config=cfg)
-        requests_session = requests.Session()
-        requests_session.cert = trust_store_path
+
         return cls(
             auth_header=f"Bearer {token}",
-            requests_session=requests_session,
             assets=client_factory(scout_assets.AssetService),
             attachment=client_factory(attachments_api.AttachmentService),
             authentication=client_factory(authentication_api.AuthenticationServiceV2),
@@ -93,8 +88,3 @@ def from_config(cls, cfg: ServiceConfiguration, agent: str, token: str, trust_st
 class HasAuthHeader(Protocol):
     @property
     def auth_header(self) -> str: ...
-
-
-class HasRequestsSession(Protocol):
-    @property
-    def requests_session(self) -> requests.Session: ...
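For context on the hunk above: ClientsBunch.from_config builds every Conjure service client through one functools.partial factory and, after this revert, no longer constructs or carries a requests.Session. A minimal sketch of that factory pattern follows; create_service is a hypothetical stand-in for RequestsClient.create, and the argument values are placeholders.

    from functools import partial


    def create_service(service_name: str, *, user_agent: str, service_config: dict) -> str:
        # Hypothetical stand-in for RequestsClient.create(service_class, user_agent=..., service_config=...).
        return f"{service_name} client (agent={user_agent})"


    # Bind the arguments shared by every service once, mirroring
    # client_factory = partial(RequestsClient.create, user_agent=agent, service_config=cfg).
    client_factory = partial(create_service, user_agent="nominal-python/example", service_config={})

    # Each ClientsBunch field is then built from the same factory,
    # e.g. assets=client_factory(scout_assets.AssetService).
    print(client_factory("AssetService"))
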
diff --git a/nominal/core/_multipart.py b/nominal/core/_multipart.py
index d65bb9c1..77358c44 100644
--- a/nominal/core/_multipart.py
+++ b/nominal/core/_multipart.py
@@ -15,9 +15,8 @@
 
 
 def _sign_and_upload_part_job(
-    auth_header: str,
     upload_client: upload_api.UploadService,
-    session: requests.Session,
+    auth_header: str,
     key: str,
     upload_id: str,
     q: Queue[bytes],
@@ -30,7 +29,7 @@ def _sign_and_upload_part_job(
         "successfully signed multipart upload part",
         extra={"key": key, "part": part, "upload_id": upload_id, "response.url": response.url},
     )
-    put_response = session.put(response.url, data=data, headers=response.headers)
+    put_response = requests.put(response.url, data=data, headers=response.headers)
     logger.debug(
         "put multipart upload part",
         extra={"url": response.url, "size": len(data), "status_code": put_response.status_code},
@@ -51,11 +50,10 @@ def _iter_chunks(f: BinaryIO, chunk_size: int) -> Iterable[bytes]:
 
 def put_multipart_upload(
     auth_header: str,
-    upload_client: upload_api.UploadService,
-    session: requests.Session,
     f: BinaryIO,
     filename: str,
     mimetype: str,
+    upload_client: upload_api.UploadService,
     chunk_size: int = 64_000_000,
     max_workers: int = 8,
 ) -> str:
@@ -80,7 +78,7 @@ def put_multipart_upload(
     initiate_request = ingest_api.InitiateMultipartUploadRequest(filename=filename, filetype=mimetype)
     initiate_response = upload_client.initiate_multipart_upload(auth_header, initiate_request)
     key, upload_id = initiate_response.key, initiate_response.upload_id
-    _sign_and_upload_part = partial(_sign_and_upload_part_job, auth_header, upload_client, session, key, upload_id, q)
+    _sign_and_upload_part = partial(_sign_and_upload_part_job, upload_client, auth_header, key, upload_id, q)
 
     jobs: list[concurrent.futures.Future[requests.Response]] = []
 
diff --git a/nominal/core/client.py b/nominal/core/client.py
index 2295a499..2cef7b0a 100644
--- a/nominal/core/client.py
+++ b/nominal/core/client.py
@@ -83,7 +83,7 @@ def create(
             connect_timeout=connect_timeout,
         )
         agent = construct_user_agent_string()
-        return cls(_clients=ClientsBunch.from_config(cfg, agent, token, trust_store_path))
+        return cls(_clients=ClientsBunch.from_config(cfg, agent, token))
 
     def get_user(self) -> User:
         """Retrieve the user associated with this client."""
@@ -234,12 +234,7 @@ def create_dataset_from_io(
         filename = f"{urlsafe_name}{file_type.extension}"
 
         s3_path = put_multipart_upload(
-            self._clients.auth_header,
-            self._clients.upload,
-            self._clients.requests_session,
-            dataset,
-            filename,
-            file_type.mimetype,
+            self._clients.auth_header, dataset, filename, file_type.mimetype, self._clients.upload
         )
         request = ingest_api.TriggerFileIngest(
             destination=ingest_api.IngestDestination(
@@ -285,12 +280,7 @@ def create_video_from_io(
         filename = f"{urlsafe_name}{file_type.extension}"
 
         s3_path = put_multipart_upload(
-            self._clients.auth_header,
-            self._clients.upload,
-            self._clients.requests_session,
-            video,
-            filename,
-            file_type.mimetype,
+            self._clients.auth_header, video, filename, file_type.mimetype, self._clients.upload
         )
         request = ingest_api.IngestVideoRequest(
             labels=list(labels),
@@ -437,12 +427,7 @@ def create_attachment_from_io(
         filename = f"{urlsafe_name}{file_type.extension}"
 
         s3_path = put_multipart_upload(
-            self._clients.auth_header,
-            self._clients.upload,
-            self._clients.requests_session,
-            attachment,
-            filename,
-            file_type.mimetype,
+            self._clients.auth_header, attachment, filename, file_type.mimetype, self._clients.upload
         )
         request = attachments_api.CreateAttachmentRequest(
             description=description or "",
@@ -565,12 +550,7 @@ def create_video_from_mcap_io(
         filename = f"{urlsafe_name}{file_type.extension}"
 
         s3_path = put_multipart_upload(
-            self._clients.auth_header,
-            self._clients.upload,
-            self._clients.requests_session,
-            mcap,
-            filename,
-            file_type.mimetype,
+            self._clients.auth_header, mcap, filename, file_type.mimetype, self._clients.upload
         )
         request = ingest_api.IngestMcapRequest(
             channel_config=[
diff --git a/nominal/core/dataset.py b/nominal/core/dataset.py
index d1a3c77f..ca1879e3 100644
--- a/nominal/core/dataset.py
+++ b/nominal/core/dataset.py
@@ -25,7 +25,7 @@
     upload_api,
 )
 from nominal._utils import FileType, FileTypes
-from nominal.core._clientsbunch import HasAuthHeader, HasRequestsSession
+from nominal.core._clientsbunch import HasAuthHeader
 from nominal.core._conjure_utils import _available_units, _build_unit_update
 from nominal.core._multipart import put_multipart_upload
 from nominal.core._utils import HasRid, update_dataclass
@@ -66,7 +66,7 @@ class Dataset(HasRid):
     bounds: DatasetBounds | None
     _clients: _Clients = field(repr=False)
 
-    class _Clients(Channel._Clients, HasAuthHeader, HasRequestsSession, Protocol):
+    class _Clients(Channel._Clients, HasAuthHeader, Protocol):
         @property
         def catalog(self) -> scout_catalog.CatalogService: ...
         @property
@@ -187,12 +187,7 @@ def add_to_dataset_from_io(
         urlsafe_name = urllib.parse.quote_plus(self.name)
         filename = f"{urlsafe_name}{file_type.extension}"
         s3_path = put_multipart_upload(
-            self._clients.auth_header,
-            self._clients.upload,
-            self._clients.requests_session,
-            dataset,
-            filename,
-            file_type.mimetype,
+            self._clients.auth_header, dataset, filename, file_type.mimetype, self._clients.upload
         )
         request = ingest_api.TriggerFileIngest(
             destination=ingest_api.IngestDestination(
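After this revert, each signed part is uploaded with the module-level requests.put rather than through a shared requests.Session whose cert attribute pointed at the trust store. A minimal sketch of that upload step follows; the URL, headers, and data below are placeholders for the values _sign_and_upload_part_job takes from the signed-part response and the chunk queue.

    import requests

    # Placeholders; in _sign_and_upload_part_job these come from the signed
    # multipart-upload response (response.url, response.headers) and the chunk queue.
    signed_url = "https://example.invalid/bucket/key?partNumber=1&uploadId=abc"  # hypothetical URL
    headers = {"Content-Type": "application/octet-stream"}
    data = b"one chunk of the file being uploaded"

    # Module-level requests.put with default SSL verification, as restored by the
    # revert, instead of session.put(...) on a Session whose cert was set.
    put_response = requests.put(signed_url, data=data, headers=headers)
    put_response.raise_for_status()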