Skip to content

Commit 788d076

Browse files
authored
Actually publish the sync config retrieval API (#54)
Part of #33
1 parent 05c7157 commit 788d076

File tree

5 files changed

+9
-6
lines changed

5 files changed

+9
-6
lines changed

src/lmstudio/sync_api.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1339,9 +1339,8 @@ def get_info(self) -> ModelInstanceInfo:
         """Get the model info for this model."""
         return self._session.get_model_info(self.identifier)

-    # Private until this API can emit the client config types
     @sdk_public_api()
-    def _get_load_config(self) -> AnyLoadConfig:
+    def get_load_config(self) -> AnyLoadConfig:
         """Get the model load config for this model."""
         return self._session._get_load_config(self.identifier)

tests/async/test_embedding_async.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,8 @@ async def test_context_length_async(model_id: str, caplog: LogCap) -> None:
 async def test_get_load_config_async(model_id: str, caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     async with AsyncClient() as client:
-        response = await client.embedding._get_load_config(model_id)
+        model = await client.embedding.model(model_id)
+        response = await model.get_load_config()
     logging.info(f"Load config response: {response}")
     assert response
     assert isinstance(response, EmbeddingLoadModelConfig)

tests/async/test_llm_async.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,8 @@ async def test_context_length_async(model_id: str, caplog: LogCap) -> None:
 async def test_get_load_config_async(model_id: str, caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     async with AsyncClient() as client:
-        response = await client.llm._get_load_config(model_id)
+        model = await client.llm.model(model_id)
+        response = await model.get_load_config()
     logging.info(f"Load config response: {response}")
     assert response
     assert isinstance(response, LlmLoadModelConfig)

tests/sync/test_embedding_sync.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,8 @@ def test_context_length_sync(model_id: str, caplog: LogCap) -> None:
 def test_get_load_config_sync(model_id: str, caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     with Client() as client:
-        response = client.embedding._get_load_config(model_id)
+        model = client.embedding.model(model_id)
+        response = model.get_load_config()
     logging.info(f"Load config response: {response}")
     assert response
     assert isinstance(response, EmbeddingLoadModelConfig)

tests/sync/test_llm_sync.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,8 @@ def test_context_length_sync(model_id: str, caplog: LogCap) -> None:
 def test_get_load_config_sync(model_id: str, caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     with Client() as client:
-        response = client.llm._get_load_config(model_id)
+        model = client.llm.model(model_id)
+        response = model.get_load_config()
     logging.info(f"Load config response: {response}")
     assert response
     assert isinstance(response, LlmLoadModelConfig)

0 commit comments

Comments (0)