
Commit a4c74a5

Make file preparation API public (#37)
1 parent 5aeb367 commit a4c74a5

7 files changed: 50 additions & 53 deletions


src/lmstudio/async_api.py

Lines changed: 6 additions & 6 deletions
@@ -35,7 +35,7 @@
     Chat,
     ChatHistoryDataDict,
     FileHandle,
-    _FileCacheInputType,
+    LocalFileInput,
     _LocalFileData,
 )
 from .json_api import (
@@ -590,8 +590,8 @@ async def _fetch_file_handle(self, file_data: _LocalFileData) -> FileHandle:
         return load_struct(handle, FileHandle)

     @sdk_public_api_async()
-    async def _add_temp_file(
-        self, src: _FileCacheInputType, name: str | None = None
+    async def prepare_file(
+        self, src: LocalFileInput, name: str | None = None
     ) -> FileHandle:
         """Add a file to the server."""
         # Private until LM Studio file handle support stabilizes
@@ -1502,12 +1502,12 @@ def repository(self) -> AsyncSessionRepository:

     # Convenience methods
     @sdk_public_api_async()
-    async def _add_temp_file(
-        self, src: _FileCacheInputType, name: str | None = None
+    async def prepare_file(
+        self, src: LocalFileInput, name: str | None = None
     ) -> FileHandle:
         """Add a file to the server."""
         # Private until LM Studio file handle support stabilizes
-        return await self._files._add_temp_file(src, name)
+        return await self._files.prepare_file(src, name)

     @sdk_public_api_async()
     async def list_downloaded_models(
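
Usage sketch for the now-public async method (illustrative, not part of this diff): prepare_file() uploads a local file and returns a FileHandle that can be attached to a chat. AsyncClient, Chat, the images= keyword, and client.llm.model() all appear in this changeset's tests; the import locations, file path, model identifier, and the respond() call are assumptions.

import asyncio

from lmstudio import AsyncClient, Chat

async def main() -> None:
    async with AsyncClient() as client:
        # Renamed from _add_temp_file(); returns a FileHandle for the uploaded file
        file_handle = await client.prepare_file("lemmy.png")  # hypothetical path
        chat = Chat()
        chat.add_user_message("What do you make of this?", images=[file_handle])
        vlm = await client.llm.model("qwen2-vl-2b-instruct")  # assumed VLM identifier
        print(await vlm.respond(chat))  # respond() is assumed, not shown in this diff

asyncio.run(main())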

src/lmstudio/history.py

Lines changed: 10 additions & 11 deletions
@@ -370,10 +370,8 @@ def add_user_message(
         self,
         content: UserMessageInput | Iterable[UserMessageInput],
         *,
+        files: Sequence[FileHandleInput] = (),
         images: Sequence[FileHandleInput] = (),
-        # Mark file parameters as private until LM Studio
-        # file handle support stabilizes
-        _files: Sequence[FileHandleInput] = (),
     ) -> UserMessage:
         """Add a new user message to the chat history."""
         # Accept both singular and multi-part user messages
@@ -383,10 +381,10 @@
         else:
             content_items = list(content)
         # Convert given local file information to file handles
+        if files:
+            content_items.extend(files)
         if images:
             content_items.extend(images)
-        if _files:
-            content_items.extend(_files)
         # Consecutive messages with the same role are not supported,
         # but multi-part user messages are valid (to allow for file
         # attachments), so just merge them
@@ -519,14 +517,16 @@ def add_tool_result(self, result: ToolCallResultInput) -> ToolResultMessage:
         return message


+LocalFileInput = BinaryIO | bytes | str | os.PathLike[str]
+
+
 # Private until file handle caching support is part of the published SDK API
-_FileCacheInputType = BinaryIO | bytes | str | os.PathLike[str]


-def _get_file_details(src: _FileCacheInputType) -> Tuple[str, bytes]:
+def _get_file_details(src: LocalFileInput) -> Tuple[str, bytes]:
     """Read file contents as binary data and generate a suitable default name."""
     if isinstance(src, bytes):
-        # We interpreter bytes as raw data, not a bytes filesystem path
+        # We process bytes as raw data, not a bytes filesystem path
         data = src
         name = str(uuid.uuid4())
     elif hasattr(src, "read"):
@@ -555,14 +555,13 @@ def _get_file_details(src: _FileCacheInputType) -> Tuple[str, bytes]:
 _FileHandleCacheKey: TypeAlias = tuple[str, _ContentHash]


-# Private until file handle caching support is part of the published SDK API
 class _LocalFileData:
     """Local file data to be added to a chat history."""

     name: str
     raw_data: bytes

-    def __init__(self, src: _FileCacheInputType, name: str | None = None) -> None:
+    def __init__(self, src: LocalFileInput, name: str | None = None) -> None:
         default_name, raw_data = _get_file_details(src)
         self.name = name or default_name
         self.raw_data = raw_data
@@ -594,7 +593,7 @@ def __init__(self) -> None:

     @sdk_public_api()
     def _get_file_handle(
-        self, src: _FileCacheInputType, name: str | None = None
+        self, src: LocalFileInput, name: str | None = None
     ) -> FileHandle:
         file_data = _LocalFileData(src, name)
         cache_key = file_data._get_cache_key()
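
Sketch of the corresponding Chat-level change (illustrative): the previously private _files parameter of add_user_message() is now the public files keyword, and the new LocalFileInput alias accepts a binary file object, raw bytes, a path string, or an os.PathLike. The file paths below are assumptions; prepare_file() comes from elsewhere in this changeset, and the top-level Client and Chat imports are assumed.

import lmstudio as lms

with lms.Client() as client:
    # Both attachments go through the now-public prepare_file() helper
    report_handle = client.prepare_file("report.txt")  # hypothetical local file
    chart_handle = client.prepare_file("chart.png")    # hypothetical local file
    chat = lms.Chat()
    chat.add_user_message(
        "Summarize the attached report.",
        files=[report_handle],   # public replacement for the old _files parameter
        images=[chart_handle],
    )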

src/lmstudio/sync_api.py

Lines changed: 6 additions & 10 deletions
@@ -53,7 +53,7 @@
     Chat,
     ChatHistoryDataDict,
     FileHandle,
-    _FileCacheInputType,
+    LocalFileInput,
     _LocalFileData,
     ToolCallRequest,
 )
@@ -765,9 +765,7 @@ def _fetch_file_handle(self, file_data: _LocalFileData) -> FileHandle:
         return load_struct(handle, FileHandle)

     @sdk_public_api()
-    def _add_temp_file(
-        self, src: _FileCacheInputType, name: str | None = None
-    ) -> FileHandle:
+    def prepare_file(self, src: LocalFileInput, name: str | None = None) -> FileHandle:
         """Add a file to the server."""
         # Private until LM Studio file handle support stabilizes
         file_data = _LocalFileData(src, name)
@@ -1820,12 +1818,10 @@ def repository(self) -> SyncSessionRepository:

     # Convenience methods
     @sdk_public_api()
-    def _add_temp_file(
-        self, src: _FileCacheInputType, name: str | None = None
-    ) -> FileHandle:
+    def prepare_file(self, src: LocalFileInput, name: str | None = None) -> FileHandle:
         """Add a file to the server."""
         # Private until LM Studio file handle support stabilizes
-        return self._files._add_temp_file(src, name)
+        return self._files.prepare_file(src, name)

     @sdk_public_api()
     def list_downloaded_models(
@@ -1895,10 +1891,10 @@ def embedding_model(


 @sdk_public_api()
-def _add_temp_file(src: _FileCacheInputType, name: str | None = None) -> FileHandle:
+def prepare_file(src: LocalFileInput, name: str | None = None) -> FileHandle:
     """Add a file to the server using the default global client."""
     # Private until LM Studio file handle support stabilizes
-    return get_default_client()._add_temp_file(src, name)
+    return get_default_client().prepare_file(src, name)


 @sdk_public_api()

tests/async/test_images_async.py

Lines changed: 7 additions & 7 deletions
@@ -24,7 +24,7 @@ async def test_upload_from_pathlike_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     async with AsyncClient() as client:
         session = client._files
-        file = await session._add_temp_file(IMAGE_FILEPATH)
+        file = await session.prepare_file(IMAGE_FILEPATH)
     assert file
     assert isinstance(file, FileHandle)
     logging.info(f"Uploaded file: {file}")
@@ -37,7 +37,7 @@ async def test_upload_from_file_obj_async(caplog: LogCap) -> None:
     async with AsyncClient() as client:
         session = client._files
         with open(IMAGE_FILEPATH, "rb") as f:
-            file = await session._add_temp_file(f)
+            file = await session.prepare_file(f)
     assert file
     assert isinstance(file, FileHandle)
     logging.info(f"Uploaded file: {file}")
@@ -50,7 +50,7 @@ async def test_upload_from_bytesio_async(caplog: LogCap) -> None:
     async with AsyncClient() as client:
         session = client._files
         with open(IMAGE_FILEPATH, "rb") as f:
-            file = await session._add_temp_file(BytesIO(f.read()))
+            file = await session.prepare_file(BytesIO(f.read()))
     assert file
     assert isinstance(file, FileHandle)
     logging.info(f"Uploaded file: {file}")
@@ -64,7 +64,7 @@ async def test_vlm_predict_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = EXPECTED_VLM_ID
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message((prompt, file_handle))
         vlm = await client.llm.model(model_id)
@@ -84,7 +84,7 @@ async def test_non_vlm_predict_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = "hugging-quants/llama-3.2-1b-instruct"
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message((prompt, file_handle))
         llm = await client.llm.model(model_id)
@@ -101,7 +101,7 @@ async def test_vlm_predict_image_param_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = EXPECTED_VLM_ID
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message(prompt, images=[file_handle])
         vlm = await client.llm.model(model_id)
@@ -121,7 +121,7 @@ async def test_non_vlm_predict_image_param_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = "hugging-quants/llama-3.2-1b-instruct"
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message(prompt, images=[file_handle])
         llm = await client.llm.model(model_id)

tests/sync/test_images_sync.py

Lines changed: 7 additions & 7 deletions
@@ -30,7 +30,7 @@ def test_upload_from_pathlike_sync(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     with Client() as client:
         session = client._files
-        file = session._add_temp_file(IMAGE_FILEPATH)
+        file = session.prepare_file(IMAGE_FILEPATH)
     assert file
     assert isinstance(file, FileHandle)
     logging.info(f"Uploaded file: {file}")
@@ -42,7 +42,7 @@ def test_upload_from_file_obj_sync(caplog: LogCap) -> None:
     with Client() as client:
         session = client._files
         with open(IMAGE_FILEPATH, "rb") as f:
-            file = session._add_temp_file(f)
+            file = session.prepare_file(f)
     assert file
     assert isinstance(file, FileHandle)
     logging.info(f"Uploaded file: {file}")
@@ -54,7 +54,7 @@ def test_upload_from_bytesio_sync(caplog: LogCap) -> None:
     with Client() as client:
         session = client._files
         with open(IMAGE_FILEPATH, "rb") as f:
-            file = session._add_temp_file(BytesIO(f.read()))
+            file = session.prepare_file(BytesIO(f.read()))
     assert file
     assert isinstance(file, FileHandle)
     logging.info(f"Uploaded file: {file}")
@@ -67,7 +67,7 @@ def test_vlm_predict_sync(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = EXPECTED_VLM_ID
     with Client() as client:
-        file_handle = client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message((prompt, file_handle))
         vlm = client.llm.model(model_id)
@@ -86,7 +86,7 @@ def test_non_vlm_predict_sync(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = "hugging-quants/llama-3.2-1b-instruct"
     with Client() as client:
-        file_handle = client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message((prompt, file_handle))
         llm = client.llm.model(model_id)
@@ -102,7 +102,7 @@ def test_vlm_predict_image_param_sync(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = EXPECTED_VLM_ID
     with Client() as client:
-        file_handle = client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message(prompt, images=[file_handle])
         vlm = client.llm.model(model_id)
@@ -121,7 +121,7 @@ def test_non_vlm_predict_image_param_sync(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = "hugging-quants/llama-3.2-1b-instruct"
     with Client() as client:
-        file_handle = client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message(prompt, images=[file_handle])
         llm = client.llm.model(model_id)

tests/test_convenience_api.py

Lines changed: 2 additions & 2 deletions
@@ -47,11 +47,11 @@ def test_embedding_specific() -> None:


 @pytest.mark.lmstudio
-def test_add_temp_file() -> None:
+def test_prepare_file() -> None:
     # API is private until LM Studio file handle support stabilizes
     name = "example-file"
     raw_data = b"raw data"
-    file_handle = lms.sync_api._add_temp_file(raw_data, name)
+    file_handle = lms.sync_api.prepare_file(raw_data, name)
     assert file_handle.name == name
     assert file_handle.size_bytes == len(raw_data)

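
The same operation through the module-level convenience wrapper exercised above (a minimal sketch; it goes through the default global client returned by get_default_client()):

import lmstudio as lms

# Mirrors test_prepare_file(): raw bytes in, FileHandle with matching metadata out
file_handle = lms.sync_api.prepare_file(b"raw data", "example-file")
assert file_handle.name == "example-file"
assert file_handle.size_bytes == len(b"raw data")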

tests/test_history.py

Lines changed: 12 additions & 10 deletions
@@ -15,7 +15,7 @@
     AnyChatMessageDict,
     ChatHistoryData,
     ChatHistoryDataDict,
-    _FileCacheInputType,
+    LocalFileInput,
     FileHandle,
     _FileHandleCache,
     FileHandleDict,
@@ -492,7 +492,7 @@ def _make_local_file_cache() -> tuple[_FileHandleCache, list[FileHandle], int]:
     # * files with different names are looked up under both names
     cache = _FileHandleCache()
     num_unique_files = 3
-    files_to_cache: list[tuple[_FileCacheInputType, str | None]] = [
+    files_to_cache: list[tuple[LocalFileInput, str | None]] = [
         (b"raw binary data", "raw-binary.txt"),
         (b"raw binary data", "raw-binary.txt"),
         (IMAGE_FILEPATH, None),
@@ -589,20 +589,22 @@ def test_invalid_local_file() -> None:
                 "text": "What do you make of this?",
                 "type": "text",
             },
-            {
-                "fileType": "image",
-                "identifier": "some-image",
-                "name": "lemmy.png",
-                "sizeBytes": 41812,
-                "type": "file",
-            },
+            # Implementation attaches the prepared file handles
+            # before it attaches the prepared image handles
             {
                 "fileType": "text/plain",
                 "identifier": "some-file",
                 "name": "someFile.txt",
                 "sizeBytes": 100,
                 "type": "file",
             },
+            {
+                "fileType": "image",
+                "identifier": "some-image",
+                "name": "lemmy.png",
+                "sizeBytes": 41812,
+                "type": "file",
+            },
         ],
         "role": "user",
     },
@@ -621,7 +623,7 @@ def test_user_message_attachments() -> None:
     chat.add_user_message(
         "What do you make of this?",
         images=[INPUT_IMAGE_HANDLE],
-        _files=[INPUT_FILE_HANDLE],
+        files=[INPUT_FILE_HANDLE],
     )
     history = chat._get_history()
     assert history["messages"] == EXPECTED_USER_ATTACHMENT_MESSAGES
