Commit 760331a

Update import convention in examples (#16)
1 parent 4a35383 commit 760331a

8 files changed: +37 −37 lines changed
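
All eight files make the same change: the import alias switches from lm to lms. A minimal sketch of the resulting convention, assuming a running LM Studio instance with an LLM available; the calls are taken from the examples below, the prompt is borrowed from examples/structured-response.py, and the final print is purely illustrative:

import lmstudio as lms  # new convention; previously: import lmstudio as lm

model = lms.llm()  # convenience API: use any available LLM
result = model.respond("Tell me about The Hobbit")
print(result)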

examples/chatbot.py

Lines changed: 3 additions & 3 deletions
@@ -3,10 +3,10 @@
 
 import readline # Enables input line editing
 
-import lmstudio as lm
+import lmstudio as lms
 
-model = lm.llm()
-chat = lm.Chat("You are a task focused AI assistant")
+model = lms.llm()
+chat = lms.Chat("You are a task focused AI assistant")
 
 while True:
     try:

examples/structured-response.py

Lines changed: 3 additions & 3 deletions
@@ -3,15 +3,15 @@
 
 import json
 
-import lmstudio as lm
+import lmstudio as lms
 
-class BookSchema(lm.BaseModel):
+class BookSchema(lms.BaseModel):
     """Structured information about a published book."""
     title: str
     author: str
     year: int
 
-model = lm.llm()
+model = lms.llm()
 
 result = model.respond("Tell me about The Hobbit", response_format=BookSchema)
 book = result.parsed

examples/terminal-sim.py

Lines changed: 2 additions & 2 deletions
@@ -3,9 +3,9 @@
 
 import readline # Enables input line editing
 
-import lmstudio as lm
+import lmstudio as lms
 
-model = lm.llm()
+model = lms.llm()
 console_history = []
 
 while True:

examples/tool-use-multiple.py

Lines changed: 2 additions & 2 deletions
@@ -2,7 +2,7 @@
 """Example script demonstrating agent use of multiple tools."""
 
 import math
-import lmstudio as lm
+import lmstudio as lms
 
 def add(a: int, b: int) -> int:
     """Given two numbers a and b, returns the sum of them."""
@@ -18,7 +18,7 @@ def is_prime(n: int) -> bool:
             return False
     return True
 
-model = lm.llm("qwen2.5-7b-instruct")
+model = lms.llm("qwen2.5-7b-instruct")
 model.act(
     "Is the result of 12345 + 45668 a prime? Think step by step.",
     [add, is_prime],

examples/tool-use.py

Lines changed: 2 additions & 2 deletions
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 """Example script demonstrating agent tool use."""
 
-import lmstudio as lm
+import lmstudio as lms
 
 def multiply(a: float, b: float) -> float:
     """Given two numbers a and b. Returns the product of them."""
     return a * b
 
-model = lm.llm("qwen2.5-7b-instruct")
+model = lms.llm("qwen2.5-7b-instruct")
 model.act(
     "What is the result of 12345 multiplied by 54321?",
     [multiply],

tests/load_models.py

Lines changed: 5 additions & 5 deletions
@@ -4,7 +4,7 @@
 from contextlib import contextmanager
 from typing import Generator
 
-import lmstudio as lm
+import lmstudio as lms
 
 from .support import (
     EXPECTED_EMBEDDING_ID,
@@ -23,27 +23,27 @@
 def print_load_result(model_identifier: str) -> Generator[None, None, None]:
     try:
         yield
-    except lm.LMStudioModelNotFoundError:
+    except lms.LMStudioModelNotFoundError:
         print(f"Load error: {model_identifier!r} is not yet downloaded")
     else:
         print(f"Loaded: {model_identifier!r}")
 
 
-async def _load_llm(client: lm.AsyncClient, model_identifier: str) -> None:
+async def _load_llm(client: lms.AsyncClient, model_identifier: str) -> None:
     with print_load_result(model_identifier):
         await client.llm.load_new_instance(
             model_identifier, config=LLM_LOAD_CONFIG, ttl=None
         )
 
 
-async def _load_embedding_model(client: lm.AsyncClient, model_identifier: str) -> None:
+async def _load_embedding_model(client: lms.AsyncClient, model_identifier: str) -> None:
     with print_load_result(model_identifier):
         await client.embedding.load_new_instance(model_identifier, ttl=None)
 
 
 async def reload_models() -> None:
     await unload_models()
-    async with lm.AsyncClient() as client:
+    async with lms.AsyncClient() as client:
         await asyncio.gather(
             _load_llm(client, EXPECTED_LLM_ID),
             _load_llm(client, EXPECTED_VLM_ID),

tests/test_convenience_api.py

Lines changed: 16 additions & 16 deletions
@@ -3,7 +3,7 @@
 # Note: before testing additional functionality (such as passing configs),
 # this should be migrated to mock-style testing rather than end-to-end
 
-import lmstudio as lm
+import lmstudio as lms
 
 import pytest
 
@@ -17,32 +17,32 @@
 
 @pytest.mark.lmstudio
 def test_get_default_client() -> None:
-    client = lm.get_default_client()
-    assert isinstance(client, lm.Client)
+    client = lms.get_default_client()
+    assert isinstance(client, lms.Client)
 
 
 @pytest.mark.lmstudio
 def test_llm_any() -> None:
-    model = lm.llm()
+    model = lms.llm()
     assert model.identifier in (EXPECTED_LLM_ID, EXPECTED_VLM_ID, TOOL_LLM_ID)
 
 
 @pytest.mark.lmstudio
 @pytest.mark.parametrize("model_id", (EXPECTED_LLM_ID, EXPECTED_VLM_ID, TOOL_LLM_ID))
 def test_llm_specific(model_id: str) -> None:
-    model = lm.llm(model_id)
+    model = lms.llm(model_id)
     assert model.identifier == model_id
 
 
 @pytest.mark.lmstudio
 def test_embedding_any() -> None:
-    model = lm.embedding_model()
+    model = lms.embedding_model()
     assert model.identifier == EXPECTED_EMBEDDING_ID
 
 
 @pytest.mark.lmstudio
 def test_embedding_specific() -> None:
-    model = lm.embedding_model(EXPECTED_EMBEDDING_ID)
+    model = lms.embedding_model(EXPECTED_EMBEDDING_ID)
     assert model.identifier == EXPECTED_EMBEDDING_ID
 
 
@@ -51,34 +51,34 @@ def test_add_temp_file() -> None:
     # API is private until LM Studio file handle support stabilizes
     name = "example-file"
     raw_data = b"raw data"
-    file_handle = lm.sync_api._add_temp_file(raw_data, name)
+    file_handle = lms.sync_api._add_temp_file(raw_data, name)
     assert file_handle.name == name
     assert file_handle.size_bytes == len(raw_data)
 
 
 @pytest.mark.lmstudio
 def test_list_downloaded_models() -> None:
-    all_models = [m.model_key for m in lm.list_downloaded_models()]
-    embedding_models = [m.model_key for m in lm.list_downloaded_models("embedding")]
-    llms = [m.model_key for m in lm.list_downloaded_models("llm")]
+    all_models = [m.model_key for m in lms.list_downloaded_models()]
+    embedding_models = [m.model_key for m in lms.list_downloaded_models("embedding")]
+    llms = [m.model_key for m in lms.list_downloaded_models("llm")]
     assert set(all_models) == (set(embedding_models) | set(llms))
 
 
 @pytest.mark.lmstudio
 def test_list_loaded_models() -> None:
-    all_models = [m.identifier for m in lm.list_loaded_models()]
-    embedding_models = [m.identifier for m in lm.list_loaded_models("embedding")]
-    llms = [m.identifier for m in lm.list_loaded_models("llm")]
+    all_models = [m.identifier for m in lms.list_loaded_models()]
+    embedding_models = [m.identifier for m in lms.list_loaded_models("embedding")]
+    llms = [m.identifier for m in lms.list_loaded_models("llm")]
     assert set(all_models) == (set(embedding_models) | set(llms))
 
 
 @pytest.mark.lmstudio
 def test_list_loaded_embedding_models() -> None:
-    models = [m.identifier for m in lm.list_loaded_models("embedding")]
+    models = [m.identifier for m in lms.list_loaded_models("embedding")]
     assert not (set((EXPECTED_EMBEDDING_ID,)) - set(models))
 
 
 @pytest.mark.lmstudio
 def test_list_loaded_LLMs() -> None:
-    models = [m.identifier for m in lm.list_loaded_models("llm")]
+    models = [m.identifier for m in lms.list_loaded_models("llm")]
     assert not (set((EXPECTED_LLM_ID, EXPECTED_VLM_ID, TOOL_LLM_ID)) - set(models))

tests/unload_models.py

Lines changed: 4 additions & 4 deletions
@@ -1,7 +1,7 @@
 """Unload the models required by the test suite."""
 
 import asyncio
-import lmstudio as lm
+import lmstudio as lms
 
 from .support import (
     EXPECTED_EMBEDDING_ID,
@@ -10,20 +10,20 @@
     TOOL_LLM_ID,
 )
 
-AsyncSessionModel = lm.async_api.AsyncSessionEmbedding | lm.async_api.AsyncSessionLlm
+AsyncSessionModel = lms.async_api.AsyncSessionEmbedding | lms.async_api.AsyncSessionLlm
 
 
 async def _unload_model(session: AsyncSessionModel, model_identifier: str) -> None:
     try:
         await session.unload(model_identifier)
-    except lm.LMStudioModelNotFoundError:
+    except lms.LMStudioModelNotFoundError:
         print(f"Unloaded: {model_identifier!r} (model was not loaded)")
     else:
         print(f"Unloaded: {model_identifier!r}")
 
 
 async def unload_models() -> None:
-    async with lm.AsyncClient() as client:
+    async with lms.AsyncClient() as client:
         await asyncio.gather(
             _unload_model(client.llm, EXPECTED_LLM_ID),
             _unload_model(client.llm, EXPECTED_VLM_ID),
