
chore: change ollama model versions in CI #59

Open · wants to merge 4 commits into main
33 changes: 16 additions & 17 deletions .github/workflows/ci.yaml
@@ -108,20 +108,19 @@ jobs:
run: |
set -e
cargo binstall --force --locked cargo-component@0.21.1
cargo binstall golem-cli@1.2.3 --locked --force --no-confirm
cargo binstall wac-cli --locked --force --no-confirm
- name: Start Ollama in Docker
run: |
set -e
docker run -d --name ollama -p 11434:11434 ollama/ollama:latest
timeout 60 bash -c 'until curl -f http://localhost:11434/api/version; do sleep 2; done'
echo "Pulling Qwen2.5:1.5b"
docker exec ollama ollama pull qwen2.5:1.5b
echo "Pulling Gemma2:2b"
docker exec ollama ollama pull gemma2:2b
echo "Pulling Qwen3:1.7b"
docker exec ollama ollama pull qwen3:1.7b
echo "Pulling Gemma3:4b"
docker exec ollama ollama pull gemma3:4b
echo "Verifying models are available"
docker exec ollama ollama list | grep -q "qwen2.5:1.5b" || exit 1
docker exec ollama ollama list | grep -q "gemma2:2b" || exit 1
docker exec ollama ollama list | grep -q "qwen3:1.7b" || exit 1
docker exec ollama ollama list | grep -q "gemma3:4b" || exit 1
echo "Ollama setup completed."
- name: Install and Run latest Golem Server
run: |
@@ -139,16 +138,16 @@
set -e
cargo make --cwd llm build-ollama
cd test
golem-cli app build -b ollama-debug
golem-cli app deploy -b ollama-debug
golem-cli worker new -e GOLEM_OLLAMA_BASE_URL=http://localhost:11434 test:llm/ollama-1
golem-cli worker invoke test:llm/ollama-1 test1
golem-cli worker invoke test:llm/ollama-1 test2
golem-cli worker invoke test:llm/ollama-1 test3
golem-cli worker invoke test:llm/ollama-1 test4
golem-cli worker invoke test:llm/ollama-1 test5
golem-cli worker invoke test:llm/ollama-1 test6
golem-cli worker invoke test:llm/ollama-1 test7
golem app build -b ollama-debug
golem app deploy -b ollama-debug
golem worker new -e GOLEM_OLLAMA_BASE_URL=http://localhost:11434 test:llm/ollama-1
golem worker invoke test:llm/ollama-1 test1 | grep -v "ERROR: " || exit 1
golem worker invoke test:llm/ollama-1 test2 | grep -v "ERROR: " || exit 1
golem worker invoke test:llm/ollama-1 test3 | grep -v "ERROR: " || exit 1
golem worker invoke test:llm/ollama-1 test4 | grep -v "ERROR: " || exit 1
golem worker invoke test:llm/ollama-1 test5 | grep -v "ERROR: " || exit 1
golem worker invoke test:llm/ollama-1 test6 | grep -v "ERROR: " || exit 1
golem worker invoke test:llm/ollama-1 test7 | grep -v "ERROR: " || exit 1
publish-all:
needs:
- tests
2 changes: 1 addition & 1 deletion llm/anthropic/src/conversions.rs
@@ -130,7 +130,7 @@ pub fn process_response(response: MessagesResponse) -> ChatEvent {
Err(e) => {
return ChatEvent::Error(Error {
code: ErrorCode::InvalidRequest,
message: format!("Failed to decode base64 image data: {}", e),
message: format!("Failed to decode base64 image data: {e}"),
provider_error_json: None,
});
}
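
Most of the Rust changes in this PR switch to inline format arguments (stabilized in Rust 1.58), where the braces capture an identifier from the surrounding scope instead of taking a positional argument. A minimal standalone sketch of the equivalence:

fn main() {
    let e = "invalid padding";

    // Positional argument, as in the removed lines.
    let old_style = format!("Failed to decode base64 image data: {}", e);

    // Inline captured identifier, as in the added lines; the output is identical.
    let new_style = format!("Failed to decode base64 image data: {e}");

    assert_eq!(old_style, new_style);
    println!("{new_style}");
}
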
2 changes: 1 addition & 1 deletion llm/grok/src/conversions.rs
@@ -183,7 +183,7 @@ fn convert_content_parts(contents: Vec<ContentPart>) -> crate::client::Content {
let media_type = &image_source.mime_type; // This is already a string
result.push(crate::client::ContentPart::ImageInput {
image_url: crate::client::ImageUrl {
url: format!("data:{};base64,{}", media_type, base64_data),
url: format!("data:{media_type};base64,{base64_data}"),
detail: image_source.detail.map(|d| d.into()),
},
});
2 changes: 1 addition & 1 deletion llm/llm/src/event_source/ndjson_stream.rs
@@ -126,7 +126,7 @@ fn try_parse_line(
return Ok(None);
}

trace!("Parsed NDJSON line: {}", line);
trace!("Parsed NDJSON line: {line}");

// Create a MessageEvent with the JSON line as data
let event = MessageEvent {
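
For readers unfamiliar with the NDJSON handling that `try_parse_line` belongs to, here is a hedged standalone sketch of the same idea: each non-empty line of the stream is one JSON document. It assumes the serde_json crate; the `MessageEvent` struct below is a simplified stand-in, not the crate's actual type.

use serde_json::Value;

// Simplified stand-in for the crate's MessageEvent; the single `data` field is an assumption.
#[derive(Debug)]
struct MessageEvent {
    data: String,
}

// Parse one NDJSON line: blank lines are skipped, valid JSON becomes an event,
// malformed JSON is surfaced as an error.
fn try_parse_line(line: &str) -> Result<Option<MessageEvent>, serde_json::Error> {
    let line = line.trim();
    if line.is_empty() {
        return Ok(None);
    }
    // Validate that the line is well-formed JSON before wrapping it as event data.
    let _parsed: Value = serde_json::from_str(line)?;
    Ok(Some(MessageEvent { data: line.to_string() }))
}

fn main() {
    let body = "{\"status\":\"ok\"}\n\n{\"chunk\":1}";
    for line in body.lines() {
        match try_parse_line(line) {
            Ok(Some(event)) => println!("event: {event:?}"),
            Ok(None) => println!("blank line skipped"),
            Err(err) => eprintln!("parse error: {err}"),
        }
    }
}
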
6 changes: 3 additions & 3 deletions llm/llm/src/event_source/stream.rs
@@ -56,9 +56,9 @@ where
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Utf8(err) => f.write_fmt(format_args!("UTF8 error: {}", err)),
Self::Parser(err) => f.write_fmt(format_args!("Parse error: {}", err)),
Self::Transport(err) => f.write_fmt(format_args!("Transport error: {}", err)),
Self::Utf8(err) => f.write_fmt(format_args!("UTF8 error: {err}")),
Self::Parser(err) => f.write_fmt(format_args!("Parse error: {err}")),
Self::Transport(err) => f.write_fmt(format_args!("Transport error: {err}")),
}
}
}
2 changes: 1 addition & 1 deletion llm/ollama/src/client.rs
@@ -335,7 +335,7 @@ pub fn image_to_base64(source: &str) -> Result<String, Box<dyn std::error::Error
pub fn from_reqwest_error(context: &str, err: reqwest::Error) -> Error {
Error {
code: ErrorCode::InternalError,
message: format!("{}: {}", context, err),
message: format!("{context}: {err}"),
provider_error_json: None,
}
}
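
The `from_reqwest_error` helper above wraps a transport failure with a context string before returning it as the provider's `Error`. A sketch of the same pattern, with stand-in `Error`/`ErrorCode` types mirroring the fields in the diff and `std::io::Error` substituting for `reqwest::Error` so the example compiles without extra dependencies:

use std::fmt::Display;

// Stand-ins for the crate's ErrorCode and Error; field names mirror the diff above.
#[derive(Debug)]
enum ErrorCode {
    InternalError,
}

#[derive(Debug)]
struct Error {
    code: ErrorCode,
    message: String,
    provider_error_json: Option<String>,
}

// Attach a human-readable context string to any displayable error.
fn from_transport_error(context: &str, err: impl Display) -> Error {
    Error {
        code: ErrorCode::InternalError,
        message: format!("{context}: {err}"),
        provider_error_json: None,
    }
}

fn main() {
    let io_err = std::io::Error::new(std::io::ErrorKind::TimedOut, "connection timed out");
    let mapped = from_transport_error("Failed to reach Ollama", io_err);
    println!("{mapped:?}");
}
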
2 changes: 1 addition & 1 deletion llm/ollama/src/conversions.rs
@@ -214,7 +214,7 @@ pub fn process_response(response: CompletionsResponse) -> ChatEvent {
};

ChatEvent::Message(CompleteResponse {
id: format!("ollama-{}", timestamp),
id: format!("ollama-{timestamp}"),
content,
tool_calls,
metadata,
2 changes: 1 addition & 1 deletion llm/openai/src/conversions.rs
@@ -138,7 +138,7 @@ pub fn content_part_to_inner_input_item(content_part: ContentPart) -> InnerInput
ImageReference::Inline(image_source) => {
let base64_data = general_purpose::STANDARD.encode(&image_source.data);
let mime_type = &image_source.mime_type; // This is already a string
let data_url = format!("data:{};base64,{}", mime_type, base64_data);
let data_url = format!("data:{mime_type};base64,{base64_data}");

InnerInputItem::ImageInput {
image_url: data_url,
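
The OpenAI, Grok, and OpenRouter converters all embed inline images as `data:` URLs. A minimal sketch of that encoding, assuming the `base64` crate's 0.21+ engine API (the same `general_purpose::STANDARD.encode` call used in the diff); the byte payload and MIME type here are placeholders:

use base64::{engine::general_purpose, Engine as _};

// Build a data URL from raw image bytes and a MIME type string.
fn to_data_url(mime_type: &str, data: &[u8]) -> String {
    let base64_data = general_purpose::STANDARD.encode(data);
    format!("data:{mime_type};base64,{base64_data}")
}

fn main() {
    // Placeholder bytes; in the converters the payload comes from the image source's data field.
    let png_magic = [0x89u8, 0x50, 0x4E, 0x47];
    let url = to_data_url("image/png", &png_magic);
    assert!(url.starts_with("data:image/png;base64,"));
    println!("{url}");
}
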
2 changes: 1 addition & 1 deletion llm/openrouter/src/conversions.rs
@@ -184,7 +184,7 @@ fn convert_content_parts(contents: Vec<ContentPart>) -> crate::client::Content {
let media_type = &image_source.mime_type; // This is already a string
result.push(crate::client::ContentPart::ImageInput {
image_url: crate::client::ImageUrl {
url: format!("data:{};base64,{}", media_type, base64_data),
url: format!("data:{media_type};base64,{base64_data}"),
detail: image_source.detail.map(|d| d.into()),
},
});
2 changes: 1 addition & 1 deletion test/components-rust/test-llm/Cargo.toml
@@ -37,8 +37,8 @@ path = "wit-generated"

[package.metadata.component.target.dependencies]
"golem:llm" = { path = "wit-generated/deps/golem-llm" }
"wasi:clocks" = { path = "wit-generated/deps/clocks" }
"wasi:io" = { path = "wit-generated/deps/io" }
"wasi:clocks" = { path = "wit-generated/deps/clocks" }
"golem:rpc" = { path = "wit-generated/deps/golem-rpc" }
"test:helper-client" = { path = "wit-generated/deps/test_helper-client" }
"test:llm-exports" = { path = "wit-generated/deps/test_llm-exports" }