Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit aa149df

Browse files
chore: use non-cuda version for e2e tests (#2112)
Co-authored-by: sangjanai <sang@jan.ai>
1 parent 7c3788d commit aa149df

File tree

3 files changed

+3
-3
lines changed

3 files changed: +3 additions, -3 deletions

engine/e2e-test/api/engines/test_api_engine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ def test_engines_get_llamacpp_should_be_successful(self):
2828

2929
# engines install
3030
def test_engines_install_llamacpp_specific_version_and_variant(self):
31-
data = {"version": "v0.1.40-b4354", "variant": "linux-amd64-avx-cuda-11-7"}
31+
data = {"version": "v0.1.40-b4354", "variant": "linux-amd64-avx"}
3232
response = requests.post(
3333
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
3434
)

engine/e2e-test/api/engines/test_api_engine_install_nightly.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ def test_engines_install_llamacpp_should_be_successful(self):
2323
assert response.status_code == 200
2424

2525
def test_engines_install_llamacpp_specific_version_and_variant(self):
26-
data = {"version": latest_pre_release_tag, "variant": "linux-amd64-avx-cuda-11-7"}
26+
data = {"version": latest_pre_release_tag, "variant": "linux-amd64-avx"}
2727
response = requests.post(
2828
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
2929
)

engine/e2e-test/api/engines/test_api_get_default_engine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def setup_and_teardown(self):
2424
def test_api_get_default_engine_successfully(self):
2525
# Data test
2626
engine= "llama-cpp"
27-
name= "linux-amd64-avx-cuda-11-7"
27+
name= "linux-amd64-avx"
2828
version= "v0.1.35-27.10.24"
2929

3030
data = {"version": version, "variant": name}

0 commit comments

Comments (0)