
Commit e41e8ab

format

1 parent 9370378

File tree

8 files changed: 41 additions & 28 deletions

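The diffs below are formatting-only: within each import group, names that start with an uppercase letter (Client, Level, StreamExt) are re-sorted ahead of lowercase paths, and long vec![...] and tuple arguments are broken onto their own lines with trailing commas. This ordering is consistent with rustfmt's 2024 style edition, but the commit does not show how the formatter was invoked, so the sketch below is only one assumed way to reproduce such a pass, not the repository's confirmed setup.

    # rustfmt.toml (assumed, not part of this commit)
    # Opt in to the 2024 style edition, which sorts imports ASCII-wise
    # (uppercase before lowercase) and reflows long expressions.
    style_edition = "2024"

    # Then reformat every crate in the workspace:
    cargo fmt --all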

async-openai-wasm/tests/chat_completion.rs

Lines changed: 15 additions & 11 deletions
@@ -1,8 +1,8 @@
+use async_openai_wasm::Client;
 use async_openai_wasm::config::OpenAIConfig;
 use async_openai_wasm::types::{
     ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs,
 };
-use async_openai_wasm::Client;
 use futures::StreamExt;
 use serde_json::json;

@@ -26,11 +26,13 @@ async fn test_chat_completion_reasoning() {
             .with_api_key(test_key),
     );
     let request = CreateChatCompletionRequestArgs::default()
-        .messages(vec![ChatCompletionRequestUserMessageArgs::default()
-            .content("Hello! Do you know the Rust programming language?")
-            .build()
-            .unwrap()
-            .into()])
+        .messages(vec![
+            ChatCompletionRequestUserMessageArgs::default()
+                .content("Hello! Do you know the Rust programming language?")
+                .build()
+                .unwrap()
+                .into(),
+        ])
         .model("deepseek/deepseek-r1")
         // The extra params that OpenRouter requires to get reasoning content
         // See https://openrouter.ai/docs/api-reference/parameters#include-reasoning
@@ -66,11 +68,13 @@ async fn test_chat_completion_reasoning_stream() {
             .with_api_key(test_key),
     );
     let request = CreateChatCompletionRequestArgs::default()
-        .messages(vec![ChatCompletionRequestUserMessageArgs::default()
-            .content("Hello! Do you know the Rust programming language?")
-            .build()
-            .unwrap()
-            .into()])
+        .messages(vec![
+            ChatCompletionRequestUserMessageArgs::default()
+                .content("Hello! Do you know the Rust programming language?")
+                .build()
+                .unwrap()
+                .into(),
+        ])
         .model("deepseek/deepseek-r1")
         // The extra params that OpenRouter requires to get reasoning content
         // See https://openrouter.ai/docs/api-reference/parameters#include-reasoning

async-openai-wasm/tests/whisper.rs

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 use tokio_test::assert_err;

 use async_openai_wasm::types::CreateTranslationRequestArgs;
-use async_openai_wasm::{types::CreateTranscriptionRequestArgs, Client};
+use async_openai_wasm::{Client, types::CreateTranscriptionRequestArgs};

 #[tokio::test]
 async fn transcribe_test() {

examples/cloudflare-wasm-worker/src/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 use async_openai_wasm::config::OpenAIConfig;
 use async_openai_wasm::{
-    types::{ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs},
     Client,
+    types::{ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs},
 };
 use serde::{Deserialize, Serialize};
 use worker::*;

examples/openai-web-app-assistant/src/main.rs

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 #![allow(non_snake_case)]

 use dioxus::prelude::*;
-use dioxus_logger::tracing::{error, info, Level};
+use dioxus_logger::tracing::{Level, error, info};
 use futures::stream::StreamExt;

 use async_openai_wasm::types::{

examples/openai-web-app-assistant/src/utils.rs

Lines changed: 1 addition & 1 deletion
@@ -1,10 +1,10 @@
 use crate::{API_BASE, API_KEY};
+use async_openai_wasm::Client;
 use async_openai_wasm::config::OpenAIConfig;
 use async_openai_wasm::types::{
     AssistantStreamEvent, CreateAssistantRequest, CreateAssistantRequestArgs, FunctionObject,
     MessageDeltaContent, RunObject, SubmitToolOutputsRunRequest, ToolsOutputs,
 };
-use async_openai_wasm::Client;
 use dioxus::prelude::Signal;
 use dioxus::prelude::*;
 use futures::StreamExt;

examples/openai-web-app-chat/src/main.rs

Lines changed: 2 additions & 2 deletions
@@ -1,13 +1,13 @@
 #![allow(non_snake_case)]

+use async_openai_wasm::Client;
 use async_openai_wasm::config::OpenAIConfig;
 use async_openai_wasm::types::{
     ChatCompletionRequestMessage, ChatCompletionRequestUserMessageArgs,
     CreateChatCompletionRequestArgs,
 };
-use async_openai_wasm::Client;
 use dioxus::prelude::*;
-use dioxus_logger::tracing::{error, info, Level};
+use dioxus_logger::tracing::{Level, error, info};
 use futures::stream::StreamExt;

 const API_BASE: &str = "...";

examples/realtime/src/main.rs

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ use std::process::exit;
 use async_openai_wasm::types::realtime::{
     ConversationItemCreateEvent, Item, ResponseCreateEvent, ServerEvent, ToText,
 };
-use futures_util::{future, pin_mut, StreamExt};
+use futures_util::{StreamExt, future, pin_mut};

 use tokio::io::AsyncReadExt;
 use tokio_tungstenite::{

examples/reasoning/src/main.rs

Lines changed: 19 additions & 10 deletions
@@ -1,8 +1,8 @@
+use async_openai_wasm::Client;
 use async_openai_wasm::config::OpenAIConfig;
 use async_openai_wasm::types::{
     ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs,
 };
-use async_openai_wasm::Client;
 use futures::StreamExt;
 use serde_json::json;

@@ -13,26 +13,35 @@ const DEEPSEEK_REASONING_KEY: &str = "reasoning_content";
 const DEEPSEEK_BASEURL: &str = "https://api.deepseek.com";
 const DEEPSEEK_MODEL_NAME: &str = "deepseek-reasoner";

-
 #[tokio::main(flavor = "current_thread")]
 async fn main() {
     let test_key = std::env::var("TEST_API_KEY").unwrap();
     let use_deepseek = std::env::var("USE_DEEPSEEK").is_ok();
     let (reasoning_key, base_url, model_name) = if use_deepseek {
-        (DEEPSEEK_REASONING_KEY, DEEPSEEK_BASEURL, DEEPSEEK_MODEL_NAME)
+        (
+            DEEPSEEK_REASONING_KEY,
+            DEEPSEEK_BASEURL,
+            DEEPSEEK_MODEL_NAME,
+        )
     } else {
-        (OPENROUTER_REASONING_KEY, OPENROUTER_BASEURL, OPENROUTER_MODEL_NAME)
+        (
+            OPENROUTER_REASONING_KEY,
+            OPENROUTER_BASEURL,
+            OPENROUTER_MODEL_NAME,
+        )
     };
     let client = Client::with_config(
         OpenAIConfig::new()
             .with_api_base(base_url)
             .with_api_key(test_key),
     );
-    let messages = vec![ChatCompletionRequestUserMessageArgs::default()
-        .content("Hello! Do you know the Rust programming language?")
-        .build()
-        .unwrap()
-        .into()];
+    let messages = vec![
+        ChatCompletionRequestUserMessageArgs::default()
+            .content("Hello! Do you know the Rust programming language?")
+            .build()
+            .unwrap()
+            .into(),
+    ];
     let request = if use_deepseek {
         CreateChatCompletionRequestArgs::default()
             .messages(messages)
@@ -70,4 +79,4 @@ async fn main() {
     }
     assert!(reasoning.len() > 0);
     println!("Reasoning:\n{reasoning}");
-}
+}

0 commit comments
