feat: support o4-mini model (#2247)

This commit is contained in:
Yingjie He
2025-04-17 11:30:46 -07:00
committed by GitHub
parent cb944b51e6
commit 30a4965c17
4 changed files with 8 additions and 11 deletions

View File

@@ -367,11 +367,10 @@ pub fn create_request(
         ));
     }
-    let is_o1 = model_config.model_name.starts_with("o1");
-    let is_o3 = model_config.model_name.starts_with("o3");
+    let is_ox_model = model_config.model_name.starts_with("o");
     // Only extract reasoning effort for O1/O3 models
-    let (model_name, reasoning_effort) = if is_o1 || is_o3 {
+    let (model_name, reasoning_effort) = if is_ox_model {
         let parts: Vec<&str> = model_config.model_name.split('-').collect();
         let last_part = parts.last().unwrap();
@@ -391,7 +390,7 @@ pub fn create_request(
     };
     let system_message = json!({
-        "role": if is_o1 || is_o3 { "developer" } else { "system" },
+        "role": if is_ox_model { "developer" } else { "system" },
         "content": system
     });
@@ -427,7 +426,7 @@ pub fn create_request(
             .insert("tools".to_string(), json!(tools_spec));
     }
     // o1, o3 models currently don't support temperature
-    if !is_o1 && !is_o3 {
+    if !is_ox_model {
         if let Some(temp) = model_config.temperature {
             payload
                 .as_object_mut()
@@ -438,7 +437,7 @@ pub fn create_request(
     // o1 models use max_completion_tokens instead of max_tokens
     if let Some(tokens) = model_config.max_tokens {
-        let key = if is_o1 || is_o3 {
+        let key = if is_ox_model {
             "max_completion_tokens"
         } else {
             "max_tokens"

View File

@@ -17,10 +17,6 @@ use url::Url;
 pub const GOOGLE_API_HOST: &str = "https://generativelanguage.googleapis.com";
 pub const GOOGLE_DEFAULT_MODEL: &str = "gemini-2.0-flash";
 pub const GOOGLE_KNOWN_MODELS: &[&str] = &[
-    "gemini-1.5-pro-latest",
-    "gemini-1.5-pro",
-    "gemini-1.5-flash-latest",
-    "gemini-1.5-flash",
     "gemini-2.0-flash",
     "gemini-2.0-flash-lite-preview-02-05",
     "gemini-2.0-flash-thinking-exp-01-21",

View File

@@ -20,6 +20,8 @@ pub const OPEN_AI_KNOWN_MODELS: &[&str] = &[
     "gpt-4-turbo",
     "gpt-3.5-turbo",
     "o1",
+    "o3",
+    "o4-mini",
 ];
 pub const OPEN_AI_DOC_URL: &str = "https://platform.openai.com/docs/models";

View File

@@ -194,7 +194,7 @@ export default function ExtensionsSection({
           </p>
         </div>
-        <div className={'border-b border-borderSubtle pb-8'}>
+        <div className="border-b border-borderSubtle pb-8">
           <ExtensionList
             extensions={extensions}
             onToggle={handleExtensionToggle}