Mirror of https://github.com/aljazceru/goose.git (synced 2025-12-18 14:44:21 +01:00)
docs: update Snowflake link and description verbiage (#2882)
@@ -19,7 +19,7 @@ pub const SNOWFLAKE_DEFAULT_MODEL: &str = "claude-3-7-sonnet";
 pub const SNOWFLAKE_KNOWN_MODELS: &[&str] = &["claude-3-7-sonnet", "claude-3-5-sonnet"];
 
 pub const SNOWFLAKE_DOC_URL: &str =
-    "https://docs.snowflake.com/en/user-guide/snowflake-cortex/llm-functions#choosing-a-model";
+    "https://docs.snowflake.com/user-guide/snowflake-cortex/aisql#choosing-a-model";
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub enum SnowflakeAuth {
@@ -399,7 +399,7 @@ impl Provider for SnowflakeProvider {
         ProviderMetadata::new(
             "snowflake",
             "Snowflake",
-            "Access several models using Snowflake Cortex services.",
+            "Access the latest models using Snowflake Cortex services.",
             SNOWFLAKE_DEFAULT_MODEL,
             SNOWFLAKE_KNOWN_MODELS.to_vec(),
             SNOWFLAKE_DOC_URL,
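For context, the two hunks above only touch the constants and the metadata description; the sketch below is a standalone, hypothetical illustration (not part of this commit or of the Goose crate) of how those constants relate: the advertised default model is expected to appear in the known-models list, and the doc URL is where the available Cortex models are documented.

```rust
// Standalone sketch using the constants shown in the diff above; the actual
// ProviderMetadata wiring lives in the Goose crate and is not reproduced here.
const SNOWFLAKE_DEFAULT_MODEL: &str = "claude-3-7-sonnet";
const SNOWFLAKE_KNOWN_MODELS: &[&str] = &["claude-3-7-sonnet", "claude-3-5-sonnet"];
const SNOWFLAKE_DOC_URL: &str =
    "https://docs.snowflake.com/user-guide/snowflake-cortex/aisql#choosing-a-model";

fn main() {
    // The default model advertised in the provider metadata should also be
    // one of the known models exposed to the user.
    assert!(SNOWFLAKE_KNOWN_MODELS.contains(&SNOWFLAKE_DEFAULT_MODEL));
    println!("known models: {SNOWFLAKE_KNOWN_MODELS:?}");
    println!("default: {SNOWFLAKE_DEFAULT_MODEL} (see {SNOWFLAKE_DOC_URL})");
}
```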
@@ -30,7 +30,7 @@ Goose relies heavily on tool calling capabilities and currently works best with
 | [Ollama](https://ollama.com/) | Local model runner supporting Qwen, Llama, DeepSeek, and other open-source models. **Because this provider runs locally, you must first [download and run a model](/docs/getting-started/providers#local-llms-ollama).** | `OLLAMA_HOST` |
 | [OpenAI](https://platform.openai.com/api-keys) | Provides gpt-4o, o1, and other advanced language models. Also supports OpenAI-compatible endpoints (e.g., self-hosted LLaMA, vLLM, KServe). **o1-mini and o1-preview are not supported because Goose uses tool calling.** | `OPENAI_API_KEY`, `OPENAI_HOST` (optional), `OPENAI_ORGANIZATION` (optional), `OPENAI_PROJECT` (optional), `OPENAI_CUSTOM_HEADERS` (optional) |
 | [OpenRouter](https://openrouter.ai/) | API gateway for unified access to various models with features like rate-limiting management. | `OPENROUTER_API_KEY` |
-| [Snowflake](https://docs.snowflake.com/en/user-guide/snowflake-cortex/llm-functions#choosing-a-model) | Access several models using Snowflake Cortex services, including Claude models. **Requires a Snowflake account and programmatic access token (PAT)**. | `SNOWFLAKE_HOST`, `SNOWFLAKE_TOKEN` |
+| [Snowflake](https://docs.snowflake.com/user-guide/snowflake-cortex/aisql#choosing-a-model) | Access the latest models using Snowflake Cortex services, including Claude models. **Requires a Snowflake account and programmatic access token (PAT)**. | `SNOWFLAKE_HOST`, `SNOWFLAKE_TOKEN` |
 | [Venice AI](https://venice.ai/home) | Provides access to open source models like Llama, Mistral, and Qwen while prioritizing user privacy. **Requires an account and an [API key](https://docs.venice.ai/overview/guides/generating-api-key)**. | `VENICE_API_KEY`, `VENICE_HOST` (optional), `VENICE_BASE_PATH` (optional), `VENICE_MODELS_PATH` (optional) |
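The table row documents that the Snowflake provider is configured through just two environment variables. As an illustration only, a minimal sketch of reading them might look like the following; the `snowflake_settings` helper and its error handling are hypothetical and not taken from the Goose codebase.

```rust
use std::env;

// Hypothetical helper: it only demonstrates that the provider is driven by
// SNOWFLAKE_HOST and a programmatic access token (PAT) in SNOWFLAKE_TOKEN,
// as documented in the table above.
fn snowflake_settings() -> Result<(String, String), env::VarError> {
    let host = env::var("SNOWFLAKE_HOST")?;   // e.g. "<account>.snowflakecomputing.com"
    let token = env::var("SNOWFLAKE_TOKEN")?; // the PAT; never hard-code or log it
    Ok((host, token))
}

fn main() {
    match snowflake_settings() {
        Ok((host, token)) => println!("Snowflake host {host}, PAT of {} chars", token.len()),
        Err(_) => eprintln!("set SNOWFLAKE_HOST and SNOWFLAKE_TOKEN to use this provider"),
    }
}
```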
@@ -221,7 +221,7 @@ export const PROVIDER_REGISTRY: ProviderRegistry[] = [
     details: {
       id: 'snowflake',
       name: 'Snowflake',
-      description: 'Access Cortex models hosted on your Snowflake account',
+      description: 'Access the latest models using Snowflake Cortex services.',
       parameters: [
         {
           name: 'SNOWFLAKE_HOST',