refactor: prompts for generating config file (#463)

pull/464/head
sigoden authored 3 weeks ago, committed by GitHub
parent 4ddccc361c
commit 4d1c53384b

@@ -75,7 +75,7 @@ clients:
   # See https://github.com/jmorganca/ollama
   - type: ollama
-    api_base: http://localhost:11434
+    api_base: http://localhost:11434 # ENV: {client_name}_API_BASE
     api_auth: Basic xxx # ENV: {client_name}_API_AUTH
     chat_endpoint: /api/chat # Optional
     models: # Required
@@ -84,8 +84,8 @@ clients:
   # See https://learn.microsoft.com/en-us/azure/ai-services/openai/chatgpt-quickstart
   - type: azure-openai
-    api_base: https://{RESOURCE}.openai.azure.com
-    api_key: xxx # ENV: {client_name}_API_BASE
+    api_base: https://{RESOURCE}.openai.azure.com # ENV: {client_name}_API_BASE
+    api_key: xxx # ENV: {client_name}_API_KEY
     models: # Required
       - name: gpt-35-turbo # Model deployment name
         max_input_tokens: 8192
@@ -103,7 +103,7 @@ clients:
   - type: cloudflare
     account_id: xxx # ENV: {client_name}_ACCOUNT_ID
-    api_key: xxx # ENV: {client_name}_API_BASE
+    api_key: xxx # ENV: {client_name}_API_KEY

   # See https://docs.aws.amazon.com/bedrock/latest/userguide/
   - type: bedrock
@@ -134,8 +134,8 @@ clients:
   # - OctoAI: https://octo.ai/docs/text-gen-solution/migration-from-openai
   - type: openai-compatible
     name: localai
-    api_base: http://localhost:8080/v1
-    api_key: sk-xxx # ENV: {client_name}_API_BASE
+    api_base: http://localhost:8080/v1 # ENV: {client_name}_API_BASE
+    api_key: sk-xxx # ENV: {client_name}_API_KEY
     chat_endpoint: /chat/completions # Optional
     models: # Required
       - name: llama3
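The `# ENV: {client_name}_API_KEY` style comments mark settings that can come from environment variables instead of the config file. As a minimal sketch of the naming scheme those comments imply (the helper below is hypothetical; the real lookup lives behind aichat's `config_get_fn!` macro), the client name is upper-cased and joined to the field name:

// Hypothetical illustration of the ENV naming scheme in the comments
// above: "localai" + "api_key" resolves to LOCALAI_API_KEY.
fn env_override(client_name: &str, field: &str) -> Option<String> {
    let key = format!(
        "{}_{}",
        client_name.replace('-', "_").to_ascii_uppercase(),
        field.to_ascii_uppercase()
    );
    std::env::var(key).ok()
}

fn main() {
    // e.g. after `export LOCALAI_API_KEY=sk-xxx`:
    println!("{:?}", env_override("localai", "api_key"));
}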

@@ -27,7 +27,7 @@ impl AzureOpenAIClient {
         (
             "models[].max_input_tokens",
             "Max Input Tokens:",
-            true,
+            false,
             PromptKind::Integer,
         ),
     ];

@@ -26,7 +26,7 @@ impl ClaudeClient {
     config_get_fn!(api_key, get_api_key);

     pub const PROMPTS: [PromptType<'static>; 1] =
-        [("api_key", "API Key:", false, PromptKind::String)];
+        [("api_key", "API Key:", true, PromptKind::String)];

     fn request_builder(&self, client: &ReqwestClient, data: SendData) -> Result<RequestBuilder> {
         let api_key = self.get_api_key().ok();

@@ -27,8 +27,8 @@ impl CloudflareClient {
     config_get_fn!(api_key, get_api_key);

     pub const PROMPTS: [PromptType<'static>; 2] = [
-        ("account_id", "Account ID:", false, PromptKind::String),
-        ("api_key", "API Key:", false, PromptKind::String),
+        ("account_id", "Account ID:", true, PromptKind::String),
+        ("api_key", "API Key:", true, PromptKind::String),
     ];

     fn request_builder(&self, client: &ReqwestClient, data: SendData) -> Result<RequestBuilder> {

@@ -25,7 +25,7 @@ impl CohereClient {
     config_get_fn!(api_key, get_api_key);

     pub const PROMPTS: [PromptType<'static>; 1] =
-        [("api_key", "API Key:", false, PromptKind::String)];
+        [("api_key", "API Key:", true, PromptKind::String)];

     fn request_builder(&self, client: &ReqwestClient, data: SendData) -> Result<RequestBuilder> {
         let api_key = self.get_api_key()?;

@@ -203,7 +203,7 @@ macro_rules! openai_compatible_client {
            config_get_fn!(api_key, get_api_key);

            pub const PROMPTS: [PromptType<'static>; 1] =
-                [("api_key", "API Key:", false, PromptKind::String)];
+                [("api_key", "API Key:", true, PromptKind::String)];

            fn request_builder(
                &self,
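Each `PROMPTS` entry above is a four-field tuple which, read from the literals, appears to be (config field path, prompt label, required flag, value kind). The commit flips the third field: API keys become required (`true`) while `max_input_tokens` becomes optional (`false`). A self-contained sketch with assumed type definitions (the real `PromptType`/`PromptKind` live in aichat's client module):

// Assumed definitions mirroring the tuple literals in the diff.
#[derive(Debug, Clone, Copy)]
enum PromptKind {
    String,
    Integer,
}

// (config field path, prompt label, required?, value kind) -- assumed layout
type PromptType<'a> = (&'a str, &'a str, bool, PromptKind);

const PROMPTS: [PromptType<'static>; 2] = [
    ("api_key", "API Key:", true, PromptKind::String),
    ("models[].max_input_tokens", "Max Input Tokens:", false, PromptKind::Integer),
];

fn main() {
    for (path, label, required, kind) in PROMPTS {
        println!("{label} ({path}) required={required} kind={kind:?}");
    }
}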

@@ -14,7 +14,7 @@ use serde_json::{json, Value};
 #[derive(Debug, Clone, Deserialize, Default)]
 pub struct OllamaConfig {
     pub name: Option<String>,
-    pub api_base: String,
+    pub api_base: Option<String>,
     pub api_auth: Option<String>,
     pub chat_endpoint: Option<String>,
     pub models: Vec<ModelConfig>,
@@ -22,11 +22,12 @@ pub struct OllamaConfig {
 }

 impl OllamaClient {
+    config_get_fn!(api_base, get_api_base);
     config_get_fn!(api_auth, get_api_auth);

     pub const PROMPTS: [PromptType<'static>; 4] = [
         ("api_base", "API Base:", true, PromptKind::String),
-        ("api_auth", "API Key:", false, PromptKind::String),
+        ("api_auth", "API Auth:", false, PromptKind::String),
         ("models[].name", "Model Name:", true, PromptKind::String),
         (
             "models[].max_input_tokens",
@@ -37,6 +38,7 @@ impl OllamaClient {
     ];

     fn request_builder(&self, client: &ReqwestClient, data: SendData) -> Result<RequestBuilder> {
+        let api_base = self.get_api_base()?;
         let api_auth = self.get_api_auth().ok();
         let mut body = build_body(data, &self.model)?;
@@ -44,7 +46,7 @@ impl OllamaClient {
         let chat_endpoint = self.config.chat_endpoint.as_deref().unwrap_or("/api/chat");
-        let url = format!("{}{chat_endpoint}", self.config.api_base);
+        let url = format!("{api_base}{chat_endpoint}");

         debug!("Ollama Request: {url} {body}");

@@ -10,7 +10,7 @@ use serde::Deserialize;
 #[derive(Debug, Clone, Deserialize)]
 pub struct OpenAICompatibleConfig {
     pub name: Option<String>,
-    pub api_base: String,
+    pub api_base: Option<String>,
     pub api_key: Option<String>,
     pub chat_endpoint: Option<String>,
     pub models: Vec<ModelConfig>,
@@ -18,6 +18,7 @@ pub struct OpenAICompatibleConfig {
 }

 impl OpenAICompatibleClient {
+    config_get_fn!(api_base, get_api_base);
     config_get_fn!(api_key, get_api_key);

     pub const PROMPTS: [PromptType<'static>; 5] = [
@@ -34,6 +35,7 @@ impl OpenAICompatibleClient {
     ];

     fn request_builder(&self, client: &ReqwestClient, data: SendData) -> Result<RequestBuilder> {
+        let api_base = self.get_api_base()?;
         let api_key = self.get_api_key().ok();
         let mut body = openai_build_body(data, &self.model);
@@ -45,7 +47,7 @@ impl OpenAICompatibleClient {
             .as_deref()
             .unwrap_or("/chat/completions");
-        let url = format!("{}{chat_endpoint}", self.config.api_base);
+        let url = format!("{api_base}{chat_endpoint}");

         debug!("OpenAICompatible Request: {url} {body}");

@@ -34,7 +34,7 @@ impl VertexAIClient {
     pub const PROMPTS: [PromptType<'static>; 2] = [
         ("project_id", "Project ID", true, PromptKind::String),
-        ("location", "Global Location", true, PromptKind::String),
+        ("location", "Location", true, PromptKind::String),
     ];

     fn request_builder(
fn request_builder(
