feat: remove the server functionality

This commit is contained in:
2025-11-03 14:25:55 -07:00
parent b49a27f886
commit 474c5bc76f
17 changed files with 21 additions and 1070 deletions
+1 -7
View File
@@ -518,13 +518,7 @@ fn extract_chat_completions(data: &Value) -> Result<ChatCompletionsOutput> {
bail!("Invalid response data: {data}");
}
let output = ChatCompletionsOutput {
text,
tool_calls,
id: None,
input_tokens: data["usage"]["inputTokens"].as_u64(),
output_tokens: data["usage"]["outputTokens"].as_u64(),
};
let output = ChatCompletionsOutput { text, tool_calls };
Ok(output)
}
+3 -6
View File
@@ -2,10 +2,10 @@ use super::*;
use crate::utils::strip_think_tag;
use anyhow::{bail, Context, Result};
use anyhow::{Context, Result, bail};
use reqwest::RequestBuilder;
use serde::Deserialize;
use serde_json::{json, Value};
use serde_json::{Value, json};
const API_BASE: &str = "https://api.anthropic.com/v1";
@@ -301,7 +301,7 @@ pub fn claude_build_chat_completions_body(
}
})
.collect();
}
}
Ok(body)
}
@@ -353,9 +353,6 @@ pub fn claude_extract_chat_completions(data: &Value) -> Result<ChatCompletionsOu
let output = ChatCompletionsOutput {
text: text.to_string(),
tool_calls,
id: data["id"].as_str().map(|v| v.to_string()),
input_tokens: data["usage"]["input_tokens"].as_u64(),
output_tokens: data["usage"]["output_tokens"].as_u64(),
};
Ok(output)
}
+1 -7
View File
@@ -244,12 +244,6 @@ fn extract_chat_completions(data: &Value) -> Result<ChatCompletionsOutput> {
if text.is_empty() && tool_calls.is_empty() {
bail!("Invalid response data: {data}");
}
let output = ChatCompletionsOutput {
text,
tool_calls,
id: data["id"].as_str().map(|v| v.to_string()),
input_tokens: data["usage"]["billed_units"]["input_tokens"].as_u64(),
output_tokens: data["usage"]["billed_units"]["output_tokens"].as_u64(),
};
let output = ChatCompletionsOutput { text, tool_calls };
Ok(output)
}
+1 -7
View File
@@ -21,7 +21,7 @@ use std::sync::LazyLock;
use std::time::Duration;
use tokio::sync::mpsc::unbounded_channel;
const MODELS_YAML: &str = include_str!("../../models.yaml");
pub const MODELS_YAML: &str = include_str!("../../models.yaml");
pub static ALL_PROVIDER_MODELS: LazyLock<Vec<ProviderModels>> = LazyLock::new(|| {
Config::local_models_override()
@@ -47,8 +47,6 @@ pub trait Client: Sync + Send {
fn model(&self) -> &Model;
fn model_mut(&mut self) -> &mut Model;
fn build_client(&self) -> Result<ReqwestClient> {
let mut builder = ReqwestClient::builder();
let extra = self.extra_config();
@@ -291,9 +289,6 @@ pub struct ChatCompletionsData {
pub struct ChatCompletionsOutput {
pub text: String,
pub tool_calls: Vec<ToolCall>,
pub id: Option<String>,
pub input_tokens: Option<u64>,
pub output_tokens: Option<u64>,
}
impl ChatCompletionsOutput {
@@ -341,7 +336,6 @@ pub type RerankOutput = Vec<RerankResult>;
#[derive(Debug, Deserialize)]
pub struct RerankResult {
pub index: usize,
pub relevance_score: f64,
}
pub type PromptAction<'a> = (&'a str, &'a str, Option<&'a str>, bool);
-4
View File
@@ -159,10 +159,6 @@ macro_rules! client_common_fns {
fn model(&self) -> &Model {
&self.model
}
fn model_mut(&mut self) -> &mut Model {
&mut self.model
}
};
}
-8
View File
@@ -118,14 +118,6 @@ impl Model {
}
}
pub fn data(&self) -> &ModelData {
&self.data
}
pub fn data_mut(&mut self) -> &mut ModelData {
&mut self.data
}
pub fn description(&self) -> String {
match self.model_type() {
ModelType::Chat => {
+1 -7
View File
@@ -389,13 +389,7 @@ pub fn openai_extract_chat_completions(data: &Value) -> Result<ChatCompletionsOu
} else {
text.to_string()
};
let output = ChatCompletionsOutput {
text,
tool_calls,
id: data["id"].as_str().map(|v| v.to_string()),
input_tokens: data["usage"]["prompt_tokens"].as_u64(),
output_tokens: data["usage"]["completion_tokens"].as_u64(),
};
let output = ChatCompletionsOutput { text, tool_calls };
Ok(output)
}
-5
View File
@@ -56,7 +56,6 @@ impl SseHandler {
}
pub fn tool_call(&mut self, call: ToolCall) -> Result<()> {
// debug!("HandleCall: {:?}", call);
self.tool_calls.push(call);
Ok(())
}
@@ -65,10 +64,6 @@ impl SseHandler {
self.abort_signal.clone()
}
pub fn tool_calls(&self) -> &[ToolCall] {
&self.tool_calls
}
pub fn take(self) -> (String, Vec<ToolCall>) {
let Self {
buffer, tool_calls, ..
+1 -7
View File
@@ -296,13 +296,7 @@ fn gemini_extract_chat_completions_text(data: &Value) -> Result<ChatCompletionsO
bail!("Invalid response data: {data}");
}
}
let output = ChatCompletionsOutput {
text,
tool_calls,
id: None,
input_tokens: data["usageMetadata"]["promptTokenCount"].as_u64(),
output_tokens: data["usageMetadata"]["candidatesTokenCount"].as_u64(),
};
let output = ChatCompletionsOutput { text, tool_calls };
Ok(output)
}