Skip to content

Commit

Permalink
fix: update visibility
Browse files Browse the repository at this point in the history
  • Loading branch information
McPatate committed Jun 26, 2024
1 parent 5e2f568 commit 9f35984
Show file tree
Hide file tree
Showing 10 changed files with 109 additions and 108 deletions.
93 changes: 47 additions & 46 deletions crates/lsp-ai/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ pub struct TextSplitter {
}

#[derive(Debug, Clone, Deserialize)]
pub enum ValidMemoryBackend {
pub(crate) enum ValidMemoryBackend {
#[serde(rename = "file_store")]
FileStore(FileStore),
#[serde(rename = "postgresml")]
Expand Down Expand Up @@ -141,10 +141,10 @@ pub(crate) struct Crawl {
}

#[derive(Clone, Debug, Deserialize)]
pub struct PostgresMLEmbeddingModel {
pub model: String,
pub embed_parameters: Option<Value>,
pub query_parameters: Option<Value>,
pub(crate) struct PostgresMLEmbeddingModel {
pub(crate) model: String,
pub(crate) embed_parameters: Option<Value>,
pub(crate) query_parameters: Option<Value>,
}

#[derive(Clone, Debug, Deserialize)]
Expand All @@ -164,38 +164,38 @@ pub(crate) struct FileStore {
}

impl FileStore {
pub fn new_without_crawl() -> Self {
pub(crate) fn new_without_crawl() -> Self {
Self { crawl: None }
}
}

#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Ollama {
pub(crate) struct Ollama {
// The generate endpoint, default: 'http://localhost:11434/api/generate'
pub generate_endpoint: Option<String>,
pub(crate) generate_endpoint: Option<String>,
// The chat endpoint, default: 'http://localhost:11434/api/chat'
pub chat_endpoint: Option<String>,
pub(crate) chat_endpoint: Option<String>,
// The model name
pub model: String,
pub(crate) model: String,
// The maximum requests per second
#[serde(default = "max_requests_per_second_default")]
pub max_requests_per_second: f32,
pub(crate) max_requests_per_second: f32,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct MistralFIM {
pub(crate) struct MistralFIM {
// The auth token env var name
pub auth_token_env_var_name: Option<String>,
pub auth_token: Option<String>,
pub(crate) auth_token_env_var_name: Option<String>,
pub(crate) auth_token: Option<String>,
// The fim endpoint
pub fim_endpoint: Option<String>,
pub(crate) fim_endpoint: Option<String>,
// The model name
pub model: String,
pub(crate) model: String,
// The maximum requests per second
#[serde(default = "max_requests_per_second_default")]
pub max_requests_per_second: f32,
pub(crate) max_requests_per_second: f32,
}

#[cfg(feature = "llama_cpp")]
Expand Down Expand Up @@ -229,82 +229,83 @@ pub struct LLaMACPP {

#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct OpenAI {
pub(crate) struct OpenAI {
// The auth token env var name
pub auth_token_env_var_name: Option<String>,
pub(crate) auth_token_env_var_name: Option<String>,
// The auth token
pub auth_token: Option<String>,
pub(crate) auth_token: Option<String>,
// The completions endpoint
pub completions_endpoint: Option<String>,
pub(crate) completions_endpoint: Option<String>,
// The chat endpoint
pub chat_endpoint: Option<String>,
pub(crate) chat_endpoint: Option<String>,
// The maximum requests per second
#[serde(default = "max_requests_per_second_default")]
pub max_requests_per_second: f32,
pub(crate) max_requests_per_second: f32,
// The model name
pub model: String,
pub(crate) model: String,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Gemini {
pub(crate) struct Gemini {
// The auth token env var name
pub auth_token_env_var_name: Option<String>,
pub(crate) auth_token_env_var_name: Option<String>,
// The auth token
pub auth_token: Option<String>,
pub(crate) auth_token: Option<String>,
// The completions endpoint
pub completions_endpoint: Option<String>,
#[allow(dead_code)]
pub(crate) completions_endpoint: Option<String>,
// The chat endpoint
pub chat_endpoint: Option<String>,
pub(crate) chat_endpoint: Option<String>,
// The maximum requests per second
#[serde(default = "max_requests_per_second_default")]
pub max_requests_per_second: f32,
pub(crate) max_requests_per_second: f32,
// The model name
pub model: String,
pub(crate) model: String,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub(crate) struct Anthropic {
// The auth token env var name
pub auth_token_env_var_name: Option<String>,
pub auth_token: Option<String>,
pub(crate) auth_token_env_var_name: Option<String>,
pub(crate) auth_token: Option<String>,
// The completions endpoint
#[allow(dead_code)]
pub completions_endpoint: Option<String>,
pub(crate) completions_endpoint: Option<String>,
// The chat endpoint
pub chat_endpoint: Option<String>,
pub(crate) chat_endpoint: Option<String>,
// The maximum requests per second
#[serde(default = "max_requests_per_second_default")]
pub max_requests_per_second: f32,
pub(crate) max_requests_per_second: f32,
// The model name
pub model: String,
pub(crate) model: String,
}

#[derive(Clone, Debug, Deserialize)]
pub struct Completion {
pub(crate) struct Completion {
// The model key to use
pub model: String,
pub(crate) model: String,
// Args are deserialized by the backend using them
#[serde(default)]
pub parameters: Kwargs,
pub(crate) parameters: Kwargs,
// Parameters for post processing
#[serde(default)]
pub post_process: PostProcess,
pub(crate) post_process: PostProcess,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct ValidConfig {
pub(crate) struct ValidConfig {
pub(crate) memory: ValidMemoryBackend,
pub models: HashMap<String, ValidModel>,
pub completion: Option<Completion>,
pub(crate) models: HashMap<String, ValidModel>,
pub(crate) completion: Option<Completion>,
}

#[derive(Clone, Debug, Deserialize, Default)]
pub struct ValidClientParams {
pub(crate) struct ValidClientParams {
#[serde(alias = "rootUri")]
pub root_uri: Option<String>,
pub(crate) root_uri: Option<String>,
}

#[derive(Clone, Debug)]
Expand Down
2 changes: 1 addition & 1 deletion crates/lsp-ai/src/crawl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use tracing::{error, instrument};

use crate::config::{self, Config};

pub struct Crawl {
pub(crate) struct Crawl {
crawl_config: config::Crawl,
config: Config,
crawled_file_types: HashSet<String>,
Expand Down
2 changes: 1 addition & 1 deletion crates/lsp-ai/src/splitters/text_splitter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use crate::{config, memory_backends::file_store::File};

use super::{ByteRange, Chunk, Splitter};

pub struct TextSplitter {
pub(crate) struct TextSplitter {
chunk_size: usize,
splitter: text_splitter::TextSplitter<text_splitter::Characters>,
}
Expand Down
2 changes: 1 addition & 1 deletion crates/lsp-ai/src/transformer_backends/anthropic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ pub(crate) struct AnthropicRunParams {
pub(crate) temperature: f32,
}

pub struct Anthropic {
pub(crate) struct Anthropic {
config: config::Anthropic,
}

Expand Down
26 changes: 11 additions & 15 deletions crates/lsp-ai/src/transformer_backends/gemini.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,37 +55,33 @@ impl GeminiContent {
}

#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename = "camelCase")]
#[serde(deny_unknown_fields)]
pub struct GeminiGenerationConfig {
#[serde(rename = "stopSequences")]
pub(crate) struct GeminiGenerationConfig {
#[serde(default)]
pub stop_sequences: Vec<String>,
#[serde(rename = "maxOutputTokens")]
pub(crate) stop_sequences: Vec<String>,
#[serde(default = "max_tokens_default")]
pub max_output_tokens: usize,
pub temperature: Option<f32>,
#[serde(rename = "topP")]
pub top_p: Option<f32>,
#[serde(rename = "topK")]
pub top_k: Option<f32>,
pub(crate) max_output_tokens: usize,
pub(crate) temperature: Option<f32>,
pub(crate) top_p: Option<f32>,
pub(crate) top_k: Option<f32>,
}

// NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct GeminiRunParams {
#[serde(rename = "camelCase")]
pub(crate) struct GeminiRunParams {
contents: Vec<GeminiContent>,
#[serde(rename = "systemInstruction")]
system_instruction: GeminiContent,
#[serde(rename = "generationConfig")]
generation_config: Option<GeminiGenerationConfig>,
}

pub struct Gemini {
pub(crate) struct Gemini {
configuration: config::Gemini,
}

impl Gemini {
pub fn new(configuration: config::Gemini) -> Self {
pub(crate) fn new(configuration: config::Gemini) -> Self {
Self { configuration }
}

Expand Down
18 changes: 9 additions & 9 deletions crates/lsp-ai/src/transformer_backends/mistral_fim.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,25 +26,25 @@ const fn temperature_default() -> f32 {

// NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
#[derive(Debug, Deserialize)]
pub struct MistralFIMRunParams {
pub(crate) struct MistralFIMRunParams {
#[serde(default = "max_tokens_default")]
pub max_tokens: usize,
pub(crate) max_tokens: usize,
#[serde(default = "top_p_default")]
pub top_p: f32,
pub(crate) top_p: f32,
#[serde(default = "temperature_default")]
pub temperature: f32,
pub min_tokens: Option<u64>,
pub random_seed: Option<u64>,
pub(crate) temperature: f32,
pub(crate) min_tokens: Option<u64>,
pub(crate) random_seed: Option<u64>,
#[serde(default)]
pub stop: Vec<String>,
pub(crate) stop: Vec<String>,
}

pub struct MistralFIM {
pub(crate) struct MistralFIM {
config: config::MistralFIM,
}

impl MistralFIM {
pub fn new(config: config::MistralFIM) -> Self {
pub(crate) fn new(config: config::MistralFIM) -> Self {
Self { config }
}

Expand Down
6 changes: 3 additions & 3 deletions crates/lsp-ai/src/transformer_backends/ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ use super::TransformerBackend;

// NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
#[derive(Debug, Deserialize)]
pub struct OllamaRunParams {
pub fim: Option<FIM>,
pub(crate) struct OllamaRunParams {
pub(crate) fim: Option<FIM>,
messages: Option<Vec<ChatMessage>>,
#[serde(default)]
options: HashMap<String, Value>,
Expand All @@ -26,7 +26,7 @@ pub struct OllamaRunParams {
keep_alive: Option<String>,
}

pub struct Ollama {
pub(crate) struct Ollama {
configuration: config::Ollama,
}

Expand Down
36 changes: 18 additions & 18 deletions crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,22 +38,22 @@ const fn temperature_default() -> f32 {

// NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
#[derive(Debug, Deserialize)]
pub struct OpenAIRunParams {
pub fim: Option<FIM>,
pub(crate) struct OpenAIRunParams {
pub(crate) fim: Option<FIM>,
messages: Option<Vec<ChatMessage>>,
#[serde(default = "max_tokens_default")]
pub max_tokens: usize,
pub(crate) max_tokens: usize,
#[serde(default = "top_p_default")]
pub top_p: f32,
pub(crate) top_p: f32,
#[serde(default = "presence_penalty_default")]
pub presence_penalty: f32,
pub(crate) presence_penalty: f32,
#[serde(default = "frequency_penalty_default")]
pub frequency_penalty: f32,
pub(crate) frequency_penalty: f32,
#[serde(default = "temperature_default")]
pub temperature: f32,
pub(crate) temperature: f32,
}

pub struct OpenAI {
pub(crate) struct OpenAI {
configuration: config::OpenAI,
}

Expand All @@ -68,27 +68,27 @@ struct OpenAICompletionsResponse {
error: Option<Value>,
#[serde(default)]
#[serde(flatten)]
pub other: HashMap<String, Value>,
pub(crate) other: HashMap<String, Value>,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct OpenAIChatMessage {
pub role: String,
pub content: String,
pub(crate) struct OpenAIChatMessage {
pub(crate) role: String,
pub(crate) content: String,
}

#[derive(Deserialize)]
pub struct OpenAIChatChoices {
pub message: OpenAIChatMessage,
pub(crate) struct OpenAIChatChoices {
pub(crate) message: OpenAIChatMessage,
}

#[derive(Deserialize)]
pub struct OpenAIChatResponse {
pub choices: Option<Vec<OpenAIChatChoices>>,
pub error: Option<Value>,
pub(crate) struct OpenAIChatResponse {
pub(crate) choices: Option<Vec<OpenAIChatChoices>>,
pub(crate) error: Option<Value>,
#[serde(default)]
#[serde(flatten)]
pub other: HashMap<String, Value>,
pub(crate) other: HashMap<String, Value>,
}

impl OpenAI {
Expand Down
Loading

0 comments on commit 9f35984

Please sign in to comment.