commit a2457f77e6 (parent bb1e4516a4)
Author: Silas Marvin
Date:   2024-04-07 19:45:44 -07:00

6 changed files with 10 additions and 38 deletions

Cargo.lock

@@ -1954,7 +1954,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
 [[package]]
 name = "pgml"
-version = "1.0.0"
+version = "1.0.1"
 dependencies = [
  "anyhow",
  "async-trait",


@@ -8,7 +8,6 @@ edition = "2021"
 [dependencies]
 anyhow = "1.0.75"
 lsp-server = "0.7.6"
-# lsp-server = { path = "../rust-analyzer/lib/lsp-server" }
 lsp-types = "0.95.0"
 ropey = "1.6.1"
 serde = "1.0.190"


@@ -17,12 +17,8 @@
   "contributes": {
     "commands": [
       {
-        "command": "lsp-ai.generate",
-        "title": "LSP AI Generate"
-      },
-      {
-        "command": "lsp-ai.generateStream",
-        "title": "LSP AI Generate Stream"
+        "command": "lsp-ai.generation",
+        "title": "LSP-AI Generation"
       }
     ],
     "configuration": {


@@ -33,7 +33,7 @@ export function activate(context: vscode.ExtensionContext) {
   client.start();

   // Register generate function
-  const generateCommand = 'lsp-ai.generate';
+  const generateCommand = 'lsp-ai.generation';
   const generateCommandHandler = (editor: vscode.TextEditor) => {
     let params = {
       textDocument: {
@@ -41,7 +41,7 @@ export function activate(context: vscode.ExtensionContext) {
       },
       position: editor.selection.active
     };
-    client.sendRequest("textDocument/generate", params).then(result => {
+    client.sendRequest("textDocument/generation", params).then(result => {
       editor.edit((edit) => {
         edit.insert(editor.selection.active, result["generatedText"]);
       });
@@ -51,29 +51,6 @@ export function activate(context: vscode.ExtensionContext) {
   };
   context.subscriptions.push(vscode.commands.registerTextEditorCommand(generateCommand, generateCommandHandler));

   // Register functions
-  // This function is not ready to go
-  // const generateStreamCommand = 'lsp-ai.generateStream';
-  // const generateStreamCommandHandler = (editor: vscode.TextEditor) => {
-  //   let params = {
-  //     textDocument: {
-  //       uri: editor.document.uri.toString(),
-  //     },
-  //     position: editor.selection.active,
-  //     partialResultToken: uuidv4()
-  //   };
-  //   console.log("PARAMS: ", params);
-  //   client.sendRequest("textDocument/generateStream", params).then(result => {
-  //     console.log("RECEIVED RESULT", result);
-  //     editor.edit((edit) => {
-  //       edit.insert(editor.selection.active, result["generatedText"]);
-  //     });
-  //   }).catch(error => {
-  //     console.error("Error making generate request", error);
-  //   });
-  // };
-  // context.subscriptions.push(vscode.commands.registerTextEditorCommand(generateStreamCommand, generateStreamCommandHandler));

   vscode.languages.registerInlineCompletionItemProvider({ pattern: '**' },
     {
       provideInlineCompletionItems: async (document: vscode.TextDocument, position: vscode.Position) => {
@@ -83,7 +60,7 @@ export function activate(context: vscode.ExtensionContext) {
         },
         position: position
       };
-      const result = await client.sendRequest("textDocument/generate", params);
+      const result = await client.sendRequest("textDocument/generation", params);
       return [new vscode.InlineCompletionItem(result["generatedText"])];
     }
   }
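Both the command handler and the inline completion provider now send the custom request as "textDocument/generation". The matching server-side routing is not part of the hunks shown here; the following is a minimal sketch of how the renamed method could be handled with the lsp-server crate pinned in the manifest above. The GenerationParams and GenerationResult types and the canned response are illustrative, not the actual lsp-ai handler.

// Hypothetical server-side sketch: route the renamed "textDocument/generation"
// request with the lsp-server crate. Types and the placeholder response are
// illustrative only.
use lsp_server::{Connection, Message, Response};
use serde::{Deserialize, Serialize};
use serde_json::json;

#[derive(Deserialize)]
struct GenerationParams {
    #[serde(rename = "textDocument")]
    text_document: lsp_types::TextDocumentIdentifier,
    position: lsp_types::Position,
}

#[derive(Serialize)]
struct GenerationResult {
    #[serde(rename = "generatedText")]
    generated_text: String,
}

fn main() -> anyhow::Result<()> {
    // Talk LSP over stdin/stdout and perform the initialize handshake.
    let (connection, io_threads) = Connection::stdio();
    connection.initialize(json!({}))?;

    for msg in &connection.receiver {
        if let Message::Request(req) = msg {
            if connection.handle_shutdown(&req)? {
                break;
            }
            // The VS Code client above now sends "textDocument/generation"
            // instead of "textDocument/generate".
            if req.method == "textDocument/generation" {
                let params: GenerationParams = serde_json::from_value(req.params)?;
                let result = GenerationResult {
                    // Placeholder text; a real handler would call the
                    // configured transformer backend here.
                    generated_text: format!(
                        "/* completion for {} at {}:{} */",
                        params.text_document.uri, params.position.line, params.position.character
                    ),
                };
                connection
                    .sender
                    .send(Message::Response(Response::new_ok(req.id, result)))?;
            }
        }
    }

    io_threads.join()?;
    Ok(())
}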


@@ -6,8 +6,8 @@ use std::collections::HashMap;

 const DEFAULT_LLAMA_CPP_N_CTX: usize = 1024;
 const DEFAULT_OPENAI_MAX_CONTEXT: usize = 2048;
-const DEFAULT_MAX_COMPLETION_TOKENS: usize = 32;
-const DEFAULT_MAX_GENERATION_TOKENS: usize = 256;
+const DEFAULT_MAX_COMPLETION_TOKENS: usize = 16;
+const DEFAULT_MAX_GENERATION_TOKENS: usize = 64;

 pub type Kwargs = HashMap<String, Value>;
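The defaults drop from 32 to 16 tokens for inline completions and from 256 to 64 for explicit generation. Presumably these constants are consumed as serde defaults on the config structs; that wiring is not in this diff, so the struct and field names in the sketch below are hypothetical.

// Hypothetical sketch of how the lowered constants might back serde defaults;
// the MaxTokens struct and its fields are illustrative, not lsp-ai's actual
// config types.
use serde::Deserialize;

const DEFAULT_MAX_COMPLETION_TOKENS: usize = 16;
const DEFAULT_MAX_GENERATION_TOKENS: usize = 64;

fn default_max_completion_tokens() -> usize {
    DEFAULT_MAX_COMPLETION_TOKENS
}

fn default_max_generation_tokens() -> usize {
    DEFAULT_MAX_GENERATION_TOKENS
}

#[derive(Debug, Deserialize)]
struct MaxTokens {
    // Inline completions default to 16 tokens, keeping keystroke latency low.
    #[serde(default = "default_max_completion_tokens")]
    completion: usize,
    // Explicit "LSP-AI Generation" requests default to 64 tokens.
    #[serde(default = "default_max_generation_tokens")]
    generation: usize,
}

Under this sketch, deserializing an empty JSON object would yield completion: 16 and generation: 64.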
@@ -263,7 +263,7 @@ impl Config {
     pub fn get_transformer_max_requests_per_second(&self) -> f32 {
         match &self.config.transformer {
-            ValidTransformerBackend::LLaMACPP(_) => f32::MAX,
+            ValidTransformerBackend::LLaMACPP(_) => 1.,
             ValidTransformerBackend::OpenAI(openai) => openai.max_requests_per_second,
             ValidTransformerBackend::Anthropic(anthropic) => anthropic.max_requests_per_second,
         }
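The llama.cpp backend is also capped at one request per second instead of being effectively unthrottled (f32::MAX). Below is a minimal sketch of how a max_requests_per_second value could be enforced, assuming nothing about lsp-ai's actual scheduling code.

// Illustrative rate limiter only; lsp-ai's real request scheduling is not
// shown in this diff.
use std::time::{Duration, Instant};

struct RateLimiter {
    min_interval: Duration,
    last_request: Option<Instant>,
}

impl RateLimiter {
    fn new(max_requests_per_second: f32) -> Self {
        Self {
            // 1.0 request/second means at least one second between requests.
            min_interval: Duration::from_secs_f32(1.0 / max_requests_per_second),
            last_request: None,
        }
    }

    /// Returns true and records the time if enough time has passed since the
    /// previous request; otherwise the caller should skip or defer the call.
    fn try_acquire(&mut self) -> bool {
        let now = Instant::now();
        match self.last_request {
            Some(prev) if now.duration_since(prev) < self.min_interval => false,
            _ => {
                self.last_request = Some(now);
                true
            }
        }
    }
}

With the value now returned for LLaMACPP, such a limiter would let at most one request per second reach a local llama.cpp model, while the OpenAI and Anthropic backends keep their user-configured limits.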