Mirror of https://github.com/SilasMarvin/lsp-ai.git, synced 2025-12-18 23:14:28 +01:00
Merge pull request #35 from SilasMarvin/feat/add_release_ci
feat: add release CI
2  .cargo/config.toml  Normal file
@@ -0,0 +1,2 @@
+[alias]
+xtask = "run --package xtask --bin xtask --"
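With this alias, the xtask helper binary is invoked through plain Cargo; the release workflow further down runs `cargo xtask dist`. A minimal sketch of the expansion (the `dist` subcommand comes from the workflow, not from this file):

    # `cargo xtask dist` expands, via the alias above, to:
    cargo run --package xtask --bin xtask -- dist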
8  .github/actions/github-release/Dockerfile  vendored  Normal file
@@ -0,0 +1,8 @@
+FROM node:slim
+
+COPY . /action
+WORKDIR /action
+
+RUN npm install --production
+
+ENTRYPOINT ["node", "/action/main.js"]
21  .github/actions/github-release/README.md  vendored  Normal file
@@ -0,0 +1,21 @@
+# github-release
+
+Copy-pasted from
+https://github.com/rust-lang/rust-analyzer/tree/2df30e1e07eafc1de0359566423f471920693a34/.github/actions/github-release
+
+An action used to publish GitHub releases for `wasmtime`.
+
+As of the time of this writing there are a few actions floating around which
+perform GitHub releases, but they all tend to have their own set of drawbacks.
+Additionally, nothing handles deleting releases, which we need for our rolling
+`dev` release.
+
+To handle all this, this action rolls its own implementation using the
+actions/toolkit repository and packages published there. These run in a Docker
+container and take various inputs to orchestrate the release from the build.
+
+More comments can be found in `main.js`.
+
+Testing this is really hard. If you want to try, run `npm install` and
+then `node main.js`. You'll have to configure a bunch of env vars to get
+anything reasonably working.
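A concrete starting point for that local testing, inferred from `main.js` below: `@actions/core`'s `getInput` reads `INPUT_*` environment variables, and the script also reads `GITHUB_REPOSITORY` and `HEAD_SHA`. A hedged sketch; the token and release name are placeholders, not values from this repository:

    export GITHUB_REPOSITORY="SilasMarvin/lsp-ai"
    export HEAD_SHA="$(git rev-parse HEAD)"
    export INPUT_TOKEN="<a GitHub token with repo scope>"  # placeholder
    export INPUT_NAME="0.0.0-test"                         # placeholder release/tag name
    export INPUT_FILES="dist/*"
    npm install && node main.js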
15  .github/actions/github-release/action.yml  vendored  Normal file
@@ -0,0 +1,15 @@
+name: 'wasmtime github releases'
+description: 'wasmtime github releases'
+inputs:
+  token:
+    description: ''
+    required: true
+  name:
+    description: ''
+    required: true
+  files:
+    description: ''
+    required: true
+runs:
+  using: 'docker'
+  image: 'Dockerfile'
144  .github/actions/github-release/main.js  vendored  Normal file
@@ -0,0 +1,144 @@
+const core = require('@actions/core');
+const path = require("path");
+const fs = require("fs");
+const github = require('@actions/github');
+const glob = require('glob');
+
+function sleep(milliseconds) {
+  return new Promise(resolve => setTimeout(resolve, milliseconds));
+}
+
+async function runOnce() {
+  // Load all our inputs and env vars. Note that `getInput` reads from `INPUT_*`
+  const files = core.getInput('files');
+  const name = core.getInput('name');
+  const token = core.getInput('token');
+  const slug = process.env.GITHUB_REPOSITORY;
+  const owner = slug.split('/')[0];
+  const repo = slug.split('/')[1];
+  const sha = process.env.HEAD_SHA;
+
+  core.info(`files: ${files}`);
+  core.info(`name: ${name}`);
+
+  const options = {
+    request: {
+      timeout: 30000,
+    }
+  };
+  const octokit = github.getOctokit(token, options);
+
+  // Delete the previous release since we can't overwrite one. This may happen
+  // due to retrying an upload or it may happen because we're doing the dev
+  // release.
+  const releases = await octokit.paginate("GET /repos/:owner/:repo/releases", { owner, repo });
+  for (const release of releases) {
+    if (release.tag_name !== name) {
+      continue;
+    }
+    const release_id = release.id;
+    core.info(`deleting release ${release_id}`);
+    await octokit.rest.repos.deleteRelease({ owner, repo, release_id });
+  }
+
+  // We also need to update the `dev` tag while we're at it on the `dev` branch.
+  if (name == 'nightly') {
+    try {
+      core.info(`updating nightly tag`);
+      await octokit.rest.git.updateRef({
+        owner,
+        repo,
+        ref: 'tags/nightly',
+        sha,
+        force: true,
+      });
+    } catch (e) {
+      core.error(e);
+      core.info(`creating nightly tag`);
+      await octokit.rest.git.createTag({
+        owner,
+        repo,
+        tag: 'nightly',
+        message: 'nightly release',
+        object: sha,
+        type: 'commit',
+      });
+    }
+  }
+
+  // Creates an official GitHub release for this `tag`, and if this is `dev`
+  // then we know that from the previous block this should be a fresh release.
+  core.info(`creating a release`);
+  const release = await octokit.rest.repos.createRelease({
+    owner,
+    repo,
+    name,
+    tag_name: name,
+    target_commitish: sha,
+    prerelease: name === 'nightly',
+  });
+  const release_id = release.data.id;
+
+  // Upload all the relevant assets for this release as just general blobs.
+  for (const file of glob.sync(files)) {
+    const size = fs.statSync(file).size;
+    const name = path.basename(file);
+
+    await runWithRetry(async function() {
+      // We can't overwrite assets, so remove existing ones from a previous try.
+      let assets = await octokit.rest.repos.listReleaseAssets({
+        owner,
+        repo,
+        release_id
+      });
+      for (const asset of assets.data) {
+        if (asset.name === name) {
+          core.info(`delete asset ${name}`);
+          const asset_id = asset.id;
+          await octokit.rest.repos.deleteReleaseAsset({ owner, repo, asset_id });
+        }
+      }
+
+      core.info(`upload ${file}`);
+      const headers = { 'content-length': size, 'content-type': 'application/octet-stream' };
+      const data = fs.createReadStream(file);
+      await octokit.rest.repos.uploadReleaseAsset({
+        data,
+        headers,
+        name,
+        url: release.data.upload_url,
+      });
+    });
+  }
+}
+
+async function runWithRetry(f) {
+  const retries = 10;
+  const maxDelay = 4000;
+  let delay = 1000;
+
+  for (let i = 0; i < retries; i++) {
+    try {
+      await f();
+      break;
+    } catch (e) {
+      if (i === retries - 1)
+        throw e;
+
+      core.error(e);
+      const currentDelay = Math.round(Math.random() * delay);
+      core.info(`sleeping ${currentDelay} ms`);
+      await sleep(currentDelay);
+      delay = Math.min(delay * 2, maxDelay);
+    }
+  }
+}
+
+async function run() {
+  await runWithRetry(runOnce);
+}
+
+run().catch(err => {
+  core.error(err);
+  core.setFailed(err.message);
+});
10  .github/actions/github-release/package.json  vendored  Normal file
@@ -0,0 +1,10 @@
+{
+  "name": "wasmtime-github-release",
+  "version": "0.0.0",
+  "main": "main.js",
+  "dependencies": {
+    "@actions/core": "^1.6",
+    "@actions/github": "^5.0",
+    "glob": "^7.1.5"
+  }
+}
8  .github/workflows/README.md  vendored  Normal file
@@ -0,0 +1,8 @@
+# CI pipelines
+
+## Release
+
+To create a new release for `lsp-ai`, all you need to do is create a new branch with the following format: `release/{release_name}`. `release_name` is usually the version of the release package in SemVer format `x.x.x{-rcx}`.
+
+This has the advantage of being able to fix issues for a specific release, while continuing development on the `main` branch, by cherry-picking patches. It's inspired by trunk-based development.
+
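In practice, cutting a release is just pushing a branch that matches that pattern; the publish job later recovers the tag name from the final path segment with `${BRANCH##*/}`. A sketch using a hypothetical version number:

    git checkout main && git pull
    git checkout -b release/0.3.0      # 0.3.0 is a placeholder version
    git push -u origin release/0.3.0   # the push triggers release.yml below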
188  .github/workflows/release.yml  vendored  Normal file
@@ -0,0 +1,188 @@
+name: release
+on:
+  workflow_dispatch:
+
+  push:
+    branches:
+      - 'release/**'
+
+env:
+  CARGO_INCREMENTAL: 0
+  CARGO_NET_RETRY: 10
+  RUSTFLAGS: "-D warnings -W unreachable-pub"
+  RUSTUP_MAX_RETRIES: 10
+  FETCH_DEPTH: 0 # pull in the tags for the version string
+  CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
+  CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
+
+jobs:
+  dist:
+    strategy:
+      matrix:
+        include:
+          - os: windows-latest
+            target: x86_64-pc-windows-msvc
+            code-target: win32-x64
+          - os: windows-latest
+            target: i686-pc-windows-msvc
+            code-target: win32-ia32
+          - os: windows-latest
+            target: aarch64-pc-windows-msvc
+            code-target: win32-arm64
+          - os: ubuntu-22.04
+            target: x86_64-unknown-linux-gnu
+            code-target: linux-x64
+          - os: ubuntu-22.04
+            target: aarch64-unknown-linux-gnu
+            code-target: linux-arm64
+          - os: ubuntu-22.04
+            target: arm-unknown-linux-gnueabihf
+            code-target: linux-armhf
+          - os: macos-12
+            target: x86_64-apple-darwin
+            code-target: darwin-x64
+          - os: macos-12
+            target: aarch64-apple-darwin
+            code-target: darwin-arm64
+
+    env:
+      LSP_AI_TARGET: ${{ matrix.target }}
+
+    name: dist (${{ matrix.target }})
+    runs-on: ${{ matrix.os }}
+    container: ${{ matrix.container }}
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: ${{ env.FETCH_DEPTH }}
+
+      - name: Install Rust toolchain
+        run: |
+          rustup update --no-self-update stable
+          rustup target add ${{ matrix.target }}
+          rustup component add rust-src
+
+      - name: Update apt repositories
+        if: contains(matrix.os, 'ubuntu')
+        run: sudo apt-get update -y
+
+      - name: Install AArch64 target toolchain
+        if: matrix.target == 'aarch64-unknown-linux-gnu'
+        run: sudo apt-get install gcc-aarch64-linux-gnu libc6-dev-arm64-cross g++-aarch64-linux-gnu
+
+      - name: Install ARM target toolchain
+        if: matrix.target == 'arm-unknown-linux-gnueabihf'
+        run: sudo apt-get install gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf
+
+      - name: Dist
+        run: cargo xtask dist
+
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: dist-${{ matrix.target }}
+          path: ./dist
+
+  dist-x86_64-unknown-linux-musl:
+    name: dist (x86_64-unknown-linux-musl)
+    runs-on: ubuntu-latest
+    env:
+      LSP_AI_TARGET: x86_64-unknown-linux-musl
+      # For some reason `-crt-static` is not working for clang without lld
+      RUSTFLAGS: "-C link-arg=-fuse-ld=lld -C target-feature=-crt-static"
+    container:
+      image: rust:alpine
+      volumes:
+        - /usr/local/cargo/registry:/usr/local/cargo/registry
+
+    steps:
+      - name: Install dependencies
+        run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers
+
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: ${{ env.FETCH_DEPTH }}
+
+      - name: Install Rust toolchain
+        run: |
+          rustup update --no-self-update stable
+          rustup target add x86_64-unknown-linux-gnu
+          rustup component add rust-src
+
+      - name: Dist
+        run: cargo xtask dist
+
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: dist-x86_64-unknown-linux-musl
+          path: ./dist
+
+  publish:
+    name: publish
+    runs-on: ubuntu-latest
+    needs: ["dist", "dist-x86_64-unknown-linux-musl"]
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: ${{ env.FETCH_DEPTH }}
+
+      - run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
+      - run: 'echo "HEAD_SHA: $HEAD_SHA"'
+
+      - name: Split branch name
+        env:
+          BRANCH: ${{ github.ref_name }}
+        id: split
+        run: echo "tag=${BRANCH##*/}" >> $GITHUB_OUTPUT
+
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-aarch64-apple-darwin
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-x86_64-apple-darwin
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-x86_64-unknown-linux-gnu
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-x86_64-unknown-linux-musl
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-aarch64-unknown-linux-gnu
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-arm-unknown-linux-gnueabihf
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-x86_64-pc-windows-msvc
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-i686-pc-windows-msvc
+          path: dist
+      - uses: actions/download-artifact@v4
+        with:
+          name: dist-aarch64-pc-windows-msvc
+          path: dist
+      - run: ls -al ./dist
+
+      - name: Publish Release
+        uses: ./.github/actions/github-release
+        with:
+          files: "dist/*"
+          name: ${{ steps.split.outputs.tag }}
+          token: ${{ secrets.GITHUB_TOKEN }}
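Because the workflow also declares a `workflow_dispatch` trigger, a run can be started by hand as well. A sketch using the GitHub CLI, assuming `gh` is installed and authenticated:

    gh workflow run release.yml --ref release/0.3.0   # branch name is a placeholder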
78  Cargo.lock  generated
@@ -266,9 +266,9 @@ dependencies = [
 
 [[package]]
 name = "bumpalo"
-version = "3.15.4"
+version = "3.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa"
+checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
 
 [[package]]
 name = "byteorder"
@@ -1658,9 +1658,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
 
 [[package]]
 name = "memchr"
-version = "2.7.1"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
 
 [[package]]
 name = "memo-map"
@@ -3218,9 +3218,9 @@ dependencies = [
 
 [[package]]
 name = "time"
-version = "0.3.34"
+version = "0.3.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
+checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
 dependencies = [
  "deranged",
  "itoa",
@@ -3239,9 +3239,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
 
 [[package]]
 name = "time-macros"
-version = "0.2.17"
+version = "0.2.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774"
+checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
 dependencies = [
  "num-conv",
  "time-core",
@@ -4134,6 +4134,55 @@ dependencies = [
  "windows-sys 0.48.0",
 ]
 
+[[package]]
+name = "write-json"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118"
+
+[[package]]
+name = "xflags"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d9e15fbb3de55454b0106e314b28e671279009b363e6f1d8e39fdc3bf048944"
+dependencies = [
+ "xflags-macros",
+]
+
+[[package]]
+name = "xflags-macros"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "672423d4fea7ffa2f6c25ba60031ea13dc6258070556f125cc4d790007d4a155"
+
+[[package]]
+name = "xshell"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db0ab86eae739efd1b054a8d3d16041914030ac4e01cd1dca0cf252fd8b6437"
+dependencies = [
+ "xshell-macros",
+]
+
+[[package]]
+name = "xshell-macros"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852"
+
+[[package]]
+name = "xtask"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "flate2",
+ "time",
+ "write-json",
+ "xflags",
+ "xshell",
+ "zip",
+]
+
 [[package]]
 name = "xxhash-rust"
 version = "0.8.10"
@@ -4165,3 +4214,16 @@ name = "zeroize"
 version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
+
+[[package]]
+name = "zip"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
+dependencies = [
+ "byteorder",
+ "crc32fast",
+ "crossbeam-utils",
+ "flate2",
+ "time",
+]
14  Cargo.toml
@@ -1,7 +1,5 @@
 [workspace]
-members = [
-    "crates/*",
-]
+members = ["crates/*", "xtask/"]
 resolver = "2"
 
 [workspace.package]
@@ -10,3 +8,13 @@ license = "MIT"
 description = "LSP-AI is an open-source language server that serves as a backend for AI-powered functionality, designed to assist and empower software engineers, not replace them."
 repository = "https://github.com/SilasMarvin/lsp-ai"
 readme = "README.md"
+authors = ["Silas Marvin <silas.marvin@dmarvin.net>"]
+
+[profile.dev.package]
+# This speeds up `cargo xtask dist`.
+miniz_oxide.opt-level = 3
+
+[profile.release]
+incremental = true
+# Set this to 1 or 2 to get more useful backtraces in debugger.
+debug = 0
@@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use std::collections::HashMap;
 
-pub type Kwargs = HashMap<String, Value>;
+pub(crate) type Kwargs = HashMap<String, Value>;
 
 const fn max_requests_per_second_default() -> f32 {
     1.
@@ -70,7 +70,7 @@ pub struct TextSplitter {
 }
 
 #[derive(Debug, Clone, Deserialize)]
-pub enum ValidMemoryBackend {
+pub(crate) enum ValidMemoryBackend {
     #[serde(rename = "file_store")]
     FileStore(FileStore),
     #[serde(rename = "postgresml")]
@@ -79,7 +79,7 @@ pub enum ValidMemoryBackend {
 
 #[derive(Debug, Clone, Deserialize)]
 #[serde(tag = "type")]
-pub enum ValidModel {
+pub(crate) enum ValidModel {
     #[cfg(feature = "llama_cpp")]
     #[serde(rename = "llama_cpp")]
     LLaMACPP(LLaMACPP),
@@ -97,13 +97,13 @@ pub enum ValidModel {
 
 #[derive(Debug, Clone, Deserialize, Serialize)]
 #[serde(deny_unknown_fields)]
-pub struct ChatMessage {
-    pub role: String,
-    pub content: String,
+pub(crate) struct ChatMessage {
+    pub(crate) role: String,
+    pub(crate) content: String,
 }
 
 impl ChatMessage {
-    pub fn new(role: String, content: String) -> Self {
+    pub(crate) fn new(role: String, content: String) -> Self {
         Self {
             role,
             content,
@@ -115,10 +115,10 @@ impl ChatMessage {
 #[derive(Clone, Debug, Deserialize)]
 #[allow(clippy::upper_case_acronyms)]
 #[serde(deny_unknown_fields)]
-pub struct FIM {
-    pub start: String,
-    pub middle: String,
-    pub end: String,
+pub(crate) struct FIM {
+    pub(crate) start: String,
+    pub(crate) middle: String,
+    pub(crate) end: String,
 }
 
 const fn max_crawl_memory_default() -> u64 {
@@ -131,71 +131,71 @@ const fn max_crawl_file_size_default() -> u64 {
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct Crawl {
+pub(crate) struct Crawl {
     #[serde(default = "max_crawl_file_size_default")]
-    pub max_file_size: u64,
+    pub(crate) max_file_size: u64,
     #[serde(default = "max_crawl_memory_default")]
-    pub max_crawl_memory: u64,
+    pub(crate) max_crawl_memory: u64,
     #[serde(default)]
-    pub all_files: bool,
+    pub(crate) all_files: bool,
 }
 
 #[derive(Clone, Debug, Deserialize)]
-pub struct PostgresMLEmbeddingModel {
-    pub model: String,
-    pub embed_parameters: Option<Value>,
-    pub query_parameters: Option<Value>,
+pub(crate) struct PostgresMLEmbeddingModel {
+    pub(crate) model: String,
+    pub(crate) embed_parameters: Option<Value>,
+    pub(crate) query_parameters: Option<Value>,
 }
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct PostgresML {
-    pub database_url: Option<String>,
-    pub crawl: Option<Crawl>,
+pub(crate) struct PostgresML {
+    pub(crate) database_url: Option<String>,
+    pub(crate) crawl: Option<Crawl>,
     #[serde(default)]
-    pub splitter: ValidSplitter,
-    pub embedding_model: Option<PostgresMLEmbeddingModel>,
+    pub(crate) splitter: ValidSplitter,
+    pub(crate) embedding_model: Option<PostgresMLEmbeddingModel>,
 }
 
 #[derive(Clone, Debug, Deserialize, Default)]
 #[serde(deny_unknown_fields)]
-pub struct FileStore {
-    pub crawl: Option<Crawl>,
+pub(crate) struct FileStore {
+    pub(crate) crawl: Option<Crawl>,
 }
 
 impl FileStore {
-    pub fn new_without_crawl() -> Self {
+    pub(crate) fn new_without_crawl() -> Self {
         Self { crawl: None }
     }
 }
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct Ollama {
+pub(crate) struct Ollama {
     // The generate endpoint, default: 'http://localhost:11434/api/generate'
-    pub generate_endpoint: Option<String>,
+    pub(crate) generate_endpoint: Option<String>,
     // The chat endpoint, default: 'http://localhost:11434/api/chat'
-    pub chat_endpoint: Option<String>,
+    pub(crate) chat_endpoint: Option<String>,
     // The model name
-    pub model: String,
+    pub(crate) model: String,
     // The maximum requests per second
     #[serde(default = "max_requests_per_second_default")]
-    pub max_requests_per_second: f32,
+    pub(crate) max_requests_per_second: f32,
 }
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct MistralFIM {
+pub(crate) struct MistralFIM {
     // The auth token env var name
-    pub auth_token_env_var_name: Option<String>,
-    pub auth_token: Option<String>,
+    pub(crate) auth_token_env_var_name: Option<String>,
+    pub(crate) auth_token: Option<String>,
     // The fim endpoint
-    pub fim_endpoint: Option<String>,
+    pub(crate) fim_endpoint: Option<String>,
     // The model name
-    pub model: String,
+    pub(crate) model: String,
     // The maximum requests per second
     #[serde(default = "max_requests_per_second_default")]
-    pub max_requests_per_second: f32,
+    pub(crate) max_requests_per_second: f32,
 }
 
 #[cfg(feature = "llama_cpp")]
@@ -229,87 +229,89 @@ pub struct LLaMACPP {
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct OpenAI {
+pub(crate) struct OpenAI {
     // The auth token env var name
-    pub auth_token_env_var_name: Option<String>,
+    pub(crate) auth_token_env_var_name: Option<String>,
     // The auth token
-    pub auth_token: Option<String>,
+    pub(crate) auth_token: Option<String>,
     // The completions endpoint
-    pub completions_endpoint: Option<String>,
+    pub(crate) completions_endpoint: Option<String>,
     // The chat endpoint
-    pub chat_endpoint: Option<String>,
+    pub(crate) chat_endpoint: Option<String>,
     // The maximum requests per second
     #[serde(default = "max_requests_per_second_default")]
-    pub max_requests_per_second: f32,
+    pub(crate) max_requests_per_second: f32,
     // The model name
-    pub model: String,
+    pub(crate) model: String,
 }
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct Gemini {
+pub(crate) struct Gemini {
     // The auth token env var name
-    pub auth_token_env_var_name: Option<String>,
+    pub(crate) auth_token_env_var_name: Option<String>,
     // The auth token
-    pub auth_token: Option<String>,
+    pub(crate) auth_token: Option<String>,
     // The completions endpoint
-    pub completions_endpoint: Option<String>,
+    #[allow(dead_code)]
+    pub(crate) completions_endpoint: Option<String>,
     // The chat endpoint
-    pub chat_endpoint: Option<String>,
+    pub(crate) chat_endpoint: Option<String>,
     // The maximum requests per second
     #[serde(default = "max_requests_per_second_default")]
-    pub max_requests_per_second: f32,
+    pub(crate) max_requests_per_second: f32,
     // The model name
-    pub model: String,
+    pub(crate) model: String,
 }
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct Anthropic {
+pub(crate) struct Anthropic {
     // The auth token env var name
-    pub auth_token_env_var_name: Option<String>,
-    pub auth_token: Option<String>,
+    pub(crate) auth_token_env_var_name: Option<String>,
+    pub(crate) auth_token: Option<String>,
    // The completions endpoint
-    pub completions_endpoint: Option<String>,
+    #[allow(dead_code)]
+    pub(crate) completions_endpoint: Option<String>,
     // The chat endpoint
-    pub chat_endpoint: Option<String>,
+    pub(crate) chat_endpoint: Option<String>,
     // The maximum requests per second
     #[serde(default = "max_requests_per_second_default")]
-    pub max_requests_per_second: f32,
+    pub(crate) max_requests_per_second: f32,
     // The model name
-    pub model: String,
+    pub(crate) model: String,
 }
 
 #[derive(Clone, Debug, Deserialize)]
-pub struct Completion {
+pub(crate) struct Completion {
     // The model key to use
-    pub model: String,
+    pub(crate) model: String,
     // Args are deserialized by the backend using them
     #[serde(default)]
-    pub parameters: Kwargs,
+    pub(crate) parameters: Kwargs,
     // Parameters for post processing
     #[serde(default)]
-    pub post_process: PostProcess,
+    pub(crate) post_process: PostProcess,
 }
 
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct ValidConfig {
-    pub memory: ValidMemoryBackend,
-    pub models: HashMap<String, ValidModel>,
-    pub completion: Option<Completion>,
+pub(crate) struct ValidConfig {
+    pub(crate) memory: ValidMemoryBackend,
+    pub(crate) models: HashMap<String, ValidModel>,
+    pub(crate) completion: Option<Completion>,
 }
 
 #[derive(Clone, Debug, Deserialize, Default)]
-pub struct ValidClientParams {
+pub(crate) struct ValidClientParams {
     #[serde(alias = "rootUri")]
-    pub root_uri: Option<String>,
+    pub(crate) root_uri: Option<String>,
 }
 
 #[derive(Clone, Debug)]
 pub struct Config {
-    pub config: ValidConfig,
-    pub client_params: ValidClientParams,
+    pub(crate) config: ValidConfig,
+    pub(crate) client_params: ValidClientParams,
 }
 
 impl Config {
@@ -4,7 +4,7 @@ use tracing::{error, instrument};
 
 use crate::config::{self, Config};
 
-pub struct Crawl {
+pub(crate) struct Crawl {
     crawl_config: config::Crawl,
     config: Config,
     crawled_file_types: HashSet<String>,
@@ -12,7 +12,7 @@ pub struct Crawl {
 }
 
 impl Crawl {
-    pub fn new(crawl_config: config::Crawl, config: Config) -> Self {
+    pub(crate) fn new(crawl_config: config::Crawl, config: Config) -> Self {
         Self {
             crawl_config,
             config,
@@ -37,11 +37,10 @@ impl Crawl {
         }
 
         let extension_to_match = triggered_file
-            .map(|tf| {
+            .and_then(|tf| {
                 let path = std::path::Path::new(&tf);
                 path.extension().map(|f| f.to_str().map(|f| f.to_owned()))
             })
-            .flatten()
             .flatten();
 
         if let Some(extension_to_match) = &extension_to_match {
@@ -70,7 +69,7 @@ impl Crawl {
             }
         } else {
             match (
-                path.extension().map(|pe| pe.to_str()).flatten(),
+                path.extension().and_then(|pe| pe.to_str()),
                 &extension_to_match,
             ) {
                 (Some(path_extension), Some(extension_to_match)) => {
@@ -4,28 +4,28 @@ use serde_json::Value;
 
 use crate::config;
 
-pub enum Generation {}
+pub(crate) enum Generation {}
 
 #[derive(Debug, Clone, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
-pub struct GenerationParams {
+pub(crate) struct GenerationParams {
     // This field was "mixed-in" from TextDocumentPositionParams
     #[serde(flatten)]
-    pub text_document_position: TextDocumentPositionParams,
+    pub(crate) text_document_position: TextDocumentPositionParams,
     // The model key to use
-    pub model: String,
+    pub(crate) model: String,
     #[serde(default)]
     // Args are deserialized by the backend using them
-    pub parameters: Value,
+    pub(crate) parameters: Value,
     // Parameters for post processing
     #[serde(default)]
-    pub post_process: config::PostProcess,
+    pub(crate) post_process: config::PostProcess,
 }
 
 #[derive(Debug, Clone, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
-pub struct GenerateResult {
-    pub generated_text: String,
+pub(crate) struct GenerateResult {
+    pub(crate) generated_text: String,
 }
 
 impl lsp_types::request::Request for Generation {
@@ -1,7 +1,7 @@
 use lsp_types::{ProgressToken, TextDocumentPositionParams};
 use serde::{Deserialize, Serialize};
 
-pub enum GenerationStream {}
+pub(crate) enum GenerationStream {}
 
 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
@@ -15,9 +15,9 @@ pub struct GenerationStreamParams {
 
 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
-pub struct GenerationStreamResult {
-    pub generated_text: String,
-    pub partial_result_token: ProgressToken,
+pub(crate) struct GenerationStreamResult {
+    pub(crate) generated_text: String,
+    pub(crate) partial_result_token: ProgressToken,
 }
 
 impl lsp_types::request::Request for GenerationStream {
@@ -1,2 +1,2 @@
-pub mod generation;
-pub mod generation_stream;
+pub(crate) mod generation;
+pub(crate) mod generation_stream;
@@ -17,12 +17,12 @@ use crate::{
 use super::{ContextAndCodePrompt, FIMPrompt, MemoryBackend, MemoryRunParams, Prompt, PromptType};
 
 #[derive(Default)]
-pub struct AdditionalFileStoreParams {
+pub(crate) struct AdditionalFileStoreParams {
     build_tree: bool,
 }
 
 impl AdditionalFileStoreParams {
-    pub fn new(build_tree: bool) -> Self {
+    pub(crate) fn new(build_tree: bool) -> Self {
         Self { build_tree }
     }
 }
@@ -47,7 +47,7 @@ impl File {
     }
 }
 
-pub struct FileStore {
+pub(crate) struct FileStore {
     params: AdditionalFileStoreParams,
     file_map: Mutex<HashMap<String, File>>,
     accessed_files: Mutex<IndexSet<String>>,
@@ -55,7 +55,10 @@ pub struct FileStore {
 }
 
 impl FileStore {
-    pub fn new(mut file_store_config: config::FileStore, config: Config) -> anyhow::Result<Self> {
+    pub(crate) fn new(
+        mut file_store_config: config::FileStore,
+        config: Config,
+    ) -> anyhow::Result<Self> {
         let crawl = file_store_config
             .crawl
             .take()
@@ -72,7 +75,7 @@ impl FileStore {
         Ok(s)
     }
 
-    pub fn new_with_params(
+    pub(crate) fn new_with_params(
         mut file_store_config: config::FileStore,
         config: Config,
         params: AdditionalFileStoreParams,
@@ -192,7 +195,7 @@ impl FileStore {
         Ok((rope, cursor_index))
     }
 
-    pub fn get_characters_around_position(
+    pub(crate) fn get_characters_around_position(
         &self,
         position: &TextDocumentPositionParams,
         characters: usize,
@@ -216,7 +219,7 @@ impl FileStore {
         Ok(rope_slice.to_string())
     }
 
-    pub fn build_code(
+    pub(crate) fn build_code(
         &self,
         position: &TextDocumentPositionParams,
         prompt_type: PromptType,
@@ -272,15 +275,18 @@ impl FileStore {
         })
     }
 
-    pub fn file_map(&self) -> &Mutex<HashMap<String, File>> {
+    pub(crate) fn file_map(&self) -> &Mutex<HashMap<String, File>> {
         &self.file_map
     }
 
-    pub fn contains_file(&self, uri: &str) -> bool {
+    pub(crate) fn contains_file(&self, uri: &str) -> bool {
         self.file_map.lock().contains_key(uri)
     }
 
-    pub fn position_to_byte(&self, position: &TextDocumentPositionParams) -> anyhow::Result<usize> {
+    pub(crate) fn position_to_byte(
+        &self,
+        position: &TextDocumentPositionParams,
+    ) -> anyhow::Result<usize> {
         let file_map = self.file_map.lock();
         let uri = position.text_document.uri.to_string();
         let file = file_map
@@ -322,7 +328,7 @@ impl MemoryBackend for FileStore {
         prompt_type: PromptType,
         params: &Value,
     ) -> anyhow::Result<Prompt> {
-        let params: MemoryRunParams = params.try_into()?;
+        let params: MemoryRunParams = params.into();
         self.build_code(position, prompt_type, params, true)
     }
 
@@ -6,7 +6,7 @@ use serde_json::Value;
 
 use crate::config::{Config, ValidMemoryBackend};
 
-pub mod file_store;
+pub(crate) mod file_store;
 mod postgresml;
 
 #[derive(Clone, Debug)]
@@ -16,9 +16,9 @@ pub enum PromptType {
 }
 
 #[derive(Clone)]
-pub struct MemoryRunParams {
-    pub is_for_chat: bool,
-    pub max_context: usize,
+pub(crate) struct MemoryRunParams {
+    pub(crate) is_for_chat: bool,
+    pub(crate) max_context: usize,
 }
 
 impl From<&Value> for MemoryRunParams {
@@ -84,7 +84,7 @@ async fn split_and_upsert_file(
 }
 
 #[derive(Clone)]
-pub struct PostgresML {
+pub(crate) struct PostgresML {
     config: Config,
     postgresml_config: config::PostgresML,
     file_store: Arc<FileStore>,
@@ -240,12 +240,7 @@ impl PostgresML {
                 })
                 .collect();
             if let Err(e) = task_collection
-                .delete_documents(
-                    json!({
-                        "$or": delete_or_statements
-                    })
-                    .into(),
-                )
+                .delete_documents(json!({ "$or": delete_or_statements }).into())
                 .await
                 .context("PGML - error deleting documents")
             {
@@ -255,15 +250,14 @@ impl PostgresML {
             let documents: Vec<pgml::types::Json> = chunks
                 .into_iter()
                 .zip(&file_uris)
-                .map(|(chunks, uri)| {
+                .flat_map(|(chunks, uri)| {
                     chunks
                         .into_iter()
                         .map(|chunk| {
-                            chunk_to_document(&uri, chunk, task_root_uri.as_deref())
+                            chunk_to_document(uri, chunk, task_root_uri.as_deref())
                         })
                         .collect::<Vec<Value>>()
                 })
-                .flatten()
                 .map(|f: Value| f.into())
                 .collect();
             if let Err(e) = task_collection
@@ -365,15 +359,11 @@ impl PostgresML {
                     current_chunks_bytes += contents.len();
                     let chunks: Vec<pgml::types::Json> = self
                         .splitter
-                        .split_file_contents(&uri, &contents)
+                        .split_file_contents(uri, &contents)
                         .into_iter()
                         .map(|chunk| {
-                            chunk_to_document(
-                                &uri,
-                                chunk,
-                                self.config.client_params.root_uri.as_deref(),
-                            )
-                            .into()
+                            chunk_to_document(uri, chunk, self.config.client_params.root_uri.as_deref())
+                                .into()
                         })
                         .collect();
                     chunks_to_upsert.extend(chunks);
@@ -389,7 +379,7 @@ impl PostgresML {
                 }
             }
             // Upsert any remaining chunks
-            if chunks_to_upsert.len() > 0 {
+            if !chunks_to_upsert.is_empty() {
                 collection
                     .upsert_documents(chunks_to_upsert, None)
                     .await
@@ -479,7 +469,7 @@ impl PostgresML {
             Ok(true)
         })?;
         // Upsert any remaining documents
-        if documents.len() > 0 {
+        if !documents.is_empty() {
             let mut collection = self.collection.clone();
             TOKIO_RUNTIME.spawn(async move {
                 if let Err(e) = collection
@@ -510,7 +500,7 @@ impl MemoryBackend for PostgresML {
         prompt_type: PromptType,
         params: &Value,
     ) -> anyhow::Result<Prompt> {
-        let params: MemoryRunParams = params.try_into()?;
+        let params: MemoryRunParams = params.into();
         let chunk_size = self.splitter.chunk_size();
         let total_allowed_characters = tokens_to_estimated_characters(params.max_context);
 
@@ -535,8 +525,7 @@ impl MemoryBackend for PostgresML {
             .postgresml_config
             .embedding_model
             .as_ref()
-            .map(|m| m.query_parameters.clone())
-            .flatten()
+            .and_then(|m| m.query_parameters.clone())
         {
             Some(query_parameters) => query_parameters,
             None => json!({
@@ -602,7 +591,7 @@ impl MemoryBackend for PostgresML {
         Prompt::ContextAndCode(ContextAndCodePrompt::new(
             context.to_owned(),
             format_file_excerpt(
-                &position.text_document.uri.to_string(),
+                position.text_document.uri.as_str(),
                 &context_and_code.code,
                 self.config.client_params.root_uri.as_deref(),
             ),
@@ -13,7 +13,7 @@ use crate::{
 };
 
 #[derive(Debug)]
-pub struct PromptRequest {
+pub(crate) struct PromptRequest {
     position: TextDocumentPositionParams,
     prompt_type: PromptType,
     params: Value,
@@ -21,7 +21,7 @@ pub struct PromptRequest {
 }
 
 impl PromptRequest {
-    pub fn new(
+    pub(crate) fn new(
         position: TextDocumentPositionParams,
         prompt_type: PromptType,
         params: Value,
@@ -37,13 +37,13 @@ impl PromptRequest {
 }
 
 #[derive(Debug)]
-pub struct FilterRequest {
+pub(crate) struct FilterRequest {
     position: TextDocumentPositionParams,
     tx: tokio::sync::oneshot::Sender<String>,
 }
 
 impl FilterRequest {
-    pub fn new(
+    pub(crate) fn new(
         position: TextDocumentPositionParams,
         tx: tokio::sync::oneshot::Sender<String>,
     ) -> Self {
@@ -51,7 +51,7 @@ impl FilterRequest {
     }
 }
 
-pub enum WorkerRequest {
+pub(crate) enum WorkerRequest {
     FilterText(FilterRequest),
     Prompt(PromptRequest),
     DidOpenTextDocument(DidOpenTextDocumentParams),
@@ -115,7 +115,7 @@ fn do_run(
     }
 }
 
-pub fn run(
+pub(crate) fn run(
     memory_backend: Box<dyn MemoryBackend + Send + Sync>,
     rx: std::sync::mpsc::Receiver<WorkerRequest>,
 ) {
@@ -2,20 +2,20 @@ use crate::{config, memory_backends::file_store::File};
 
 use super::{ByteRange, Chunk, Splitter};
 
-pub struct TextSplitter {
+pub(crate) struct TextSplitter {
     chunk_size: usize,
     splitter: text_splitter::TextSplitter<text_splitter::Characters>,
 }
 
 impl TextSplitter {
-    pub fn new(config: config::TextSplitter) -> Self {
+    pub(crate) fn new(config: config::TextSplitter) -> Self {
         Self {
             chunk_size: config.chunk_size,
             splitter: text_splitter::TextSplitter::new(config.chunk_size),
         }
     }
 
-    pub fn new_with_chunk_size(chunk_size: usize) -> Self {
+    pub(crate) fn new_with_chunk_size(chunk_size: usize) -> Self {
         Self {
             chunk_size,
             splitter: text_splitter::TextSplitter::new(chunk_size),
@@ -6,14 +6,14 @@ use crate::{config, memory_backends::file_store::File, utils::parse_tree};
 
 use super::{text_splitter::TextSplitter, ByteRange, Chunk, Splitter};
 
-pub struct TreeSitter {
+pub(crate) struct TreeSitter {
     chunk_size: usize,
     splitter: TreeSitterCodeSplitter,
     text_splitter: TextSplitter,
 }
 
 impl TreeSitter {
-    pub fn new(config: config::TreeSitter) -> anyhow::Result<Self> {
+    pub(crate) fn new(config: config::TreeSitter) -> anyhow::Result<Self> {
         let text_splitter = TextSplitter::new_with_chunk_size(config.chunk_size);
         Ok(Self {
             chunk_size: config.chunk_size,
@@ -30,18 +30,18 @@ const fn temperature_default() -> f32 {
 
 // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
 #[derive(Debug, Deserialize)]
-pub struct AnthropicRunParams {
+pub(crate) struct AnthropicRunParams {
     system: String,
     messages: Vec<ChatMessage>,
     #[serde(default = "max_tokens_default")]
-    pub max_tokens: usize,
+    pub(crate) max_tokens: usize,
     #[serde(default = "top_p_default")]
-    pub top_p: f32,
+    pub(crate) top_p: f32,
     #[serde(default = "temperature_default")]
-    pub temperature: f32,
+    pub(crate) temperature: f32,
 }
 
-pub struct Anthropic {
+pub(crate) struct Anthropic {
     config: config::Anthropic,
 }
 
@@ -56,11 +56,11 @@ struct AnthropicChatResponse {
     error: Option<Value>,
     #[serde(default)]
     #[serde(flatten)]
-    pub other: HashMap<String, Value>,
+    pub(crate) other: HashMap<String, Value>,
 }
 
 impl Anthropic {
-    pub fn new(config: config::Anthropic) -> Self {
+    pub(crate) fn new(config: config::Anthropic) -> Self {
         Self { config }
     }
 
@@ -39,7 +39,7 @@ const fn max_tokens_default() -> usize {
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
 struct Part {
-    pub text: String,
+    pub(crate) text: String,
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
@@ -55,37 +55,33 @@ impl GeminiContent {
 }
 
 #[derive(Debug, Deserialize, Serialize, Clone)]
+#[serde(rename = "camelCase")]
 #[serde(deny_unknown_fields)]
-pub struct GeminiGenerationConfig {
-    #[serde(rename = "stopSequences")]
+pub(crate) struct GeminiGenerationConfig {
     #[serde(default)]
-    pub stop_sequences: Vec<String>,
-    #[serde(rename = "maxOutputTokens")]
+    pub(crate) stop_sequences: Vec<String>,
     #[serde(default = "max_tokens_default")]
-    pub max_output_tokens: usize,
-    pub temperature: Option<f32>,
-    #[serde(rename = "topP")]
-    pub top_p: Option<f32>,
-    #[serde(rename = "topK")]
-    pub top_k: Option<f32>,
+    pub(crate) max_output_tokens: usize,
+    pub(crate) temperature: Option<f32>,
+    pub(crate) top_p: Option<f32>,
+    pub(crate) top_k: Option<f32>,
 }
 
 // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
 #[derive(Debug, Deserialize, Serialize, Clone)]
-pub struct GeminiRunParams {
+#[serde(rename = "camelCase")]
+pub(crate) struct GeminiRunParams {
     contents: Vec<GeminiContent>,
-    #[serde(rename = "systemInstruction")]
     system_instruction: GeminiContent,
-    #[serde(rename = "generationConfig")]
     generation_config: Option<GeminiGenerationConfig>,
 }
 
-pub struct Gemini {
+pub(crate) struct Gemini {
     configuration: config::Gemini,
 }
 
 impl Gemini {
-    pub fn new(configuration: config::Gemini) -> Self {
+    pub(crate) fn new(configuration: config::Gemini) -> Self {
         Self { configuration }
     }
 
|
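The Gemini hunk also swaps per-field `#[serde(rename = "...")]` attributes for a single struct-level `#[serde(rename_all = "camelCase")]`, which maps every snake_case field to its camelCase wire name in one place. A small sketch of the equivalence, assuming `serde` and `serde_json` as dependencies (type and values are illustrative):

    use serde::Serialize;

    #[derive(Serialize)]
    #[serde(rename_all = "camelCase")]
    struct GenerationConfig {
        max_output_tokens: usize, // serialized as "maxOutputTokens"
    }

    fn main() {
        let json = serde_json::to_string(&GenerationConfig { max_output_tokens: 8 }).unwrap();
        assert_eq!(json, r#"{"maxOutputTokens":8}"#);
    }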
@@ -26,25 +26,25 @@ const fn temperature_default() -> f32 {
 
 // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
 #[derive(Debug, Deserialize)]
-pub struct MistralFIMRunParams {
+pub(crate) struct MistralFIMRunParams {
     #[serde(default = "max_tokens_default")]
-    pub max_tokens: usize,
+    pub(crate) max_tokens: usize,
     #[serde(default = "top_p_default")]
-    pub top_p: f32,
+    pub(crate) top_p: f32,
     #[serde(default = "temperature_default")]
-    pub temperature: f32,
+    pub(crate) temperature: f32,
-    pub min_tokens: Option<u64>,
+    pub(crate) min_tokens: Option<u64>,
-    pub random_seed: Option<u64>,
+    pub(crate) random_seed: Option<u64>,
     #[serde(default)]
-    pub stop: Vec<String>,
+    pub(crate) stop: Vec<String>,
 }
 
-pub struct MistralFIM {
+pub(crate) struct MistralFIM {
     config: config::MistralFIM,
 }
 
 impl MistralFIM {
-    pub fn new(config: config::MistralFIM) -> Self {
+    pub(crate) fn new(config: config::MistralFIM) -> Self {
         Self { config }
     }
 
@@ -16,8 +16,8 @@ use super::TransformerBackend;
 
 // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
 #[derive(Debug, Deserialize)]
-pub struct OllamaRunParams {
+pub(crate) struct OllamaRunParams {
-    pub fim: Option<FIM>,
+    pub(crate) fim: Option<FIM>,
     messages: Option<Vec<ChatMessage>>,
     #[serde(default)]
     options: HashMap<String, Value>,
@@ -26,7 +26,7 @@ pub struct OllamaRunParams {
     keep_alive: Option<String>,
 }
 
-pub struct Ollama {
+pub(crate) struct Ollama {
     configuration: config::Ollama,
 }
 
@@ -38,22 +38,22 @@ const fn temperature_default() -> f32 {
 
 // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
 #[derive(Debug, Deserialize)]
-pub struct OpenAIRunParams {
+pub(crate) struct OpenAIRunParams {
-    pub fim: Option<FIM>,
+    pub(crate) fim: Option<FIM>,
     messages: Option<Vec<ChatMessage>>,
     #[serde(default = "max_tokens_default")]
-    pub max_tokens: usize,
+    pub(crate) max_tokens: usize,
     #[serde(default = "top_p_default")]
-    pub top_p: f32,
+    pub(crate) top_p: f32,
     #[serde(default = "presence_penalty_default")]
-    pub presence_penalty: f32,
+    pub(crate) presence_penalty: f32,
     #[serde(default = "frequency_penalty_default")]
-    pub frequency_penalty: f32,
+    pub(crate) frequency_penalty: f32,
     #[serde(default = "temperature_default")]
-    pub temperature: f32,
+    pub(crate) temperature: f32,
 }
 
-pub struct OpenAI {
+pub(crate) struct OpenAI {
     configuration: config::OpenAI,
 }
 
@@ -68,27 +68,27 @@ struct OpenAICompletionsResponse {
     error: Option<Value>,
     #[serde(default)]
     #[serde(flatten)]
-    pub other: HashMap<String, Value>,
+    pub(crate) other: HashMap<String, Value>,
 }
 
 #[derive(Debug, Deserialize, Serialize)]
-pub struct OpenAIChatMessage {
+pub(crate) struct OpenAIChatMessage {
-    pub role: String,
+    pub(crate) role: String,
-    pub content: String,
+    pub(crate) content: String,
 }
 
 #[derive(Deserialize)]
-pub struct OpenAIChatChoices {
+pub(crate) struct OpenAIChatChoices {
-    pub message: OpenAIChatMessage,
+    pub(crate) message: OpenAIChatMessage,
 }
 
 #[derive(Deserialize)]
-pub struct OpenAIChatResponse {
+pub(crate) struct OpenAIChatResponse {
-    pub choices: Option<Vec<OpenAIChatChoices>>,
+    pub(crate) choices: Option<Vec<OpenAIChatChoices>>,
-    pub error: Option<Value>,
+    pub(crate) error: Option<Value>,
     #[serde(default)]
     #[serde(flatten)]
-    pub other: HashMap<String, Value>,
+    pub(crate) other: HashMap<String, Value>,
 }
 
 impl OpenAI {
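A pattern worth noting in these response types: the trailing `other: HashMap<String, Value>` marked `#[serde(flatten)]` soaks up any response keys that are not modeled explicitly, and `#[serde(default)]` keeps deserialization working when there are none. A minimal sketch, assuming `serde` and `serde_json` as dependencies (field names illustrative):

    use std::collections::HashMap;

    use serde::Deserialize;
    use serde_json::Value;

    #[derive(Deserialize)]
    struct Response {
        id: String,
        #[serde(default)]
        #[serde(flatten)]
        other: HashMap<String, Value>,
    }

    fn main() {
        let r: Response = serde_json::from_str(r#"{"id":"a","usage":{"tokens":3}}"#).unwrap();
        assert_eq!(r.id, "a");
        assert!(r.other.contains_key("usage")); // unmodeled key is captured, not dropped
    }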
@@ -20,25 +20,25 @@ use crate::transformer_backends::TransformerBackend;
 use crate::utils::{ToResponseError, TOKIO_RUNTIME};
 
 #[derive(Clone, Debug)]
-pub struct CompletionRequest {
+pub(crate) struct CompletionRequest {
     id: RequestId,
     params: CompletionParams,
 }
 
 impl CompletionRequest {
-    pub fn new(id: RequestId, params: CompletionParams) -> Self {
+    pub(crate) fn new(id: RequestId, params: CompletionParams) -> Self {
         Self { id, params }
     }
 }
 
 #[derive(Clone, Debug)]
-pub struct GenerationRequest {
+pub(crate) struct GenerationRequest {
     id: RequestId,
     params: GenerationParams,
 }
 
 impl GenerationRequest {
-    pub fn new(id: RequestId, params: GenerationParams) -> Self {
+    pub(crate) fn new(id: RequestId, params: GenerationParams) -> Self {
         Self { id, params }
     }
 }
@@ -58,7 +58,7 @@ impl GenerationStreamRequest {
 }
 
 #[derive(Clone, Debug)]
-pub enum WorkerRequest {
+pub(crate) enum WorkerRequest {
     Completion(CompletionRequest),
     Generation(GenerationRequest),
     GenerationStream(GenerationStreamRequest),
@@ -159,7 +159,7 @@ fn post_process_response(
     }
 }
 
-pub fn run(
+pub(crate) fn run(
     transformer_backends: HashMap<String, Box<dyn TransformerBackend + Send + Sync>>,
     memory_tx: std::sync::mpsc::Sender<memory_worker::WorkerRequest>,
     transformer_rx: std::sync::mpsc::Receiver<WorkerRequest>,
@@ -338,7 +338,7 @@ async fn do_completion(
     let mut response = transformer_backend.do_completion(&prompt, params).await?;
 
     if let Some(post_process) = config.get_completions_post_process() {
-        response.insert_text = post_process_response(response.insert_text, &prompt, &post_process);
+        response.insert_text = post_process_response(response.insert_text, &prompt, post_process);
     }
 
     // Build and send the response
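The last hunk is a borrow cleanup: assuming `get_completions_post_process()` already hands back a reference, wrapping it as `&post_process` produced a double reference that only compiled through auto-deref, which is the pattern `clippy::needless_borrow` warns about. The same fix in miniature (hypothetical function, not from this crate):

    fn apply(rule: &str) -> usize {
        rule.len()
    }

    fn main() {
        let rule: &str = "post-process";
        assert_eq!(apply(&rule), apply(rule)); // `&rule` is a needless `&&str` borrow
    }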
@@ -6,7 +6,7 @@ use tree_sitter::Tree;
 
 use crate::{config::ChatMessage, memory_backends::ContextAndCodePrompt, splitters::Chunk};
 
-pub static TOKIO_RUNTIME: Lazy<runtime::Runtime> = Lazy::new(|| {
+pub(crate) static TOKIO_RUNTIME: Lazy<runtime::Runtime> = Lazy::new(|| {
     runtime::Builder::new_multi_thread()
         .worker_threads(4)
         .enable_all()
@@ -14,7 +14,7 @@ pub static TOKIO_RUNTIME: Lazy<runtime::Runtime> = Lazy::new(|| {
         .expect("Error building tokio runtime")
 });
 
-pub trait ToResponseError {
+pub(crate) trait ToResponseError {
     fn to_response_error(&self, code: i32) -> ResponseError;
 }
 
@@ -28,11 +28,11 @@ impl ToResponseError for anyhow::Error {
     }
 }
 
-pub fn tokens_to_estimated_characters(tokens: usize) -> usize {
+pub(crate) fn tokens_to_estimated_characters(tokens: usize) -> usize {
     tokens * 4
 }
 
-pub fn format_chat_messages(
+pub(crate) fn format_chat_messages(
     messages: &[ChatMessage],
     prompt: &ContextAndCodePrompt,
 ) -> Vec<ChatMessage> {
@@ -47,24 +47,28 @@ pub fn format_chat_messages(
         .collect()
 }
 
-pub fn format_context_code_in_str(s: &str, context: &str, code: &str) -> String {
+pub(crate) fn format_context_code_in_str(s: &str, context: &str, code: &str) -> String {
     s.replace("{CONTEXT}", context).replace("{CODE}", code)
 }
 
-pub fn format_context_code(context: &str, code: &str) -> String {
+pub(crate) fn format_context_code(context: &str, code: &str) -> String {
     format!("{context}\n\n{code}")
 }
 
-pub fn chunk_to_id(uri: &str, chunk: &Chunk) -> String {
+pub(crate) fn chunk_to_id(uri: &str, chunk: &Chunk) -> String {
     format!("{uri}#{}-{}", chunk.range.start_byte, chunk.range.end_byte)
 }
 
-pub fn parse_tree(uri: &str, contents: &str, old_tree: Option<&Tree>) -> anyhow::Result<Tree> {
+pub(crate) fn parse_tree(
+    uri: &str,
+    contents: &str,
+    old_tree: Option<&Tree>,
+) -> anyhow::Result<Tree> {
     let path = std::path::Path::new(uri);
     let extension = path.extension().map(|x| x.to_string_lossy());
     let extension = extension.as_deref().unwrap_or("");
     let mut parser = utils_tree_sitter::get_parser_for_extension(extension)?;
     parser
-        .parse(&contents, old_tree)
+        .parse(contents, old_tree)
         .with_context(|| format!("parsing tree failed for {uri}"))
 }
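The string helpers above are easiest to pin down with concrete values; a sketch of their behavior, written as an in-crate unit test since they are now `pub(crate)`:

    #[cfg(test)]
    mod helper_sketch {
        use super::*;

        #[test]
        fn behavior() {
            // ~4 characters per token heuristic
            assert_eq!(tokens_to_estimated_characters(512), 2048);
            // placeholder substitution
            assert_eq!(
                format_context_code_in_str("{CONTEXT} | {CODE}", "ctx", "code"),
                "ctx | code"
            );
            // blank-line join
            assert_eq!(format_context_code("ctx", "code"), "ctx\n\ncode");
        }
    }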
@@ -55,11 +55,7 @@ impl TreeSitterCodeSplitter {
         }
     }
 
-    pub fn split<'a, 'b, 'c>(
-        &'a self,
-        tree: &'b Tree,
-        utf8: &'c [u8],
-    ) -> Result<Vec<Chunk<'c>>, SplitError> {
+    pub fn split<'c>(&self, tree: &Tree, utf8: &'c [u8]) -> Result<Vec<Chunk<'c>>, SplitError> {
         let cursor = tree.walk();
         Ok(self
             .split_recursive(cursor, utf8)?
@@ -68,7 +64,7 @@ impl TreeSitterCodeSplitter {
             // Let's combine some of our smaller chunks together
             // We also want to do this in reverse as it (seems) to make more sense to combine code slices from bottom to top
             .try_fold(vec![], |mut acc, current| {
-                if acc.len() == 0 {
+                if acc.is_empty() {
                     acc.push(current);
                     Ok::<_, SplitError>(acc)
                 } else {
@@ -94,9 +90,9 @@ impl TreeSitterCodeSplitter {
             .collect())
     }
 
-    fn split_recursive<'a, 'b, 'c>(
-        &'a self,
-        mut cursor: TreeCursor<'b>,
+    fn split_recursive<'c>(
+        &self,
+        mut cursor: TreeCursor<'_>,
         utf8: &'c [u8],
     ) -> Result<Vec<Chunk<'c>>, SplitError> {
         let node = cursor.node();
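Both splitter hunks are lint-driven: `acc.len() == 0` becomes the idiomatic `acc.is_empty()`, and the lifetimes `'a` and `'b` are dropped because Rust elides lifetimes that are used in only one place; only `'c` must stay named, since it ties the returned `Chunk<'c>` values to the `utf8` input. The same elision in miniature (signatures illustrative, not from this crate):

    // Fully spelled out: 'a and 'b each appear exactly once and buy nothing.
    fn pick_explicit<'a, 'b, 'c>(_ctx: &'a str, _tmp: &'b str, data: &'c [u8]) -> &'c [u8] {
        data
    }

    // Equivalent after elision: only the lifetime in the return type keeps a name.
    fn pick<'c>(_ctx: &str, _tmp: &str, data: &'c [u8]) -> &'c [u8] {
        data
    }

    fn main() {
        assert_eq!(pick_explicit("a", "b", b"xy"), pick("a", "b", b"xy"));
    }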
19
xtask/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
[package]
name = "xtask"
version = "0.1.0"
publish = false
edition.workspace = true
license.workspace = true
authors.workspace = true

[dependencies]
anyhow = "1"
flate2 = "1"
write-json = "0.1"
xshell = "0.2"
xflags = "0.3"
time = { version = "0.3", default-features = false }
zip = { version = "0.6", default-features = false, features = [
    "deflate",
    "time",
] }
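The `edition.workspace = true` style keys inherit their values from the `[workspace.package]` table in the repository's root `Cargo.toml`, keeping the xtask crate in lockstep with the main crates. Roughly (a sketch; the actual root manifest and its values are not shown in this diff):

    [workspace.package]
    edition = "..."
    license = "..."
    authors = ["..."]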
147
xtask/src/dist.rs
Normal file
@@ -0,0 +1,147 @@
use std::{
    env,
    fs::File,
    io::{self, BufWriter},
    path::{Path, PathBuf},
};

use flate2::{write::GzEncoder, Compression};
use time::OffsetDateTime;
use xshell::{cmd, Shell};
use zip::{write::FileOptions, DateTime, ZipWriter};

use crate::{flags, project_root};

impl flags::Dist {
    pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
        let branch = sh.var("GITHUB_REF").unwrap_or_default();
        let release = if branch.starts_with("refs/heads/release/") {
            branch.replace("refs/heads/release/", "")
        } else {
            "0.0.0".to_owned()
        };
        let project_root = project_root();
        let target = Target::get(&project_root);
        let dist = project_root.join("dist");
        sh.remove_path(&dist)?;
        sh.create_dir(&dist)?;

        dist_server(sh, &release, &target)?;
        Ok(())
    }
}

fn dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()> {
    let _e = sh.push_env("CFG_RELEASE", release);
    let _e = sh.push_env("CARGO_PROFILE_RELEASE_LTO", "thin");

    // Uncomment to enable debug info for releases. Note that:
    // * debug info is split on windows and macs, so it does nothing for those platforms,
    // * on Linux, this blows up the binary size from 8MB to 43MB, which is unreasonable.
    // let _e = sh.push_env("CARGO_PROFILE_RELEASE_DEBUG", "1");

    if target.name.contains("-linux-") {
        env::set_var("CC", "clang");
    }

    let target_name = &target.name;
    cmd!(sh, "cargo build --manifest-path ./crates/lsp-ai/Cargo.toml --bin lsp-ai --target {target_name} --release").run()?;

    let dst = Path::new("dist").join(&target.artifact_name);
    gzip(&target.server_path, &dst.with_extension("gz"))?;
    if target_name.contains("-windows-") {
        zip(
            &target.server_path,
            target.symbols_path.as_ref(),
            &dst.with_extension("zip"),
        )?;
    }

    Ok(())
}

fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> {
    let mut encoder = GzEncoder::new(File::create(dest_path)?, Compression::best());
    let mut input = io::BufReader::new(File::open(src_path)?);
    io::copy(&mut input, &mut encoder)?;
    encoder.finish()?;
    Ok(())
}

fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> anyhow::Result<()> {
    let file = File::create(dest_path)?;
    let mut writer = ZipWriter::new(BufWriter::new(file));
    writer.start_file(
        src_path.file_name().unwrap().to_str().unwrap(),
        FileOptions::default()
            .last_modified_time(
                DateTime::try_from(OffsetDateTime::from(
                    std::fs::metadata(src_path)?.modified()?,
                ))
                .unwrap(),
            )
            .unix_permissions(0o755)
            .compression_method(zip::CompressionMethod::Deflated)
            .compression_level(Some(9)),
    )?;
    let mut input = io::BufReader::new(File::open(src_path)?);
    io::copy(&mut input, &mut writer)?;
    if let Some(symbols_path) = symbols_path {
        writer.start_file(
            symbols_path.file_name().unwrap().to_str().unwrap(),
            FileOptions::default()
                .last_modified_time(
                    DateTime::try_from(OffsetDateTime::from(
                        std::fs::metadata(src_path)?.modified()?,
                    ))
                    .unwrap(),
                )
                .compression_method(zip::CompressionMethod::Deflated)
                .compression_level(Some(9)),
        )?;
        let mut input = io::BufReader::new(File::open(symbols_path)?);
        io::copy(&mut input, &mut writer)?;
    }
    writer.finish()?;
    Ok(())
}

struct Target {
    name: String,
    server_path: PathBuf,
    symbols_path: Option<PathBuf>,
    artifact_name: String,
}

impl Target {
    fn get(project_root: &Path) -> Self {
        let name = match env::var("LSP_AI_TARGET") {
            Ok(target) => target,
            _ => {
                if cfg!(target_os = "linux") {
                    "x86_64-unknown-linux-gnu".to_string()
                } else if cfg!(target_os = "windows") {
                    "x86_64-pc-windows-msvc".to_string()
                } else if cfg!(target_os = "macos") {
                    "x86_64-apple-darwin".to_string()
                } else {
                    panic!("Unsupported OS, maybe try setting LSP_AI_TARGET")
                }
            }
        };
        let out_path = project_root.join("target").join(&name).join("release");
        let (exe_suffix, symbols_path) = if name.contains("-windows-") {
            (".exe".into(), Some(out_path.join("lsp_ai.pdb")))
        } else {
            (String::new(), None)
        };
        let server_path = out_path.join(format!("lsp-ai{exe_suffix}"));
        let artifact_name = format!("lsp-ai-{name}{exe_suffix}");
        Self {
            name,
            server_path,
            symbols_path,
            artifact_name,
        }
    }
}
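Tracing `Target::get` through on a Linux CI runner with no `LSP_AI_TARGET` override makes the packaging concrete (a worked reading of the code above, not output from a real run):

    target/x86_64-unknown-linux-gnu/release/lsp-ai    <- server_path after cargo build
    dist/lsp-ai-x86_64-unknown-linux-gnu.gz           <- gzip artifact

On Windows the artifact name ends in `.exe`, so `dst.with_extension("zip")` swaps that suffix to produce `dist/lsp-ai-x86_64-pc-windows-msvc.zip`, which additionally bundles the `lsp_ai.pdb` symbols.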
43
xtask/src/flags.rs
Normal file
@@ -0,0 +1,43 @@
#![allow(unreachable_pub)]

xflags::xflags! {
    src "./src/flags.rs"

    /// Run custom build command.
    cmd xtask {
        cmd dist {}
    }
}

// generated start
// The following code is generated by `xflags` macro.
// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate.
#[derive(Debug)]
pub struct Xtask {
    pub subcommand: XtaskCmd,
}

#[derive(Debug)]
pub enum XtaskCmd {
    Dist(Dist),
}

#[derive(Debug)]
pub struct Dist;

impl Xtask {
    #[allow(dead_code)]
    pub fn from_env_or_exit() -> Self {
        Self::from_env_or_exit_()
    }

    #[allow(dead_code)]
    pub fn from_env() -> xflags::Result<Self> {
        Self::from_env_()
    }

    #[allow(dead_code)]
    pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> {
        Self::from_vec_(args)
    }
}
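Everything below the `generated start` marker is emitted by the `xflags` macro from the small DSL at the top of the file; after changing the DSL, the `env UPDATE_XFLAGS=1 cargo build` command from the comment rewrites that section in place. The net effect is a typed CLI: `Xtask::from_env_or_exit()` parses the process arguments into the `XtaskCmd::Dist` variant that `main` matches on.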
44
xtask/src/main.rs
Normal file
@@ -0,0 +1,44 @@
//! See <https://github.com/matklad/cargo-xtask/>.
//!
//! This binary defines various auxiliary build commands, which are not
//! expressible with just `cargo`.
//!
//! This binary is integrated into the `cargo` command line by using an alias in
//! `.cargo/config`.

#![warn(
    rust_2018_idioms,
    unused_lifetimes,
    semicolon_in_expressions_from_macros
)]

mod flags;

mod dist;

use std::{
    env,
    path::{Path, PathBuf},
};
use xshell::Shell;

fn main() -> anyhow::Result<()> {
    let flags = flags::Xtask::from_env_or_exit();

    let sh = &Shell::new()?;
    sh.change_dir(project_root());

    match flags.subcommand {
        flags::XtaskCmd::Dist(cmd) => cmd.run(sh),
    }
}

fn project_root() -> PathBuf {
    Path::new(
        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
    )
    .ancestors()
    .nth(1)
    .unwrap()
    .to_path_buf()
}
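Two details worth spelling out: `.ancestors()` yields the path itself first, so `.nth(1)` resolves to the parent of the xtask crate's `CARGO_MANIFEST_DIR`, i.e. the repository root, which `main` then makes the shell's working directory before dispatching. And given the cargo alias the module docs mention, the whole release flow is driven as (the override triple is just an example):

    cargo xtask dist
    LSP_AI_TARGET=aarch64-apple-darwin cargo xtask dist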