From 98cd60ba40ff66a131eae86863c46bed3fc6915f Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 10:49:26 +0200 Subject: [PATCH 01/18] feat: add release CI --- .github/actions/github-release/Dockerfile | 8 + .github/actions/github-release/README.md | 21 +++ .github/actions/github-release/action.yml | 15 ++ .github/actions/github-release/main.js | 144 ++++++++++++++++ .github/actions/github-release/package.json | 10 ++ .github/workflows/release.yml | 180 ++++++++++++++++++++ Cargo.lock | 78 ++++++++- Cargo.toml | 14 +- xtask/Cargo.toml | 19 +++ xtask/src/dist.rs | 147 ++++++++++++++++ xtask/src/flags.rs | 43 +++++ xtask/src/main.rs | 44 +++++ 12 files changed, 712 insertions(+), 11 deletions(-) create mode 100644 .github/actions/github-release/Dockerfile create mode 100644 .github/actions/github-release/README.md create mode 100644 .github/actions/github-release/action.yml create mode 100644 .github/actions/github-release/main.js create mode 100644 .github/actions/github-release/package.json create mode 100644 .github/workflows/release.yml create mode 100644 xtask/Cargo.toml create mode 100644 xtask/src/dist.rs create mode 100644 xtask/src/flags.rs create mode 100644 xtask/src/main.rs diff --git a/.github/actions/github-release/Dockerfile b/.github/actions/github-release/Dockerfile new file mode 100644 index 0000000..5849eac --- /dev/null +++ b/.github/actions/github-release/Dockerfile @@ -0,0 +1,8 @@ +FROM node:slim + +COPY . 
/action +WORKDIR /action + +RUN npm install --production + +ENTRYPOINT ["node", "/action/main.js"] diff --git a/.github/actions/github-release/README.md b/.github/actions/github-release/README.md new file mode 100644 index 0000000..14512c1 --- /dev/null +++ b/.github/actions/github-release/README.md @@ -0,0 +1,21 @@ +# github-release + +Copy-pasted from +https://github.com/rust-lang/rust-analyzer/tree/2df30e1e07eafc1de0359566423f471920693a34/.github/actions/github-release + +An action used to publish GitHub releases for `wasmtime`. + +As of the time of this writing there's a few actions floating around which +perform github releases but they all tend to have their set of drawbacks. +Additionally nothing handles deleting releases which we need for our rolling +`dev` release. + +To handle all this, this action rolls its own implementation using the +actions/toolkit repository and packages published there. These run in a Docker +container and take various inputs to orchestrate the release from the build. + +More comments can be found in `main.js`. + +Testing this is really hard. If you want to try though run `npm install` and +then `node main.js`. You'll have to configure a bunch of env vars though to get +anything reasonably working. 
diff --git a/.github/actions/github-release/action.yml b/.github/actions/github-release/action.yml new file mode 100644 index 0000000..51a074a --- /dev/null +++ b/.github/actions/github-release/action.yml @@ -0,0 +1,15 @@ +name: 'wasmtime github releases' +description: 'wasmtime github releases' +inputs: + token: + description: '' + required: true + name: + description: '' + required: true + files: + description: '' + required: true +runs: + using: 'docker' + image: 'Dockerfile' diff --git a/.github/actions/github-release/main.js b/.github/actions/github-release/main.js new file mode 100644 index 0000000..040d116 --- /dev/null +++ b/.github/actions/github-release/main.js @@ -0,0 +1,144 @@ +const core = require('@actions/core'); +const path = require("path"); +const fs = require("fs"); +const github = require('@actions/github'); +const glob = require('glob'); + +function sleep(milliseconds) { + return new Promise(resolve => setTimeout(resolve, milliseconds)); +} + +async function runOnce() { + // Load all our inputs and env vars. Note that `getInput` reads from `INPUT_*` + const files = core.getInput('files'); + const name = core.getInput('name'); + const token = core.getInput('token'); + const slug = process.env.GITHUB_REPOSITORY; + const owner = slug.split('/')[0]; + const repo = slug.split('/')[1]; + const sha = process.env.HEAD_SHA; + + core.info(`files: ${files}`); + core.info(`name: ${name}`); + + const options = { + request: { + timeout: 30000, + } + }; + const octokit = github.getOctokit(token, options); + + // Delete the previous release since we can't overwrite one. This may happen + // due to retrying an upload or it may happen because we're doing the dev + // release. 
+ const releases = await octokit.paginate("GET /repos/:owner/:repo/releases", { owner, repo }); + for (const release of releases) { + if (release.tag_name !== name) { + continue; + } + const release_id = release.id; + core.info(`deleting release ${release_id}`); + await octokit.rest.repos.deleteRelease({ owner, repo, release_id }); + } + + // We also need to update the `dev` tag while we're at it on the `dev` branch. + if (name == 'nightly') { + try { + core.info(`updating nightly tag`); + await octokit.rest.git.updateRef({ + owner, + repo, + ref: 'tags/nightly', + sha, + force: true, + }); + } catch (e) { + core.error(e); + core.info(`creating nightly tag`); + await octokit.rest.git.createTag({ + owner, + repo, + tag: 'nightly', + message: 'nightly release', + object: sha, + type: 'commit', + }); + } + } + + // Creates an official GitHub release for this `tag`, and if this is `dev` + // then we know that from the previous block this should be a fresh release. + core.info(`creating a release`); + const release = await octokit.rest.repos.createRelease({ + owner, + repo, + name, + tag_name: name, + target_commitish: sha, + prerelease: name === 'nightly', + }); + const release_id = release.data.id; + + // Upload all the relevant assets for this release as just general blobs. + for (const file of glob.sync(files)) { + const size = fs.statSync(file).size; + const name = path.basename(file); + + await runWithRetry(async function() { + // We can't overwrite assets, so remove existing ones from a previous try. 
+ let assets = await octokit.rest.repos.listReleaseAssets({ + owner, + repo, + release_id + }); + for (const asset of assets.data) { + if (asset.name === name) { + core.info(`delete asset ${name}`); + const asset_id = asset.id; + await octokit.rest.repos.deleteReleaseAsset({ owner, repo, asset_id }); + } + } + + core.info(`upload ${file}`); + const headers = { 'content-length': size, 'content-type': 'application/octet-stream' }; + const data = fs.createReadStream(file); + await octokit.rest.repos.uploadReleaseAsset({ + data, + headers, + name, + url: release.data.upload_url, + }); + }); + } +} + +async function runWithRetry(f) { + const retries = 10; + const maxDelay = 4000; + let delay = 1000; + + for (let i = 0; i < retries; i++) { + try { + await f(); + break; + } catch (e) { + if (i === retries - 1) + throw e; + + core.error(e); + const currentDelay = Math.round(Math.random() * delay); + core.info(`sleeping ${currentDelay} ms`); + await sleep(currentDelay); + delay = Math.min(delay * 2, maxDelay); + } + } +} + +async function run() { + await runWithRetry(runOnce); +} + +run().catch(err => { + core.error(err); + core.setFailed(err.message); +}); diff --git a/.github/actions/github-release/package.json b/.github/actions/github-release/package.json new file mode 100644 index 0000000..af4bf07 --- /dev/null +++ b/.github/actions/github-release/package.json @@ -0,0 +1,10 @@ +{ + "name": "wasmtime-github-release", + "version": "0.0.0", + "main": "main.js", + "dependencies": { + "@actions/core": "^1.6", + "@actions/github": "^5.0", + "glob": "^7.1.5" + } +} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..46994d6 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,180 @@ +name: release +on: + workflow_dispatch: + + push: + branches: + - 'release/**' + +env: + CARGO_INCREMENTAL: 0 + CARGO_NET_RETRY: 10 + RUSTFLAGS: "-D warnings -W unreachable-pub" + RUSTUP_MAX_RETRIES: 10 + FETCH_DEPTH: 0 # pull in 
the tags for the version string + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc + CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc + +jobs: + dist: + strategy: + matrix: + include: + - os: windows-latest + target: x86_64-pc-windows-msvc + code-target: win32-x64 + - os: windows-latest + target: i686-pc-windows-msvc + code-target: win32-ia32 + - os: windows-latest + target: aarch64-pc-windows-msvc + code-target: win32-arm64 + - os: ubuntu-22.04 + target: x86_64-unknown-linux-gnu + code-target: linux-x64 + - os: ubuntu-22.04 + target: aarch64-unknown-linux-gnu + code-target: linux-arm64 + - os: ubuntu-22.04 + target: arm-unknown-linux-gnueabihf + code-target: linux-armhf + - os: macos-12 + target: x86_64-apple-darwin + code-target: darwin-x64 + - os: macos-12 + target: aarch64-apple-darwin + code-target: darwin-arm64 + + env: + LSP_AI_TARGET: ${{ matrix.target }} + + name: dist (${{ matrix.target }}) + runs-on: ${{ matrix.os }} + container: ${{ matrix.container }} + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: ${{ env.FETCH_DEPTH }} + + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup target add ${{ matrix.target }} + rustup component add rust-src + + - name: Update apt repositories + if: contains(matrix.os, 'ubuntu') + run: sudo apt-get update -y + + - name: Install AArch64 target toolchain + if: matrix.target == 'aarch64-unknown-linux-gnu' + run: sudo apt-get install gcc-aarch64-linux-gnu libc6-dev-arm64-cross g++-aarch64-linux-gnu + + - name: Install ARM target toolchain + if: matrix.target == 'arm-unknown-linux-gnueabihf' + run: sudo apt-get install gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf + + - name: Dist + run: cargo xtask dist + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: dist-${{ matrix.target }} + path: ./dist + + dist-x86_64-unknown-linux-musl: + name: dist (x86_64-unknown-linux-musl) + 
runs-on: ubuntu-latest + env: + LLM_LS_TARGET: x86_64-unknown-linux-musl + # For some reason `-crt-static` is not working for clang without lld + RUSTFLAGS: "-C link-arg=-fuse-ld=lld -C target-feature=-crt-static" + container: + image: rust:alpine + volumes: + - /usr/local/cargo/registry:/usr/local/cargo/registry + + steps: + - name: Install dependencies + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ + + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: ${{ env.FETCH_DEPTH }} + + - name: Dist + run: cargo xtask dist + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: dist-x86_64-unknown-linux-musl + path: ./dist + + publish: + name: publish + runs-on: ubuntu-latest + needs: ["dist", "dist-x86_64-unknown-linux-musl"] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: ${{ env.FETCH_DEPTH }} + + - run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV + - run: 'echo "HEAD_SHA: $HEAD_SHA"' + + - name: Split branch name + env: + BRANCH: ${{ github.ref_name }} + id: split + run: echo "tag=${BRANCH##*/}" >> $GITHUB_OUTPUT + + - uses: actions/download-artifact@v4 + with: + name: dist-aarch64-apple-darwin + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-x86_64-apple-darwin + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-x86_64-unknown-linux-gnu + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-x86_64-unknown-linux-musl + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-aarch64-unknown-linux-gnu + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-arm-unknown-linux-gnueabihf + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-x86_64-pc-windows-msvc + path: dist + - uses: actions/download-artifact@v4 + with: + name: dist-i686-pc-windows-msvc + path: dist + - uses: actions/download-artifact@v4 + with: + name: 
dist-aarch64-pc-windows-msvc + path: dist + - run: ls -al ./dist + + - name: Publish Release + uses: ./.github/actions/github-release + with: + files: "dist/*" + name: ${{ steps.split.outputs.tag }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/Cargo.lock b/Cargo.lock index d9ef5db..bcdb53f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -266,9 +266,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.15.4" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" @@ -1658,9 +1658,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memo-map" @@ -3218,9 +3218,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.34" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", @@ -3239,9 +3239,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", @@ -4134,6 +4134,55 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] 
+name = "write-json" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118" + +[[package]] +name = "xflags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d9e15fbb3de55454b0106e314b28e671279009b363e6f1d8e39fdc3bf048944" +dependencies = [ + "xflags-macros", +] + +[[package]] +name = "xflags-macros" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "672423d4fea7ffa2f6c25ba60031ea13dc6258070556f125cc4d790007d4a155" + +[[package]] +name = "xshell" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db0ab86eae739efd1b054a8d3d16041914030ac4e01cd1dca0cf252fd8b6437" +dependencies = [ + "xshell-macros", +] + +[[package]] +name = "xshell-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852" + +[[package]] +name = "xtask" +version = "0.1.0" +dependencies = [ + "anyhow", + "flate2", + "time", + "write-json", + "xflags", + "xshell", + "zip", +] + [[package]] name = "xxhash-rust" version = "0.8.10" @@ -4165,3 +4214,16 @@ name = "zeroize" version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" + +[[package]] +name = "zip" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", + "time", +] diff --git a/Cargo.toml b/Cargo.toml index 0121f0b..7342743 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,5 @@ [workspace] -members = [ - "crates/*", -] +members = ["crates/*", "xtask/"] resolver = "2" [workspace.package] @@ -10,3 
+8,13 @@ license = "MIT" description = "LSP-AI is an open-source language server that serves as a backend for AI-powered functionality, designed to assist and empower software engineers, not replace them." repository = "https://github.com/SilasMarvin/lsp-ai" readme = "README.md" +authors = ["Silvas Marvin <>"] + +[profile.dev.package] +# This speeds up `cargo xtask dist`. +miniz_oxide.opt-level = 3 + +[profile.release] +incremental = true +# Set this to 1 or 2 to get more useful backtraces in debugger. +debug = 0 diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml new file mode 100644 index 0000000..400f4a0 --- /dev/null +++ b/xtask/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "xtask" +version = "0.1.0" +publish = false +edition.workspace = true +license.workspace = true +authors.workspace = true + +[dependencies] +anyhow = "1" +flate2 = "1" +write-json = "0.1" +xshell = "0.2" +xflags = "0.3" +time = { version = "0.3", default-features = false } +zip = { version = "0.6", default-features = false, features = [ + "deflate", + "time", +] } diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs new file mode 100644 index 0000000..6de8f14 --- /dev/null +++ b/xtask/src/dist.rs @@ -0,0 +1,147 @@ +use std::{ + env, + fs::File, + io::{self, BufWriter}, + path::{Path, PathBuf}, +}; + +use flate2::{write::GzEncoder, Compression}; +use time::OffsetDateTime; +use xshell::{cmd, Shell}; +use zip::{write::FileOptions, DateTime, ZipWriter}; + +use crate::{flags, project_root}; + +impl flags::Dist { + pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { + let branch = sh.var("GITHUB_REF").unwrap_or_default(); + let release = if branch.starts_with("refs/heads/release/") { + branch.replace("refs/heads/release/", "") + } else { + "0.0.0".to_owned() + }; + let project_root = project_root(); + let target = Target::get(&project_root); + let dist = project_root.join("dist"); + sh.remove_path(&dist)?; + sh.create_dir(&dist)?; + + dist_server(sh, &release, &target)?; + Ok(()) + } +} + +fn 
dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()> { + let _e = sh.push_env("CFG_RELEASE", release); + let _e = sh.push_env("CARGO_PROFILE_RELEASE_LTO", "thin"); + + // Uncomment to enable debug info for releases. Note that: + // * debug info is split on windows and macs, so it does nothing for those platforms, + // * on Linux, this blows up the binary size from 8MB to 43MB, which is unreasonable. + // let _e = sh.push_env("CARGO_PROFILE_RELEASE_DEBUG", "1"); + + if target.name.contains("-linux-") { + env::set_var("CC", "clang"); + } + + let target_name = &target.name; + cmd!(sh, "cargo build --manifest-path ./crates/lsp-ai/Cargo.toml --bin lsp-ai --target {target_name} --release").run()?; + + let dst = Path::new("dist").join(&target.artifact_name); + gzip(&target.server_path, &dst.with_extension("gz"))?; + if target_name.contains("-windows-") { + zip( + &target.server_path, + target.symbols_path.as_ref(), + &dst.with_extension("zip"), + )?; + } + + Ok(()) +} + +fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> { + let mut encoder = GzEncoder::new(File::create(dest_path)?, Compression::best()); + let mut input = io::BufReader::new(File::open(src_path)?); + io::copy(&mut input, &mut encoder)?; + encoder.finish()?; + Ok(()) +} + +fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> anyhow::Result<()> { + let file = File::create(dest_path)?; + let mut writer = ZipWriter::new(BufWriter::new(file)); + writer.start_file( + src_path.file_name().unwrap().to_str().unwrap(), + FileOptions::default() + .last_modified_time( + DateTime::try_from(OffsetDateTime::from( + std::fs::metadata(src_path)?.modified()?, + )) + .unwrap(), + ) + .unix_permissions(0o755) + .compression_method(zip::CompressionMethod::Deflated) + .compression_level(Some(9)), + )?; + let mut input = io::BufReader::new(File::open(src_path)?); + io::copy(&mut input, &mut writer)?; + if let Some(symbols_path) = symbols_path { + writer.start_file( + 
symbols_path.file_name().unwrap().to_str().unwrap(), + FileOptions::default() + .last_modified_time( + DateTime::try_from(OffsetDateTime::from( + std::fs::metadata(src_path)?.modified()?, + )) + .unwrap(), + ) + .compression_method(zip::CompressionMethod::Deflated) + .compression_level(Some(9)), + )?; + let mut input = io::BufReader::new(File::open(symbols_path)?); + io::copy(&mut input, &mut writer)?; + } + writer.finish()?; + Ok(()) +} + +struct Target { + name: String, + server_path: PathBuf, + symbols_path: Option, + artifact_name: String, +} + +impl Target { + fn get(project_root: &Path) -> Self { + let name = match env::var("LSP_AI_TARGET") { + Ok(target) => target, + _ => { + if cfg!(target_os = "linux") { + "x86_64-unknown-linux-gnu".to_string() + } else if cfg!(target_os = "windows") { + "x86_64-pc-windows-msvc".to_string() + } else if cfg!(target_os = "macos") { + "x86_64-apple-darwin".to_string() + } else { + panic!("Unsupported OS, maybe try setting LSP_AI_TARGET") + } + } + }; + let out_path = project_root.join("target").join(&name).join("release"); + let (exe_suffix, symbols_path) = if name.contains("-windows-") { + (".exe".into(), Some(out_path.join("lsp_ai.pdb"))) + } else { + (String::new(), None) + }; + let server_path = out_path.join(format!("lsp-ai{exe_suffix}")); + let artifact_name = format!("lsp-ai-{name}{exe_suffix}"); + Self { + name, + server_path, + symbols_path, + artifact_name, + } + } +} diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs new file mode 100644 index 0000000..b3f00af --- /dev/null +++ b/xtask/src/flags.rs @@ -0,0 +1,43 @@ +#![allow(unreachable_pub)] + +xflags::xflags! { + src "./src/flags.rs" + + /// Run custom build command. + cmd xtask { + cmd dist {} + } +} + +// generated start +// The following code is generated by `xflags` macro. +// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate. 
+#[derive(Debug)] +pub struct Xtask { + pub subcommand: XtaskCmd, +} + +#[derive(Debug)] +pub enum XtaskCmd { + Dist(Dist), +} + +#[derive(Debug)] +pub struct Dist; + +impl Xtask { + #[allow(dead_code)] + pub fn from_env_or_exit() -> Self { + Self::from_env_or_exit_() + } + + #[allow(dead_code)] + pub fn from_env() -> xflags::Result { + Self::from_env_() + } + + #[allow(dead_code)] + pub fn from_vec(args: Vec) -> xflags::Result { + Self::from_vec_(args) + } +} diff --git a/xtask/src/main.rs b/xtask/src/main.rs new file mode 100644 index 0000000..b07455e --- /dev/null +++ b/xtask/src/main.rs @@ -0,0 +1,44 @@ +//! See . +//! +//! This binary defines various auxiliary build commands, which are not +//! expressible with just `cargo`. +//! +//! This binary is integrated into the `cargo` command line by using an alias in +//! `.cargo/config`. + +#![warn( + rust_2018_idioms, + unused_lifetimes, + semicolon_in_expressions_from_macros +)] + +mod flags; + +mod dist; + +use std::{ + env, + path::{Path, PathBuf}, +}; +use xshell::Shell; + +fn main() -> anyhow::Result<()> { + let flags = flags::Xtask::from_env_or_exit(); + + let sh = &Shell::new()?; + sh.change_dir(project_root()); + + match flags.subcommand { + flags::XtaskCmd::Dist(cmd) => cmd.run(sh), + } +} + +fn project_root() -> PathBuf { + Path::new( + &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()), + ) + .ancestors() + .nth(1) + .unwrap() + .to_path_buf() +} From 9c34ce39a03e362f1994574fee4ddcb8158c415a Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 10:52:03 +0200 Subject: [PATCH 02/18] fix(ci): add missing `.cargo/config.toml` --- .cargo/config.toml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .cargo/config.toml diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 0000000..8628170 --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 @@ +[alias] +xtask = "run --package xtask --bin xtask --" From 
c287591c6a76d06758b9b91958ff8a5218b4e802 Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 11:19:10 +0200 Subject: [PATCH 03/18] fix: install `x86_64-unknown-linux-gnu` toolchain for musl --- .github/workflows/release.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 46994d6..62ab1c0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -106,6 +106,12 @@ jobs: with: fetch-depth: ${{ env.FETCH_DEPTH }} + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup target add x86_64-unknown-linux-gnu + rustup component add rust-src + - name: Dist run: cargo xtask dist From 12900c0ebf76dd88d21c48b37ff2b22491465d2c Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 11:19:28 +0200 Subject: [PATCH 04/18] fix: author name --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 7342743..324e4be 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,7 @@ license = "MIT" description = "LSP-AI is an open-source language server that serves as a backend for AI-powered functionality, designed to assist and empower software engineers, not replace them." repository = "https://github.com/SilasMarvin/lsp-ai" readme = "README.md" -authors = ["Silvas Marvin <>"] +authors = ["Silas Marvin "] [profile.dev.package] # This speeds up `cargo xtask dist`. 
From b594d03e486282ce440d71f04a11bc28246f74ef Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 11:19:39 +0200 Subject: [PATCH 05/18] fix: visibilty warnings --- crates/lsp-ai/src/config.rs | 51 ++++++++++--------- crates/lsp-ai/src/crawl.rs | 2 +- .../lsp-ai/src/custom_requests/generation.rs | 16 +++--- .../src/custom_requests/generation_stream.rs | 8 +-- crates/lsp-ai/src/custom_requests/mod.rs | 4 +- .../lsp-ai/src/memory_backends/file_store.rs | 26 ++++++---- crates/lsp-ai/src/memory_backends/mod.rs | 8 +-- .../src/memory_backends/postgresml/mod.rs | 9 +--- crates/lsp-ai/src/memory_worker.rs | 12 ++--- crates/lsp-ai/src/splitters/text_splitter.rs | 4 +- crates/lsp-ai/src/splitters/tree_sitter.rs | 4 +- .../src/transformer_backends/anthropic.rs | 12 ++--- .../lsp-ai/src/transformer_backends/gemini.rs | 2 +- 13 files changed, 80 insertions(+), 78 deletions(-) diff --git a/crates/lsp-ai/src/config.rs b/crates/lsp-ai/src/config.rs index ea9631f..b89fdd3 100644 --- a/crates/lsp-ai/src/config.rs +++ b/crates/lsp-ai/src/config.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; -pub type Kwargs = HashMap; +pub(crate) type Kwargs = HashMap; const fn max_requests_per_second_default() -> f32 { 1. 
@@ -79,7 +79,7 @@ pub enum ValidMemoryBackend { #[derive(Debug, Clone, Deserialize)] #[serde(tag = "type")] -pub enum ValidModel { +pub(crate) enum ValidModel { #[cfg(feature = "llama_cpp")] #[serde(rename = "llama_cpp")] LLaMACPP(LLaMACPP), @@ -97,13 +97,13 @@ pub enum ValidModel { #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(deny_unknown_fields)] -pub struct ChatMessage { - pub role: String, - pub content: String, +pub(crate) struct ChatMessage { + pub(crate) role: String, + pub(crate) content: String, } impl ChatMessage { - pub fn new(role: String, content: String) -> Self { + pub(crate) fn new(role: String, content: String) -> Self { Self { role, content, @@ -115,10 +115,10 @@ impl ChatMessage { #[derive(Clone, Debug, Deserialize)] #[allow(clippy::upper_case_acronyms)] #[serde(deny_unknown_fields)] -pub struct FIM { - pub start: String, - pub middle: String, - pub end: String, +pub(crate) struct FIM { + pub(crate) start: String, + pub(crate) middle: String, + pub(crate) end: String, } const fn max_crawl_memory_default() -> u64 { @@ -131,13 +131,13 @@ const fn max_crawl_file_size_default() -> u64 { #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct Crawl { +pub(crate) struct Crawl { #[serde(default = "max_crawl_file_size_default")] - pub max_file_size: u64, + pub(crate) max_file_size: u64, #[serde(default = "max_crawl_memory_default")] - pub max_crawl_memory: u64, + pub(crate) max_crawl_memory: u64, #[serde(default)] - pub all_files: bool, + pub(crate) all_files: bool, } #[derive(Clone, Debug, Deserialize)] @@ -149,18 +149,18 @@ pub struct PostgresMLEmbeddingModel { #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct PostgresML { - pub database_url: Option, - pub crawl: Option, +pub(crate) struct PostgresML { + pub(crate) database_url: Option, + pub(crate) crawl: Option, #[serde(default)] - pub splitter: ValidSplitter, - pub embedding_model: Option, + pub(crate) splitter: ValidSplitter, + 
pub(crate) embedding_model: Option, } #[derive(Clone, Debug, Deserialize, Default)] #[serde(deny_unknown_fields)] -pub struct FileStore { - pub crawl: Option, +pub(crate) struct FileStore { + pub(crate) crawl: Option, } impl FileStore { @@ -265,11 +265,12 @@ pub struct Gemini { #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct Anthropic { +pub(crate) struct Anthropic { // The auth token env var name pub auth_token_env_var_name: Option, pub auth_token: Option, // The completions endpoint + #[allow(dead_code)] pub completions_endpoint: Option, // The chat endpoint pub chat_endpoint: Option, @@ -295,7 +296,7 @@ pub struct Completion { #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct ValidConfig { - pub memory: ValidMemoryBackend, + pub(crate) memory: ValidMemoryBackend, pub models: HashMap, pub completion: Option, } @@ -308,8 +309,8 @@ pub struct ValidClientParams { #[derive(Clone, Debug)] pub struct Config { - pub config: ValidConfig, - pub client_params: ValidClientParams, + pub(crate) config: ValidConfig, + pub(crate) client_params: ValidClientParams, } impl Config { diff --git a/crates/lsp-ai/src/crawl.rs b/crates/lsp-ai/src/crawl.rs index 2dc1721..4546d2c 100644 --- a/crates/lsp-ai/src/crawl.rs +++ b/crates/lsp-ai/src/crawl.rs @@ -12,7 +12,7 @@ pub struct Crawl { } impl Crawl { - pub fn new(crawl_config: config::Crawl, config: Config) -> Self { + pub(crate) fn new(crawl_config: config::Crawl, config: Config) -> Self { Self { crawl_config, config, diff --git a/crates/lsp-ai/src/custom_requests/generation.rs b/crates/lsp-ai/src/custom_requests/generation.rs index 725cccb..48923c1 100644 --- a/crates/lsp-ai/src/custom_requests/generation.rs +++ b/crates/lsp-ai/src/custom_requests/generation.rs @@ -4,28 +4,28 @@ use serde_json::Value; use crate::config; -pub enum Generation {} +pub(crate) enum Generation {} #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -pub struct 
GenerationParams { +pub(crate) struct GenerationParams { // This field was "mixed-in" from TextDocumentPositionParams #[serde(flatten)] - pub text_document_position: TextDocumentPositionParams, + pub(crate) text_document_position: TextDocumentPositionParams, // The model key to use - pub model: String, + pub(crate) model: String, #[serde(default)] // Args are deserialized by the backend using them - pub parameters: Value, + pub(crate) parameters: Value, // Parameters for post processing #[serde(default)] - pub post_process: config::PostProcess, + pub(crate) post_process: config::PostProcess, } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -pub struct GenerateResult { - pub generated_text: String, +pub(crate) struct GenerateResult { + pub(crate) generated_text: String, } impl lsp_types::request::Request for Generation { diff --git a/crates/lsp-ai/src/custom_requests/generation_stream.rs b/crates/lsp-ai/src/custom_requests/generation_stream.rs index 1c19a55..a09ed30 100644 --- a/crates/lsp-ai/src/custom_requests/generation_stream.rs +++ b/crates/lsp-ai/src/custom_requests/generation_stream.rs @@ -1,7 +1,7 @@ use lsp_types::{ProgressToken, TextDocumentPositionParams}; use serde::{Deserialize, Serialize}; -pub enum GenerationStream {} +pub(crate) enum GenerationStream {} #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] @@ -15,9 +15,9 @@ pub struct GenerationStreamParams { #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -pub struct GenerationStreamResult { - pub generated_text: String, - pub partial_result_token: ProgressToken, +pub(crate) struct GenerationStreamResult { + pub(crate) generated_text: String, + pub(crate) partial_result_token: ProgressToken, } impl lsp_types::request::Request for GenerationStream { diff --git a/crates/lsp-ai/src/custom_requests/mod.rs b/crates/lsp-ai/src/custom_requests/mod.rs index a62b6e4..e4e0041 100644 --- 
a/crates/lsp-ai/src/custom_requests/mod.rs +++ b/crates/lsp-ai/src/custom_requests/mod.rs @@ -1,2 +1,2 @@ -pub mod generation; -pub mod generation_stream; +pub(crate) mod generation; +pub(crate) mod generation_stream; diff --git a/crates/lsp-ai/src/memory_backends/file_store.rs b/crates/lsp-ai/src/memory_backends/file_store.rs index 45abb7e..f37ff58 100644 --- a/crates/lsp-ai/src/memory_backends/file_store.rs +++ b/crates/lsp-ai/src/memory_backends/file_store.rs @@ -17,12 +17,12 @@ use crate::{ use super::{ContextAndCodePrompt, FIMPrompt, MemoryBackend, MemoryRunParams, Prompt, PromptType}; #[derive(Default)] -pub struct AdditionalFileStoreParams { +pub(crate) struct AdditionalFileStoreParams { build_tree: bool, } impl AdditionalFileStoreParams { - pub fn new(build_tree: bool) -> Self { + pub(crate) fn new(build_tree: bool) -> Self { Self { build_tree } } } @@ -47,7 +47,7 @@ impl File { } } -pub struct FileStore { +pub(crate) struct FileStore { params: AdditionalFileStoreParams, file_map: Mutex>, accessed_files: Mutex>, @@ -55,7 +55,10 @@ pub struct FileStore { } impl FileStore { - pub fn new(mut file_store_config: config::FileStore, config: Config) -> anyhow::Result { + pub(crate) fn new( + mut file_store_config: config::FileStore, + config: Config, + ) -> anyhow::Result { let crawl = file_store_config .crawl .take() @@ -72,7 +75,7 @@ impl FileStore { Ok(s) } - pub fn new_with_params( + pub(crate) fn new_with_params( mut file_store_config: config::FileStore, config: Config, params: AdditionalFileStoreParams, @@ -192,7 +195,7 @@ impl FileStore { Ok((rope, cursor_index)) } - pub fn get_characters_around_position( + pub(crate) fn get_characters_around_position( &self, position: &TextDocumentPositionParams, characters: usize, @@ -216,7 +219,7 @@ impl FileStore { Ok(rope_slice.to_string()) } - pub fn build_code( + pub(crate) fn build_code( &self, position: &TextDocumentPositionParams, prompt_type: PromptType, @@ -272,15 +275,18 @@ impl FileStore { }) } - pub fn 
file_map(&self) -> &Mutex> { + pub(crate) fn file_map(&self) -> &Mutex> { &self.file_map } - pub fn contains_file(&self, uri: &str) -> bool { + pub(crate) fn contains_file(&self, uri: &str) -> bool { self.file_map.lock().contains_key(uri) } - pub fn position_to_byte(&self, position: &TextDocumentPositionParams) -> anyhow::Result { + pub(crate) fn position_to_byte( + &self, + position: &TextDocumentPositionParams, + ) -> anyhow::Result { let file_map = self.file_map.lock(); let uri = position.text_document.uri.to_string(); let file = file_map diff --git a/crates/lsp-ai/src/memory_backends/mod.rs b/crates/lsp-ai/src/memory_backends/mod.rs index 9d6fcc5..8fa5914 100644 --- a/crates/lsp-ai/src/memory_backends/mod.rs +++ b/crates/lsp-ai/src/memory_backends/mod.rs @@ -6,7 +6,7 @@ use serde_json::Value; use crate::config::{Config, ValidMemoryBackend}; -pub mod file_store; +pub(crate) mod file_store; mod postgresml; #[derive(Clone, Debug)] @@ -16,9 +16,9 @@ pub enum PromptType { } #[derive(Clone)] -pub struct MemoryRunParams { - pub is_for_chat: bool, - pub max_context: usize, +pub(crate) struct MemoryRunParams { + pub(crate) is_for_chat: bool, + pub(crate) max_context: usize, } impl From<&Value> for MemoryRunParams { diff --git a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs index 2c08065..255350d 100644 --- a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs +++ b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs @@ -84,7 +84,7 @@ async fn split_and_upsert_file( } #[derive(Clone)] -pub struct PostgresML { +pub(crate) struct PostgresML { config: Config, postgresml_config: config::PostgresML, file_store: Arc, @@ -240,12 +240,7 @@ impl PostgresML { }) .collect(); if let Err(e) = task_collection - .delete_documents( - json!({ - "$or": delete_or_statements - }) - .into(), - ) + .delete_documents(json!({ "$or": delete_or_statements }).into()) .await .context("PGML - error deleting documents") { diff --git 
a/crates/lsp-ai/src/memory_worker.rs b/crates/lsp-ai/src/memory_worker.rs index 1b7a481..38b6127 100644 --- a/crates/lsp-ai/src/memory_worker.rs +++ b/crates/lsp-ai/src/memory_worker.rs @@ -13,7 +13,7 @@ use crate::{ }; #[derive(Debug)] -pub struct PromptRequest { +pub(crate) struct PromptRequest { position: TextDocumentPositionParams, prompt_type: PromptType, params: Value, @@ -21,7 +21,7 @@ pub struct PromptRequest { } impl PromptRequest { - pub fn new( + pub(crate) fn new( position: TextDocumentPositionParams, prompt_type: PromptType, params: Value, @@ -37,13 +37,13 @@ impl PromptRequest { } #[derive(Debug)] -pub struct FilterRequest { +pub(crate) struct FilterRequest { position: TextDocumentPositionParams, tx: tokio::sync::oneshot::Sender, } impl FilterRequest { - pub fn new( + pub(crate) fn new( position: TextDocumentPositionParams, tx: tokio::sync::oneshot::Sender, ) -> Self { @@ -51,7 +51,7 @@ impl FilterRequest { } } -pub enum WorkerRequest { +pub(crate) enum WorkerRequest { FilterText(FilterRequest), Prompt(PromptRequest), DidOpenTextDocument(DidOpenTextDocumentParams), @@ -115,7 +115,7 @@ fn do_run( } } -pub fn run( +pub(crate) fn run( memory_backend: Box, rx: std::sync::mpsc::Receiver, ) { diff --git a/crates/lsp-ai/src/splitters/text_splitter.rs b/crates/lsp-ai/src/splitters/text_splitter.rs index 9b280a1..ba52e04 100644 --- a/crates/lsp-ai/src/splitters/text_splitter.rs +++ b/crates/lsp-ai/src/splitters/text_splitter.rs @@ -8,14 +8,14 @@ pub struct TextSplitter { } impl TextSplitter { - pub fn new(config: config::TextSplitter) -> Self { + pub(crate) fn new(config: config::TextSplitter) -> Self { Self { chunk_size: config.chunk_size, splitter: text_splitter::TextSplitter::new(config.chunk_size), } } - pub fn new_with_chunk_size(chunk_size: usize) -> Self { + pub(crate) fn new_with_chunk_size(chunk_size: usize) -> Self { Self { chunk_size, splitter: text_splitter::TextSplitter::new(chunk_size), diff --git a/crates/lsp-ai/src/splitters/tree_sitter.rs 
b/crates/lsp-ai/src/splitters/tree_sitter.rs index dbbb9ce..7cda4d8 100644 --- a/crates/lsp-ai/src/splitters/tree_sitter.rs +++ b/crates/lsp-ai/src/splitters/tree_sitter.rs @@ -6,14 +6,14 @@ use crate::{config, memory_backends::file_store::File, utils::parse_tree}; use super::{text_splitter::TextSplitter, ByteRange, Chunk, Splitter}; -pub struct TreeSitter { +pub(crate) struct TreeSitter { chunk_size: usize, splitter: TreeSitterCodeSplitter, text_splitter: TextSplitter, } impl TreeSitter { - pub fn new(config: config::TreeSitter) -> anyhow::Result { + pub(crate) fn new(config: config::TreeSitter) -> anyhow::Result { let text_splitter = TextSplitter::new_with_chunk_size(config.chunk_size); Ok(Self { chunk_size: config.chunk_size, diff --git a/crates/lsp-ai/src/transformer_backends/anthropic.rs b/crates/lsp-ai/src/transformer_backends/anthropic.rs index 58f9ffb..cb07a03 100644 --- a/crates/lsp-ai/src/transformer_backends/anthropic.rs +++ b/crates/lsp-ai/src/transformer_backends/anthropic.rs @@ -30,15 +30,15 @@ const fn temperature_default() -> f32 { // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes #[derive(Debug, Deserialize)] -pub struct AnthropicRunParams { +pub(crate) struct AnthropicRunParams { system: String, messages: Vec, #[serde(default = "max_tokens_default")] - pub max_tokens: usize, + pub(crate) max_tokens: usize, #[serde(default = "top_p_default")] - pub top_p: f32, + pub(crate) top_p: f32, #[serde(default = "temperature_default")] - pub temperature: f32, + pub(crate) temperature: f32, } pub struct Anthropic { @@ -56,11 +56,11 @@ struct AnthropicChatResponse { error: Option, #[serde(default)] #[serde(flatten)] - pub other: HashMap, + pub(crate) other: HashMap, } impl Anthropic { - pub fn new(config: config::Anthropic) -> Self { + pub(crate) fn new(config: config::Anthropic) -> Self { Self { config } } diff --git a/crates/lsp-ai/src/transformer_backends/gemini.rs 
b/crates/lsp-ai/src/transformer_backends/gemini.rs index 3203c48..5b0c696 100644 --- a/crates/lsp-ai/src/transformer_backends/gemini.rs +++ b/crates/lsp-ai/src/transformer_backends/gemini.rs @@ -39,7 +39,7 @@ const fn max_tokens_default() -> usize { #[derive(Debug, Serialize, Deserialize, Clone)] struct Part { - pub text: String, + pub(crate) text: String, } #[derive(Debug, Serialize, Deserialize, Clone)] From 19ed5188a0c83f214d5087839b8a115c2332229b Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 11:34:17 +0200 Subject: [PATCH 06/18] fix: clippy warnings --- crates/lsp-ai/src/crawl.rs | 5 ++-- .../lsp-ai/src/memory_backends/file_store.rs | 2 +- .../src/memory_backends/postgresml/mod.rs | 26 +++++++------------ crates/lsp-ai/src/transformer_worker.rs | 2 +- crates/lsp-ai/src/utils.rs | 2 +- crates/splitter-tree-sitter/src/lib.rs | 14 ++++------ 6 files changed, 20 insertions(+), 31 deletions(-) diff --git a/crates/lsp-ai/src/crawl.rs b/crates/lsp-ai/src/crawl.rs index 4546d2c..d4634b8 100644 --- a/crates/lsp-ai/src/crawl.rs +++ b/crates/lsp-ai/src/crawl.rs @@ -37,11 +37,10 @@ impl Crawl { } let extension_to_match = triggered_file - .map(|tf| { + .and_then(|tf| { let path = std::path::Path::new(&tf); path.extension().map(|f| f.to_str().map(|f| f.to_owned())) }) - .flatten() .flatten(); if let Some(extension_to_match) = &extension_to_match { @@ -70,7 +69,7 @@ impl Crawl { } } else { match ( - path.extension().map(|pe| pe.to_str()).flatten(), + path.extension().and_then(|pe| pe.to_str()), &extension_to_match, ) { (Some(path_extension), Some(extension_to_match)) => { diff --git a/crates/lsp-ai/src/memory_backends/file_store.rs b/crates/lsp-ai/src/memory_backends/file_store.rs index f37ff58..51f3c58 100644 --- a/crates/lsp-ai/src/memory_backends/file_store.rs +++ b/crates/lsp-ai/src/memory_backends/file_store.rs @@ -328,7 +328,7 @@ impl MemoryBackend for FileStore { prompt_type: PromptType, params: &Value, ) -> anyhow::Result { - let params: 
MemoryRunParams = params.try_into()?; + let params: MemoryRunParams = params.into(); self.build_code(position, prompt_type, params, true) } diff --git a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs index 255350d..010c052 100644 --- a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs +++ b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs @@ -250,15 +250,14 @@ impl PostgresML { let documents: Vec = chunks .into_iter() .zip(&file_uris) - .map(|(chunks, uri)| { + .flat_map(|(chunks, uri)| { chunks .into_iter() .map(|chunk| { - chunk_to_document(&uri, chunk, task_root_uri.as_deref()) + chunk_to_document(uri, chunk, task_root_uri.as_deref()) }) .collect::>() }) - .flatten() .map(|f: Value| f.into()) .collect(); if let Err(e) = task_collection @@ -360,15 +359,11 @@ impl PostgresML { current_chunks_bytes += contents.len(); let chunks: Vec = self .splitter - .split_file_contents(&uri, &contents) + .split_file_contents(uri, &contents) .into_iter() .map(|chunk| { - chunk_to_document( - &uri, - chunk, - self.config.client_params.root_uri.as_deref(), - ) - .into() + chunk_to_document(uri, chunk, self.config.client_params.root_uri.as_deref()) + .into() }) .collect(); chunks_to_upsert.extend(chunks); @@ -384,7 +379,7 @@ impl PostgresML { } } // Upsert any remaining chunks - if chunks_to_upsert.len() > 0 { + if !chunks_to_upsert.is_empty() { collection .upsert_documents(chunks_to_upsert, None) .await @@ -474,7 +469,7 @@ impl PostgresML { Ok(true) })?; // Upsert any remaining documents - if documents.len() > 0 { + if !documents.is_empty() { let mut collection = self.collection.clone(); TOKIO_RUNTIME.spawn(async move { if let Err(e) = collection @@ -505,7 +500,7 @@ impl MemoryBackend for PostgresML { prompt_type: PromptType, params: &Value, ) -> anyhow::Result { - let params: MemoryRunParams = params.try_into()?; + let params: MemoryRunParams = params.into(); let chunk_size = self.splitter.chunk_size(); let 
total_allowed_characters = tokens_to_estimated_characters(params.max_context); @@ -530,8 +525,7 @@ impl MemoryBackend for PostgresML { .postgresml_config .embedding_model .as_ref() - .map(|m| m.query_parameters.clone()) - .flatten() + .and_then(|m| m.query_parameters.clone()) { Some(query_parameters) => query_parameters, None => json!({ @@ -597,7 +591,7 @@ impl MemoryBackend for PostgresML { Prompt::ContextAndCode(ContextAndCodePrompt::new( context.to_owned(), format_file_excerpt( - &position.text_document.uri.to_string(), + position.text_document.uri.as_str(), &context_and_code.code, self.config.client_params.root_uri.as_deref(), ), diff --git a/crates/lsp-ai/src/transformer_worker.rs b/crates/lsp-ai/src/transformer_worker.rs index 7766a11..f7ab8e9 100644 --- a/crates/lsp-ai/src/transformer_worker.rs +++ b/crates/lsp-ai/src/transformer_worker.rs @@ -338,7 +338,7 @@ async fn do_completion( let mut response = transformer_backend.do_completion(&prompt, params).await?; if let Some(post_process) = config.get_completions_post_process() { - response.insert_text = post_process_response(response.insert_text, &prompt, &post_process); + response.insert_text = post_process_response(response.insert_text, &prompt, post_process); } // Build and send the response diff --git a/crates/lsp-ai/src/utils.rs b/crates/lsp-ai/src/utils.rs index 8b5b8b4..1178060 100644 --- a/crates/lsp-ai/src/utils.rs +++ b/crates/lsp-ai/src/utils.rs @@ -65,6 +65,6 @@ pub fn parse_tree(uri: &str, contents: &str, old_tree: Option<&Tree>) -> anyhow: let extension = extension.as_deref().unwrap_or(""); let mut parser = utils_tree_sitter::get_parser_for_extension(extension)?; parser - .parse(&contents, old_tree) + .parse(contents, old_tree) .with_context(|| format!("parsing tree failed for {uri}")) } diff --git a/crates/splitter-tree-sitter/src/lib.rs b/crates/splitter-tree-sitter/src/lib.rs index 49bf1a0..52e30ed 100644 --- a/crates/splitter-tree-sitter/src/lib.rs +++ b/crates/splitter-tree-sitter/src/lib.rs 
@@ -55,11 +55,7 @@ impl TreeSitterCodeSplitter { } } - pub fn split<'a, 'b, 'c>( - &'a self, - tree: &'b Tree, - utf8: &'c [u8], - ) -> Result>, SplitError> { + pub fn split<'c>(&self, tree: &Tree, utf8: &'c [u8]) -> Result>, SplitError> { let cursor = tree.walk(); Ok(self .split_recursive(cursor, utf8)? @@ -68,7 +64,7 @@ impl TreeSitterCodeSplitter { // Let's combine some of our smaller chunks together // We also want to do this in reverse as it (seems) to make more sense to combine code slices from bottom to top .try_fold(vec![], |mut acc, current| { - if acc.len() == 0 { + if acc.is_empty() { acc.push(current); Ok::<_, SplitError>(acc) } else { @@ -94,9 +90,9 @@ impl TreeSitterCodeSplitter { .collect()) } - fn split_recursive<'a, 'b, 'c>( - &'a self, - mut cursor: TreeCursor<'b>, + fn split_recursive<'c>( + &self, + mut cursor: TreeCursor<'_>, utf8: &'c [u8], ) -> Result>, SplitError> { let node = cursor.node(); From bb5d54f25dbb203307d04e2704b336e68beebe9b Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 12:21:47 +0200 Subject: [PATCH 07/18] fix: update visibilty --- crates/lsp-ai/src/config.rs | 93 ++++++++++--------- crates/lsp-ai/src/crawl.rs | 2 +- crates/lsp-ai/src/splitters/text_splitter.rs | 2 +- .../src/transformer_backends/anthropic.rs | 2 +- .../lsp-ai/src/transformer_backends/gemini.rs | 26 +++--- .../src/transformer_backends/mistral_fim.rs | 18 ++-- .../lsp-ai/src/transformer_backends/ollama.rs | 6 +- .../src/transformer_backends/open_ai/mod.rs | 36 +++---- crates/lsp-ai/src/transformer_worker.rs | 12 +-- crates/lsp-ai/src/utils.rs | 20 ++-- 10 files changed, 109 insertions(+), 108 deletions(-) diff --git a/crates/lsp-ai/src/config.rs b/crates/lsp-ai/src/config.rs index b89fdd3..8474cea 100644 --- a/crates/lsp-ai/src/config.rs +++ b/crates/lsp-ai/src/config.rs @@ -70,7 +70,7 @@ pub struct TextSplitter { } #[derive(Debug, Clone, Deserialize)] -pub enum ValidMemoryBackend { +pub(crate) enum ValidMemoryBackend { #[serde(rename = 
"file_store")] FileStore(FileStore), #[serde(rename = "postgresml")] @@ -141,10 +141,10 @@ pub(crate) struct Crawl { } #[derive(Clone, Debug, Deserialize)] -pub struct PostgresMLEmbeddingModel { - pub model: String, - pub embed_parameters: Option, - pub query_parameters: Option, +pub(crate) struct PostgresMLEmbeddingModel { + pub(crate) model: String, + pub(crate) embed_parameters: Option, + pub(crate) query_parameters: Option, } #[derive(Clone, Debug, Deserialize)] @@ -164,38 +164,38 @@ pub(crate) struct FileStore { } impl FileStore { - pub fn new_without_crawl() -> Self { + pub(crate) fn new_without_crawl() -> Self { Self { crawl: None } } } #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct Ollama { +pub(crate) struct Ollama { // The generate endpoint, default: 'http://localhost:11434/api/generate' - pub generate_endpoint: Option, + pub(crate) generate_endpoint: Option, // The chat endpoint, default: 'http://localhost:11434/api/chat' - pub chat_endpoint: Option, + pub(crate) chat_endpoint: Option, // The model name - pub model: String, + pub(crate) model: String, // The maximum requests per second #[serde(default = "max_requests_per_second_default")] - pub max_requests_per_second: f32, + pub(crate) max_requests_per_second: f32, } #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct MistralFIM { +pub(crate) struct MistralFIM { // The auth token env var name - pub auth_token_env_var_name: Option, - pub auth_token: Option, + pub(crate) auth_token_env_var_name: Option, + pub(crate) auth_token: Option, // The fim endpoint - pub fim_endpoint: Option, + pub(crate) fim_endpoint: Option, // The model name - pub model: String, + pub(crate) model: String, // The maximum requests per second #[serde(default = "max_requests_per_second_default")] - pub max_requests_per_second: f32, + pub(crate) max_requests_per_second: f32, } #[cfg(feature = "llama_cpp")] @@ -229,82 +229,83 @@ pub struct LLaMACPP { #[derive(Clone, Debug, 
Deserialize)] #[serde(deny_unknown_fields)] -pub struct OpenAI { +pub(crate) struct OpenAI { // The auth token env var name - pub auth_token_env_var_name: Option, + pub(crate) auth_token_env_var_name: Option, // The auth token - pub auth_token: Option, + pub(crate) auth_token: Option, // The completions endpoint - pub completions_endpoint: Option, + pub(crate) completions_endpoint: Option, // The chat endpoint - pub chat_endpoint: Option, + pub(crate) chat_endpoint: Option, // The maximum requests per second #[serde(default = "max_requests_per_second_default")] - pub max_requests_per_second: f32, + pub(crate) max_requests_per_second: f32, // The model name - pub model: String, + pub(crate) model: String, } #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct Gemini { +pub(crate) struct Gemini { // The auth token env var name - pub auth_token_env_var_name: Option, + pub(crate) auth_token_env_var_name: Option, // The auth token - pub auth_token: Option, + pub(crate) auth_token: Option, // The completions endpoint - pub completions_endpoint: Option, + #[allow(dead_code)] + pub(crate) completions_endpoint: Option, // The chat endpoint - pub chat_endpoint: Option, + pub(crate) chat_endpoint: Option, // The maximum requests per second #[serde(default = "max_requests_per_second_default")] - pub max_requests_per_second: f32, + pub(crate) max_requests_per_second: f32, // The model name - pub model: String, + pub(crate) model: String, } #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] pub(crate) struct Anthropic { // The auth token env var name - pub auth_token_env_var_name: Option, - pub auth_token: Option, + pub(crate) auth_token_env_var_name: Option, + pub(crate) auth_token: Option, // The completions endpoint #[allow(dead_code)] - pub completions_endpoint: Option, + pub(crate) completions_endpoint: Option, // The chat endpoint - pub chat_endpoint: Option, + pub(crate) chat_endpoint: Option, // The maximum requests per second 
#[serde(default = "max_requests_per_second_default")] - pub max_requests_per_second: f32, + pub(crate) max_requests_per_second: f32, // The model name - pub model: String, + pub(crate) model: String, } #[derive(Clone, Debug, Deserialize)] -pub struct Completion { +pub(crate) struct Completion { // The model key to use - pub model: String, + pub(crate) model: String, // Args are deserialized by the backend using them #[serde(default)] - pub parameters: Kwargs, + pub(crate) parameters: Kwargs, // Parameters for post processing #[serde(default)] - pub post_process: PostProcess, + pub(crate) post_process: PostProcess, } #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] -pub struct ValidConfig { +pub(crate) struct ValidConfig { pub(crate) memory: ValidMemoryBackend, - pub models: HashMap, - pub completion: Option, + pub(crate) models: HashMap, + pub(crate) completion: Option, } #[derive(Clone, Debug, Deserialize, Default)] -pub struct ValidClientParams { +pub(crate) struct ValidClientParams { #[serde(alias = "rootUri")] - pub root_uri: Option, + pub(crate) root_uri: Option, } #[derive(Clone, Debug)] diff --git a/crates/lsp-ai/src/crawl.rs b/crates/lsp-ai/src/crawl.rs index d4634b8..1a15f07 100644 --- a/crates/lsp-ai/src/crawl.rs +++ b/crates/lsp-ai/src/crawl.rs @@ -4,7 +4,7 @@ use tracing::{error, instrument}; use crate::config::{self, Config}; -pub struct Crawl { +pub(crate) struct Crawl { crawl_config: config::Crawl, config: Config, crawled_file_types: HashSet, diff --git a/crates/lsp-ai/src/splitters/text_splitter.rs b/crates/lsp-ai/src/splitters/text_splitter.rs index ba52e04..6c61b9a 100644 --- a/crates/lsp-ai/src/splitters/text_splitter.rs +++ b/crates/lsp-ai/src/splitters/text_splitter.rs @@ -2,7 +2,7 @@ use crate::{config, memory_backends::file_store::File}; use super::{ByteRange, Chunk, Splitter}; -pub struct TextSplitter { +pub(crate) struct TextSplitter { chunk_size: usize, splitter: text_splitter::TextSplitter, } diff --git 
a/crates/lsp-ai/src/transformer_backends/anthropic.rs b/crates/lsp-ai/src/transformer_backends/anthropic.rs index cb07a03..603411c 100644 --- a/crates/lsp-ai/src/transformer_backends/anthropic.rs +++ b/crates/lsp-ai/src/transformer_backends/anthropic.rs @@ -41,7 +41,7 @@ pub(crate) struct AnthropicRunParams { pub(crate) temperature: f32, } -pub struct Anthropic { +pub(crate) struct Anthropic { config: config::Anthropic, } diff --git a/crates/lsp-ai/src/transformer_backends/gemini.rs b/crates/lsp-ai/src/transformer_backends/gemini.rs index 5b0c696..be731de 100644 --- a/crates/lsp-ai/src/transformer_backends/gemini.rs +++ b/crates/lsp-ai/src/transformer_backends/gemini.rs @@ -55,37 +55,33 @@ impl GeminiContent { } #[derive(Debug, Deserialize, Serialize, Clone)] +#[serde(rename_all = "camelCase")] #[serde(deny_unknown_fields)] -pub struct GeminiGenerationConfig { - #[serde(rename = "stopSequences")] +pub(crate) struct GeminiGenerationConfig { #[serde(default)] - pub stop_sequences: Vec, - #[serde(rename = "maxOutputTokens")] + pub(crate) stop_sequences: Vec, #[serde(default = "max_tokens_default")] - pub max_output_tokens: usize, - pub temperature: Option, - #[serde(rename = "topP")] - pub top_p: Option, - #[serde(rename = "topK")] - pub top_k: Option, + pub(crate) max_output_tokens: usize, + pub(crate) temperature: Option, + pub(crate) top_p: Option, + pub(crate) top_k: Option, } // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes #[derive(Debug, Deserialize, Serialize, Clone)] -pub struct GeminiRunParams { +#[serde(rename_all = "camelCase")] +pub(crate) struct GeminiRunParams { contents: Vec, - #[serde(rename = "systemInstruction")] system_instruction: GeminiContent, - #[serde(rename = "generationConfig")] generation_config: Option, } -pub struct Gemini { +pub(crate) struct Gemini { configuration: config::Gemini, } impl Gemini { - pub fn new(configuration: config::Gemini) -> Self { + pub(crate) fn 
new(configuration: config::Gemini) -> Self { Self { configuration } } diff --git a/crates/lsp-ai/src/transformer_backends/mistral_fim.rs b/crates/lsp-ai/src/transformer_backends/mistral_fim.rs index 93f5c72..e6f6de3 100644 --- a/crates/lsp-ai/src/transformer_backends/mistral_fim.rs +++ b/crates/lsp-ai/src/transformer_backends/mistral_fim.rs @@ -26,25 +26,25 @@ const fn temperature_default() -> f32 { // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes #[derive(Debug, Deserialize)] -pub struct MistralFIMRunParams { +pub(crate) struct MistralFIMRunParams { #[serde(default = "max_tokens_default")] - pub max_tokens: usize, + pub(crate) max_tokens: usize, #[serde(default = "top_p_default")] - pub top_p: f32, + pub(crate) top_p: f32, #[serde(default = "temperature_default")] - pub temperature: f32, - pub min_tokens: Option, - pub random_seed: Option, + pub(crate) temperature: f32, + pub(crate) min_tokens: Option, + pub(crate) random_seed: Option, #[serde(default)] - pub stop: Vec, + pub(crate) stop: Vec, } -pub struct MistralFIM { +pub(crate) struct MistralFIM { config: config::MistralFIM, } impl MistralFIM { - pub fn new(config: config::MistralFIM) -> Self { + pub(crate) fn new(config: config::MistralFIM) -> Self { Self { config } } diff --git a/crates/lsp-ai/src/transformer_backends/ollama.rs b/crates/lsp-ai/src/transformer_backends/ollama.rs index 16486bf..1662473 100644 --- a/crates/lsp-ai/src/transformer_backends/ollama.rs +++ b/crates/lsp-ai/src/transformer_backends/ollama.rs @@ -16,8 +16,8 @@ use super::TransformerBackend; // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes #[derive(Debug, Deserialize)] -pub struct OllamaRunParams { - pub fim: Option, +pub(crate) struct OllamaRunParams { + pub(crate) fim: Option, messages: Option>, #[serde(default)] options: HashMap, @@ -26,7 +26,7 @@ pub struct OllamaRunParams { keep_alive: Option, 
} -pub struct Ollama { +pub(crate) struct Ollama { configuration: config::Ollama, } diff --git a/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs b/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs index d516adf..d5f68e5 100644 --- a/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs +++ b/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs @@ -38,22 +38,22 @@ const fn temperature_default() -> f32 { // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes #[derive(Debug, Deserialize)] -pub struct OpenAIRunParams { - pub fim: Option, +pub(crate) struct OpenAIRunParams { + pub(crate) fim: Option, messages: Option>, #[serde(default = "max_tokens_default")] - pub max_tokens: usize, + pub(crate) max_tokens: usize, #[serde(default = "top_p_default")] - pub top_p: f32, + pub(crate) top_p: f32, #[serde(default = "presence_penalty_default")] - pub presence_penalty: f32, + pub(crate) presence_penalty: f32, #[serde(default = "frequency_penalty_default")] - pub frequency_penalty: f32, + pub(crate) frequency_penalty: f32, #[serde(default = "temperature_default")] - pub temperature: f32, + pub(crate) temperature: f32, } -pub struct OpenAI { +pub(crate) struct OpenAI { configuration: config::OpenAI, } @@ -68,27 +68,27 @@ struct OpenAICompletionsResponse { error: Option, #[serde(default)] #[serde(flatten)] - pub other: HashMap, + pub(crate) other: HashMap, } #[derive(Debug, Deserialize, Serialize)] -pub struct OpenAIChatMessage { - pub role: String, - pub content: String, +pub(crate) struct OpenAIChatMessage { + pub(crate) role: String, + pub(crate) content: String, } #[derive(Deserialize)] -pub struct OpenAIChatChoices { - pub message: OpenAIChatMessage, +pub(crate) struct OpenAIChatChoices { + pub(crate) message: OpenAIChatMessage, } #[derive(Deserialize)] -pub struct OpenAIChatResponse { - pub choices: Option>, - pub error: Option, +pub(crate) struct OpenAIChatResponse { + pub(crate) choices: Option>, + 
pub(crate) error: Option, #[serde(default)] #[serde(flatten)] - pub other: HashMap, + pub(crate) other: HashMap, } impl OpenAI { diff --git a/crates/lsp-ai/src/transformer_worker.rs b/crates/lsp-ai/src/transformer_worker.rs index f7ab8e9..30a229d 100644 --- a/crates/lsp-ai/src/transformer_worker.rs +++ b/crates/lsp-ai/src/transformer_worker.rs @@ -20,25 +20,25 @@ use crate::transformer_backends::TransformerBackend; use crate::utils::{ToResponseError, TOKIO_RUNTIME}; #[derive(Clone, Debug)] -pub struct CompletionRequest { +pub(crate) struct CompletionRequest { id: RequestId, params: CompletionParams, } impl CompletionRequest { - pub fn new(id: RequestId, params: CompletionParams) -> Self { + pub(crate) fn new(id: RequestId, params: CompletionParams) -> Self { Self { id, params } } } #[derive(Clone, Debug)] -pub struct GenerationRequest { +pub(crate) struct GenerationRequest { id: RequestId, params: GenerationParams, } impl GenerationRequest { - pub fn new(id: RequestId, params: GenerationParams) -> Self { + pub(crate) fn new(id: RequestId, params: GenerationParams) -> Self { Self { id, params } } } @@ -58,7 +58,7 @@ impl GenerationStreamRequest { } #[derive(Clone, Debug)] -pub enum WorkerRequest { +pub(crate) enum WorkerRequest { Completion(CompletionRequest), Generation(GenerationRequest), GenerationStream(GenerationStreamRequest), @@ -159,7 +159,7 @@ fn post_process_response( } } -pub fn run( +pub(crate) fn run( transformer_backends: HashMap>, memory_tx: std::sync::mpsc::Sender, transformer_rx: std::sync::mpsc::Receiver, diff --git a/crates/lsp-ai/src/utils.rs b/crates/lsp-ai/src/utils.rs index 1178060..5e935cb 100644 --- a/crates/lsp-ai/src/utils.rs +++ b/crates/lsp-ai/src/utils.rs @@ -6,7 +6,7 @@ use tree_sitter::Tree; use crate::{config::ChatMessage, memory_backends::ContextAndCodePrompt, splitters::Chunk}; -pub static TOKIO_RUNTIME: Lazy = Lazy::new(|| { +pub(crate) static TOKIO_RUNTIME: Lazy = Lazy::new(|| { runtime::Builder::new_multi_thread() 
.worker_threads(4) .enable_all() @@ -14,7 +14,7 @@ pub static TOKIO_RUNTIME: Lazy = Lazy::new(|| { .expect("Error building tokio runtime") }); -pub trait ToResponseError { +pub(crate) trait ToResponseError { fn to_response_error(&self, code: i32) -> ResponseError; } @@ -28,11 +28,11 @@ impl ToResponseError for anyhow::Error { } } -pub fn tokens_to_estimated_characters(tokens: usize) -> usize { +pub(crate) fn tokens_to_estimated_characters(tokens: usize) -> usize { tokens * 4 } -pub fn format_chat_messages( +pub(crate) fn format_chat_messages( messages: &[ChatMessage], prompt: &ContextAndCodePrompt, ) -> Vec { @@ -47,19 +47,23 @@ pub fn format_chat_messages( .collect() } -pub fn format_context_code_in_str(s: &str, context: &str, code: &str) -> String { +pub(crate) fn format_context_code_in_str(s: &str, context: &str, code: &str) -> String { s.replace("{CONTEXT}", context).replace("{CODE}", code) } -pub fn format_context_code(context: &str, code: &str) -> String { +pub(crate) fn format_context_code(context: &str, code: &str) -> String { format!("{context}\n\n{code}") } -pub fn chunk_to_id(uri: &str, chunk: &Chunk) -> String { +pub(crate) fn chunk_to_id(uri: &str, chunk: &Chunk) -> String { format!("{uri}#{}-{}", chunk.range.start_byte, chunk.range.end_byte) } -pub fn parse_tree(uri: &str, contents: &str, old_tree: Option<&Tree>) -> anyhow::Result { +pub(crate) fn parse_tree( + uri: &str, + contents: &str, + old_tree: Option<&Tree>, +) -> anyhow::Result { let path = std::path::Path::new(uri); let extension = path.extension().map(|x| x.to_string_lossy()); let extension = extension.as_deref().unwrap_or(""); From f5b94f903ad09302500bb0f4a5171d4da944062b Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 12:27:10 +0200 Subject: [PATCH 08/18] fix(ci): install `openssl-dev` in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 
62ab1c0..f6fe83a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev - name: Checkout repository uses: actions/checkout@v4 From 201d6d3cf718c6cf52dd24df362680a5fa6f9cba Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 12:36:41 +0200 Subject: [PATCH 09/18] fix(ci): install `perl` in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f6fe83a..6c7315b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl - name: Checkout repository uses: actions/checkout@v4 From 0afe8d67d5438f1339b60cb37e517933551625a1 Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 12:39:58 +0200 Subject: [PATCH 10/18] fix(ci): install `make` in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6c7315b..1e8f2b2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make - name: Checkout repository uses: actions/checkout@v4 From 6077aa601daed4c42009a3eabfc7102e9b1f046a Mon Sep 17 
00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 12:45:38 +0200 Subject: [PATCH 11/18] fix(ci): install `linux-headers` in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1e8f2b2..fea1e4b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers - name: Checkout repository uses: actions/checkout@v4 From 5210076ad937ae2a1c0ed0ba9d8358419a4ebd54 Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 12:53:33 +0200 Subject: [PATCH 12/18] fix(ci): install `x86_64-linux-gnu-g++` in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fea1e4b..0e0964e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers x86_64-linux-gnu-g++ - name: Checkout repository uses: actions/checkout@v4 From 4aa0514058ab5e15aa4c61c520cb0c878e3de802 Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 13:06:03 +0200 Subject: [PATCH 13/18] fix(ci): remove inexistent package & add in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0e0964e..82d0002 100644 --- 
a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers x86_64-linux-gnu-g++ + run: apk add --no-cache git clang clang-dev lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers - name: Checkout repository uses: actions/checkout@v4 From 5411b72f0d067ec4dba09c8d9e8ce6478fc7943f Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 13:43:49 +0200 Subject: [PATCH 14/18] fix(ci): install `gcc` in musl build --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 82d0002..fc44375 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang clang-dev lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers + run: apk add --no-cache git clang clang-dev lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers gcc - name: Checkout repository uses: actions/checkout@v4 From 2554a4dbd669928e9eb852a717878080b927987f Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 14:45:08 +0200 Subject: [PATCH 15/18] fix(ci): `LLM_LS_TARGET` -> `LSP_AI_TARGET` --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fc44375..e4e26f0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -89,7 +89,7 @@ jobs: name: dist (x86_64-unknown-linux-musl) runs-on: ubuntu-latest env: - LLM_LS_TARGET: x86_64-unknown-linux-musl + LSP_AI_TARGET: x86_64-unknown-linux-musl # For some reason `-crt-static` is not working for clang without lld 
RUSTFLAGS: "-C link-arg=-fuse-ld=lld -C target-feature=-crt-static" container: @@ -99,7 +99,7 @@ jobs: steps: - name: Install dependencies - run: apk add --no-cache git clang clang-dev lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers gcc + run: apk add --no-cache git clang lld musl-dev nodejs npm openssl-dev pkgconfig g++ openssl-dev perl make linux-headers - name: Checkout repository uses: actions/checkout@v4 From 551e3f01141776081eeb4ce419d8e1ae7681a5ef Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 15:13:06 +0200 Subject: [PATCH 16/18] fix(ci): set `contents: write` permissions --- .github/workflows/release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e4e26f0..49826cb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -125,6 +125,8 @@ jobs: name: publish runs-on: ubuntu-latest needs: ["dist", "dist-x86_64-unknown-linux-musl"] + permissions: + contents: write steps: - name: Checkout repository uses: actions/checkout@v4 From da60c7264aade86aebc6b306a35832c7f80fe6c0 Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 15:33:38 +0200 Subject: [PATCH 17/18] docs(ci): add README in `.github` folder --- .github/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 .github/README.md diff --git a/.github/README.md b/.github/README.md new file mode 100644 index 0000000..e7b8600 --- /dev/null +++ b/.github/README.md @@ -0,0 +1,8 @@ +# CI pipelines + +## Release + +To create a new release for `lsp-ai`, all you'll need to do is create a new branch with the following format: `release/{release_name}`. `release_name` is usually the version of the release package in SemVer format `x.x.x{-rcx}`. + +This has the advantage of being able to fix issues for a specific release while continuing development on the `main` branch by cherry-picking patches. 
It's inspired by trunk-based development. + From 39778989c47a44f27b40a1c02153d149db37b519 Mon Sep 17 00:00:00 2001 From: Luc Georges Date: Wed, 26 Jun 2024 15:36:08 +0200 Subject: [PATCH 18/18] docs(ci): move README to `.github/workflows` --- .github/{ => workflows}/README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/{ => workflows}/README.md (100%) diff --git a/.github/README.md b/.github/workflows/README.md similarity index 100% rename from .github/README.md rename to .github/workflows/README.md