mirror of
https://github.com/aljazceru/gitpear.git
synced 2025-12-17 06:04:25 +01:00
5
.gitignore
vendored
Normal file
5
.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
node_modules/
|
||||
coverage/
|
||||
package-lock.json
|
||||
.test_home
|
||||
|
||||
25
Readme.md
Normal file
25
Readme.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# gitpear - 🍐2🍐 transport for git
|
||||
|
||||
CLI, Daemon and [Remote helper](https://www.git-scm.com/docs/gitremote-helpers) for git. It is based on [holepunch](https://docs.holepunch.to/) for networking and data sharing.
|
||||
|
||||
## How it works
|
||||
|
||||
gitpear creates local [bare repository](https://git-scm.com/docs/git-init#Documentation/git-init.txt---bare) in application directory (default `~/.gitpear/<repository name>`), adds it as a [git remote](https://git-scm.com/docs/git-remote) in corresponding repository with name `pear`. So just like in traditional flow doing `git push origin`, here we do `git push pear`. Upon each push gitpear regenerates [pack files](https://git-scm.com/book/en/v2/Git-Internals-Packfiles) that are shared in ephemeral [hyperdrive](https://docs.holepunch.to/building-blocks/hyperdrive).
|
||||
|
||||
To clone, fetch or pull, use `git <clone|fetch|pull> pear:<public key>/<repo name>`. It implements [git remote helper](https://www.git-scm.com/docs/gitremote-helpers) that uses [hyperswarm](https://docs.holepunch.to/building-blocks/hyperswarm) for networking in order to directly connect to peer. After connection is initialized it sends RPC request to retrieve list of repositories, clone corresponding pack files and unpack them locally.
|
||||
|
||||
## Usage
|
||||
|
||||
All data will be persisted in application directory (default `~/.gitpear`). To change it, provide the environment variable `GIT_PEAR`.
|
||||
|
||||
* `gitpear daemon <-s, --start | -k, --stop>` - start or stop daemon
|
||||
|
||||
* `gitpear key` - print out public key. Share it with your peers so that they can do `git pull pear:<public key>/<repo name>`
|
||||
|
||||
* `gitpear init [-s, --share] <path>` - It will create [bare repository](https://git-scm.com/docs/git-init#Documentation/git-init.txt---bare) of the same name in application directory (default ~/.gitpear/<repository name>). It will add [git remote](https://git-scm.com/docs/git-remote) in current repository with name `pear`. So just like in traditional flow doing `git push origin`, here we do `git push pear`. By default repository will not be shared. To enable sharing provide `-s` or call `gitpear share <path>` later
|
||||
|
||||
* `gitpear share <path>` - makes repository sharable
|
||||
|
||||
* `gitpear unshare <path>` - stop sharing repository
|
||||
|
||||
* `gitpear list [-s, --shared]` - list all or (only shared) repositories
|
||||
5
bin/preinstall.sh
Executable file
5
bin/preinstall.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/bin/sh
# npm preinstall hook: make sure the gitpear application home exists.
# Fixes: "$HOME" is quoted to survive paths containing spaces, and the
# explicit `[ ! -d ... ]` test is dropped -- `mkdir -p` is already a
# no-op when the directory exists.
mkdir -p "$HOME/.gitpear"
|
||||
51
package.json
Normal file
51
package.json
Normal file
@@ -0,0 +1,51 @@
|
||||
{
|
||||
"name": "gitpear",
|
||||
"version": "1.0.0",
|
||||
"description": "p2p transport helpers, daemon and cli for git based on holepunch/hypercore stack",
|
||||
"scripts": {
|
||||
"test": "GIT_PEAR=./.test_home brittle ./test/**/*.test.js --coverage --bail",
|
||||
"types": "tsc src/*.js --declaration --allowJs --emitDeclarationOnly --outDir types --target es2015",
|
||||
"lint": "standard --fix",
|
||||
"preinstall": "./bin/preinstall.sh",
|
||||
"set-test": "tar xzf test_home.tar.gz -C . "
|
||||
},
|
||||
"bin": {
|
||||
"gitpeard": "./src/gitpeard.js",
|
||||
"git-remote-pear": "./src/git-remote-pear.js",
|
||||
"gitpear": "./src/cli.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/dzdidi/gitpear.git"
|
||||
},
|
||||
"keywords": [
|
||||
"p2p",
|
||||
"pear2pear",
|
||||
"peer2peer",
|
||||
"git",
|
||||
"transport",
|
||||
"holepunch",
|
||||
"hypercore"
|
||||
],
|
||||
"author": "dzdidi",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/dzdidi/gitpear/issues"
|
||||
},
|
||||
"homepage": "https://github.com/dzdidi/gitpear#readme",
|
||||
"devDependencies": {
|
||||
"@hyperswarm/testnet": "^3.1.4",
|
||||
"brittle": "^3.3.2",
|
||||
"standard": "^17.1.0",
|
||||
"typescript": "^5.1.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"chokidar": "^3.5.3",
|
||||
"commander": "^11.0.0",
|
||||
"corestore": "^6.10.1",
|
||||
"hyperdrive": "^11.5.3",
|
||||
"hyperswarm": "^4.5.1",
|
||||
"protomux-rpc": "^1.4.1",
|
||||
"random-access-memory": "^6.2.0"
|
||||
}
|
||||
}
|
||||
118
src/appHome.js
Normal file
118
src/appHome.js
Normal file
@@ -0,0 +1,118 @@
|
||||
const homedir = require('os').homedir()
|
||||
const crypto = require('hypercore-crypto')
|
||||
const chokidar = require('chokidar')
|
||||
|
||||
const fs = require('fs')
|
||||
|
||||
const APP_HOME = process.env.GIT_PEAR || `${homedir}/.gitpear`
|
||||
|
||||
// Create the per-repository application folder, including the bare-repo
// "code" subdirectory (i.e. ~/.gitpear/<name>/code).
function createAppFolder (name) {
  const codeDir = `${APP_HOME}/${name}/code`
  fs.mkdirSync(codeDir, { recursive: true })
}
|
||||
|
||||
// Mark a repository as shared by creating the `.git-daemon-export-ok`
// sentinel file (mirrors git-daemon's convention).
// Fix: the original leaked the file descriptor returned by fs.openSync;
// it is now closed immediately so repeated share calls in a long-running
// process cannot exhaust descriptors.
function shareAppFolder (name) {
  const fd = fs.openSync(`${APP_HOME}/${name}/.git-daemon-export-ok`, 'w')
  fs.closeSync(fd)
}
|
||||
|
||||
// Remove the `.git-daemon-export-ok` sentinel so the repository is no
// longer advertised to peers. Throws if the repo was not shared.
function unshareAppFolder (name) {
  const sentinel = `${APP_HOME}/${name}/.git-daemon-export-ok`
  fs.unlinkSync(sentinel)
}
|
||||
|
||||
// A repository counts as initialized once its bare repo has a HEAD file.
function isInitialized (name) {
  const headPath = `${APP_HOME}/${name}/code/HEAD`
  return fs.existsSync(headPath)
}
|
||||
|
||||
// A repository is shared iff its `.git-daemon-export-ok` sentinel exists.
function isShared (name) {
  const sentinel = `${APP_HOME}/${name}/.git-daemon-export-ok`
  return fs.existsSync(sentinel)
}
|
||||
|
||||
// List repository folders under APP_HOME.
// With `sharedOnly` truthy only repositories carrying the
// `.git-daemon-export-ok` sentinel are returned; otherwise every
// non-hidden entry (dot-files such as `.seed` are skipped).
function list (sharedOnly) {
  const entries = fs.readdirSync(APP_HOME)
  if (sharedOnly) return entries.filter(repo => isShared(repo))
  return entries.filter(entry => !entry.startsWith('.'))
}
|
||||
|
||||
// Path of the bare repository ("code" folder) backing `name`.
function getCodePath (name) {
  return [APP_HOME, name, 'code'].join('/')
}
|
||||
|
||||
// Derive and return the daemon's public key (hex) from the stored seed.
// Returns undefined (after a hint on stderr) when no seed exists yet --
// the seed is only created by getKeyPair on first daemon start.
function readPk () {
  let seed
  try {
    seed = fs.readFileSync(`${APP_HOME}/.seed`)
  } catch (e) {
    if (e.code !== 'ENOENT') throw e

    console.error('Seed will be generated after first start of daemon')
    return
  }
  return crypto.keyPair(seed).publicKey.toString('hex')
}
|
||||
|
||||
// Load the 32-byte seed from disk -- generating and persisting a fresh
// one on first use -- and return the derived key pair.
function getKeyPair () {
  const seedPath = `${APP_HOME}/.seed`
  let seed
  try {
    seed = fs.readFileSync(seedPath)
  } catch (err) {
    if (err.code !== 'ENOENT') throw err

    // First run: create and persist a random seed.
    seed = crypto.randomBytes(32)
    fs.writeFileSync(seedPath, seed)
  }
  return crypto.keyPair(seed)
}
|
||||
|
||||
// Watch APP_HOME recursively and invoke `cb(event, path)` for file
// additions, changes and removals only (other chokidar events ignored).
function watch (cb) {
  const relevant = ['add', 'change', 'unlink']
  chokidar.watch(APP_HOME).on('all', (event, path) => {
    if (relevant.includes(event)) cb(event, path)
  })
}
|
||||
|
||||
// Open (append mode) the daemon's stdout log and return the raw file
// descriptor -- suitable for the child_process `stdio` option.
function getOutStream () {
  return fs.openSync(APP_HOME + '/out.log', 'a')
}
|
||||
|
||||
// Open (append mode) the daemon's stderr log and return the raw file
// descriptor.
function getErrStream () {
  return fs.openSync(APP_HOME + '/err.log', 'a')
}
|
||||
|
||||
// Persist the daemon's process id so `gitpear daemon -k` can stop it.
function storeDaemonPid (pid) {
  const payload = Buffer.from(String(pid))
  fs.writeFileSync(`${APP_HOME}/.daemon.pid`, payload)
}
|
||||
|
||||
// Read the stored daemon pid, or return undefined when no pid file
// exists (daemon not running).
// Fix: parseInt is given an explicit base-10 radix -- a pid file whose
// content ever began with "0x" would otherwise be parsed as hex.
function getDaemonPid () {
  try {
    return parseInt(fs.readFileSync(`${APP_HOME}/.daemon.pid`).toString(), 10)
  } catch (e) {
    if (e.code !== 'ENOENT') throw e
  }
}
|
||||
|
||||
// Delete the pid file; a missing file is not an error.
function removeDaemonPid () {
  try {
    fs.unlinkSync(`${APP_HOME}/.daemon.pid`)
  } catch (err) {
    if (err.code !== 'ENOENT') throw err
  }
}
|
||||
|
||||
module.exports = {
|
||||
createAppFolder,
|
||||
shareAppFolder,
|
||||
unshareAppFolder,
|
||||
isInitialized,
|
||||
isShared,
|
||||
list,
|
||||
readPk,
|
||||
getKeyPair,
|
||||
watch,
|
||||
getCodePath,
|
||||
APP_HOME,
|
||||
getOutStream,
|
||||
getErrStream,
|
||||
storeDaemonPid,
|
||||
getDaemonPid,
|
||||
removeDaemonPid
|
||||
}
|
||||
140
src/cli.js
Executable file
140
src/cli.js
Executable file
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const { spawn } = require('child_process')
|
||||
|
||||
const commander = require('commander')
|
||||
const program = new commander.Command()
|
||||
|
||||
const path = require('path')
|
||||
const fs = require('fs')
|
||||
|
||||
const appHome = require('./appHome')
|
||||
const git = require('./git')
|
||||
|
||||
const pkg = require('../package.json')
|
||||
// Top-level CLI metadata.
program.name('gitpear-cli')
program.description('CLI to gitpear')
program.version(pkg.version)
|
||||
|
||||
// `gitpear init [-s] [path]` -- register a git repo with gitpear:
// creates the bare mirror under APP_HOME, wires it up as the `pear`
// remote, and optionally marks it shared right away.
program
  .command('init')
  .description('initialize a gitpear repo')
  .addArgument(new commander.Argument('[p]', 'path to the repo').default('.'))
  .option('-s, --share', 'share the repo, default false')
  .action(async (p, options) => {
    const fullPath = path.resolve(p)
    const isGitRepo = fs.existsSync(path.join(fullPath, '.git'))
    if (!isGitRepo) {
      console.error('Not a git repo')
      process.exit(1)
    }

    // Repository name is the last path component.
    const name = fullPath.split(path.sep).pop()
    if (appHome.isInitialized(name)) {
      console.error(`${name} is already initialized`)
      process.exit(1)
    }

    appHome.createAppFolder(name)
    console.log(`Added project "${name}" to gitpear`)
    await git.createBareRepo(name)
    console.log(`Created bare repo for "${name}"`)
    await git.addRemote(name)
    console.log(`Added git remote for "${name}" as "pear"`)

    if (!options.share) return

    appHome.shareAppFolder(name)
    console.log(`Shared "${name}" project`)
    // push?
  })
|
||||
|
||||
// `gitpear share [path]` -- mark an already-initialized repo as shared.
program
  .command('share')
  .description('share a gitpear repo')
  .addArgument(new commander.Argument('[p]', 'path to the repo').default('.'))
  .action(async (p, options) => {
    const name = path.resolve(p).split(path.sep).pop()
    if (!appHome.isInitialized(name)) {
      console.error(`${name} is not initialized`)
      process.exit(1)
    }

    appHome.shareAppFolder(name)
    console.log(`Shared "${name}" project`)
  })
|
||||
|
||||
// `gitpear unshare [path]` -- stop sharing an initialized repo.
program
  .command('unshare')
  .description('unshare a gitpear repo')
  .addArgument(new commander.Argument('[p]', 'path to the repo').default('.'))
  .action((p, options) => {
    const name = path.resolve(p).split(path.sep).pop()
    if (!appHome.isInitialized(name)) {
      console.error(`${name} is not initialized`)
      process.exit(1)
    }

    appHome.unshareAppFolder(name)
    console.log(`Unshared "${name}" project`)
  })
|
||||
|
||||
// `gitpear list [-s]` -- print repository names, optionally shared only.
// NOTE(review): with no declared arguments commander invokes the action
// as (options, command), so `p` here is the options object and `options`
// the Command instance -- `.opts()` resolves on the latter. Confirm
// against the commander version in use.
program
  .command('list')
  .description('list all gitpear repos')
  .option('-s, --shared', 'list only shared repos')
  .action((p, options) => {
    const names = appHome.list(options.opts().shared)
    for (const name of names) console.log(name)
  })
|
||||
|
||||
// `gitpear key` -- print the daemon's public key for peers to use.
program
  .command('key')
  .description('get a public key of gitpear')
  .action((p, options) => {
    console.log('Public key:', appHome.readPk())
  })
|
||||
|
||||
// `gitpear daemon <-s|-k>` -- manage the background daemon. The daemon
// is spawned detached with stdio redirected to log files under
// APP_HOME, and its pid persisted so `--stop` can find it later.
program
  .command('daemon')
  .description('start/stop gitpear daemon')
  .option('-s, --start', 'start daemon')
  .option('-k, --stop', 'stop daemon')
  .action((p, options) => {
    const { start, stop } = options.opts()
    if (start) {
      const runningPid = appHome.getDaemonPid()
      if (runningPid) {
        console.error('Daemon already running with PID:', runningPid)
        process.exit(1)
      }

      const daemon = spawn('gitpeard', {
        detached: true,
        stdio: ['ignore', appHome.getOutStream(), appHome.getErrStream()]
      })
      console.log('Daemon started. Process ID:', daemon.pid)
      appHome.storeDaemonPid(daemon.pid)
      daemon.unref()
    } else if (stop) {
      const pid = appHome.getDaemonPid()
      if (!pid) {
        console.error('Daemon not running')
        process.exit(1)
      }

      process.kill(pid)

      appHome.removeDaemonPid()
      console.log('Daemon stopped. Process ID:', pid)
    } else {
      console.error('No option provided')
      process.exit(1)
    }
  })

program.parse()
|
||||
112
src/git-remote-pear.js
Executable file
112
src/git-remote-pear.js
Executable file
@@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const ProtomuxRPC = require('protomux-rpc')
|
||||
|
||||
const RAM = require('random-access-memory')
|
||||
const Corestore = require('corestore')
|
||||
const Hyperswarm = require('hyperswarm')
|
||||
const Hyperdrive = require('hyperdrive')
|
||||
const crypto = require('hypercore-crypto')
|
||||
|
||||
const git = require('./git.js')
|
||||
|
||||
// Remote-helper entry point: git invokes us as
// `git-remote-pear <remote> <url>`, so argv[3] carries the pear:// URL.
const url = process.argv[3]
const matches = url.match(/pear:\/\/([a-f0-9]{64})\/(.*)/)

if (!matches || matches.length < 3) {
  console.error('Invalid URL')
  process.exit(1)
}

const [, targetKey, repoName] = matches

// Ephemeral in-memory store: the helper only lives for one git command.
const store = new Corestore(RAM)
const swarm = new Hyperswarm()

// Dial the peer advertised under the discovery key of its public key.
swarm.join(crypto.discoveryKey(Buffer.from(targetKey, 'hex')), { server: false })

swarm.on('connection', async (socket) => {
  store.replicate(socket)
  const rpc = new ProtomuxRPC(socket)

  // Ask the peer which repositories it shares (name -> drive key map).
  const reposRes = await rpc.request('get-repos')
  const repositories = JSON.parse(reposRes.toString())
  if (!repositories) process.exit(1)

  const driveKey = Buffer.from(repositories[repoName], 'hex')
  if (!driveKey) {
    console.error('Failed to retrieve pack key')
    process.exit(1)
  }

  // Join the repo's hyperdrive swarm and wait for the core to update
  // before talking the git remote-helper protocol on stdio.
  const packStore = store.namespace(repoName)
  const drive = new Hyperdrive(packStore, driveKey)
  await drive.ready()
  swarm.join(drive.discoveryKey, { server: false, client: true })
  await swarm.flush()

  await drive.core.update({ wait: true })

  const refsRes = await rpc.request('get-refs', Buffer.from(repoName))

  await talkToGit(JSON.parse(refsRes.toString()), drive)
})
|
||||
|
||||
// Speak the git remote-helper protocol on stdio, using the advertised
// `refs` (map of ref name -> sha) and the peer's hyperdrive of packs.
// Supported commands: `capabilities`, `list` and batched `fetch`.
async function talkToGit (refs, drive) {
  for (const ref in refs) {
    console.warn(refs[ref] + '\t' + ref)
  }
  process.stdin.setEncoding('utf8')
  // NOTE(review): never set to true anywhere, so an empty command line
  // always terminates the helper -- confirm this is intended.
  const didFetch = false
  process.stdin.on('readable', async function () {
    const chunk = process.stdin.read()
    if (chunk === 'capabilities\n') {
      process.stdout.write('fetch\n\n')
    } else if (chunk === 'list\n') {
      for (const branch of Object.keys(refs)) {
        process.stdout.write(refs[branch] + ' ' + branch + '\n')
      }
      process.stdout.write('\n')
    } else if (chunk && chunk.search(/^fetch/) !== -1) {
      // Collect the distinct shas requested by the fetch batch.
      // Fix: the original used `await lines.forEach(async ...)`, which
      // awaits nothing (forEach discards the returned promises); a plain
      // synchronous loop expresses the intent correctly.
      const targets = []
      for (const line of chunk.split(/\n/)) {
        if (line === '') continue

        const [, sha] = line.split(/\s/) // "fetch <sha> <ref>"
        if (!targets.includes(sha)) targets.push(sha)
      }

      for (const sha of targets) {
        const exist = await drive.exists(`/packs/${sha}.pack`)
        if (!exist) process.exit(1)

        const driveStream = drive.createReadStream(`/packs/${sha}.pack`, { start: 0 })
        await git.unpackStream(driveStream)
      }

      process.stdout.write('\n\n')
      process.exit(0)
    } else if (chunk && chunk !== '' && chunk !== '\n') {
      console.warn('unhandled command: "' + chunk + '"')
    }

    if (chunk === '\n') {
      process.stdout.write('\n')
      if (!didFetch) {
        // If git already has all the refs it needs, we should exit now.
        process.exit()
      }
    }
  })
  process.stdout.on('error', function () {
    // stdout was closed
  })
}
|
||||
181
src/git.js
Normal file
181
src/git.js
Normal file
@@ -0,0 +1,181 @@
|
||||
const { getCodePath } = require('./appHome')
|
||||
const { spawn } = require('child_process')
|
||||
|
||||
// Run `git ls-remote <url>` and resolve with a map of ref name -> sha.
// Rejects with an Error when the child exits non-zero.
// Fix: stdout chunks are buffered and parsed only after the process
// exits -- the original parsed each 'data' chunk independently, which
// silently corrupts any ref line that straddles a chunk boundary.
async function lsPromise (url) {
  const ls = spawn('git', ['ls-remote', url])

  let stdout = ''
  ls.stdout.on('data', chunk => { stdout += chunk.toString() })

  return new Promise((resolve, reject) => {
    ls.on('close', (code) => {
      if (code) {
        return reject(new Error(`git ls-remote exited with code ${code}`))
      }

      const res = {}
      for (const line of stdout.split('\n')) {
        if (!line) continue
        const [sha, branch] = line.split('\t')
        res[branch] = sha
      }
      resolve(res)
    })
  })
}
||||
|
||||
// Initialize the bare repository backing `name` (its "code" folder).
// Fix: spread process.env into the child environment -- the original
// passed `{ GIT_DIR: ... }` alone, which replaces the entire child env
// and drops PATH plus every other variable from the spawned git process.
async function createBareRepo (name) {
  const env = { ...process.env, GIT_DIR: getCodePath(name) }
  const init = spawn('git', ['init', '--bare'], { env })
  init.stderr.pipe(process.stderr)
  return new Promise((resolve, reject) => {
    init.on('close', (code) => {
      if (code) return reject(new Error(`git init exited with code ${code}`))

      resolve()
    })
  })
}
|
||||
|
||||
// Register the bare repo as a git remote named "pear" in the current
// working directory's repository.
async function addRemote (name) {
  const remote = spawn('git', ['remote', 'add', 'pear', getCodePath(name)])
  remote.stderr.pipe(process.stderr)
  return new Promise((resolve, reject) => {
    remote.on('close', (code) => {
      if (!code) return resolve()

      reject(new Error(`git remote add exited with code ${code}`))
    })
  })
}
|
||||
|
||||
// Left-pad a number's hex representation to four digits, as required by
// git's pkt-line length prefix ('####line'). Replaces the hand-rolled
// while-loop with the equivalent String.prototype.padStart.
function pad4 (num) {
  return num.toString(16).padStart(4, '0')
}
|
||||
|
||||
// Drive `git-upload-pack` for `dir`, negotiating a single `want` (and
// optional `have`) over the pkt-line protocol, and return the child
// process. Once negotiation finishes the process emits 'ready'; the
// remainder of its stdout is the raw pack stream.
// Protocol reference:
// https://github.com/git/git/blob/b594c975c7e865be23477989d7f36157ad437dc7/Documentation/technical/pack-protocol.txt#L346-L393
function uploadPack (dir, want, have) {
  const upload = spawn('git-upload-pack', [dir])
  writeln('want ' + want)
  writeln()
  if (have) {
    writeln('have ' + have)
    writeln()
  }
  writeln('done')

  // Read stdout one pkt-line at a time -- and no more than necessary --
  // so the rest of the output can later be piped away as the pack
  // stream. `state` holds the current protocol phase; it returns false
  // when negotiation is over and reading should stop.
  let state = list
  upload.stdout.on('readable', function () {
    while (true) {
      const line = getline()
      if (line === null) {
        return // wait for more output
      }
      if (!state(line)) {
        upload.stdout.removeAllListeners('readable')
        upload.emit('ready')
        return
      }
    }
  })

  // Length of a partially-read pkt-line, kept across 'readable' events.
  let pendingLen = null

  // Extract exactly one pkt-line ('####line', '####' = hex length of
  // 'line') from the stream, or return null when not fully buffered yet.
  function getline () {
    if (!pendingLen) {
      pendingLen = upload.stdout.read(4)
      if (pendingLen === null) {
        return null
      }
      pendingLen = parseInt(pendingLen, 16)
    }

    // '0000' is a flush packet: report it as an empty line.
    if (pendingLen === 0) {
      return ''
    }

    // Subtract the four length digits and the terminating newline.
    const line = upload.stdout.read(pendingLen - 4 - 1)
    if (!line) {
      return null
    }
    pendingLen = null
    upload.stdout.read(1) // discard the newline
    return line.toString()
  }

  // Phase 1: the server advertises its refs (already known from
  // `git ls-remote`), so just wait for the flush packet.
  function list (line) {
    if (line === '') {
      state = have ? ackObjectsContinue : waitForNak
    }
    return true
  }

  // Wants only: git answers 'NAK' and then streams the pack.
  function waitForNak (line) {
    return line !== 'NAK'
  }

  // With haves: wait for an 'ACK' that does not end in 'continue'.
  function ackObjectsContinue (line) {
    return !(line.search(/^ACK/) !== -1 && line.search(/continue$/) === -1)
  }

  // Frame one pkt-line on stdin; without an argument, send a flush pkt.
  function writeln (line) {
    if (line) {
      const len = pad4(line.length + 4 + 1) // plus one for the newline
      upload.stdin.write(len + line + '\n')
    } else {
      upload.stdin.write('0000')
    }
  }

  return upload
}
|
||||
|
||||
// Index a pack file on disk into `path` via `git index-pack`.
// Fix: reject with a proper Error carrying the exit code instead of a
// bare number, so callers get an Error instance with a stack trace.
async function unpackFile (file, path) {
  const unpack = spawn('git', ['index-pack', '-v', file, '-o', path])
  unpack.stderr.pipe(process.stderr)

  return new Promise((resolve, reject) => {
    unpack.on('exit', (code) => {
      // These writes are actually necessary for git to finish checkout.
      process.stdout.write('\n\n')
      if (code) return reject(new Error(`git index-pack exited with code ${code}`))

      return resolve()
    })
  })
}
|
||||
|
||||
// Pipe a pack stream into `git index-pack --stdin --fix-thin`, resolving
// when indexing completes.
// Fixes: reject with an Error instead of a bare numeric exit code, and
// drop the stale comment copied from unpackFile (no stdout writes here).
async function unpackStream (packStream) {
  const unpack = spawn('git', ['index-pack', '--stdin', '-v', '--fix-thin'])
  unpack.stderr.pipe(process.stderr)

  packStream.pipe(unpack.stdin)

  return new Promise((resolve, reject) => {
    unpack.on('exit', (code) => {
      if (code) return reject(new Error(`git index-pack exited with code ${code}`))

      return resolve()
    })
  })
}
|
||||
|
||||
module.exports = { lsPromise, uploadPack, unpackFile, unpackStream, createBareRepo, addRemote }
|
||||
57
src/gitpeard.js
Executable file
57
src/gitpeard.js
Executable file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env node
|
||||
const RAM = require('random-access-memory')
|
||||
const Hyperswarm = require('hyperswarm')
|
||||
const crypto = require('hypercore-crypto')
|
||||
|
||||
const RPC = require('./rpc.js')
|
||||
const setState = require('./state.js')
|
||||
const appHome = require('./appHome.js')
|
||||
|
||||
const Corestore = require('corestore')
|
||||
|
||||
;(async () => {
  // Identity: stable key pair derived from the persisted seed.
  const keyPair = appHome.getKeyPair()
  const swarm = new Hyperswarm({ keyPair })

  // Ephemeral in-memory store; drives are rebuilt on every daemon start.
  const store = new Corestore(RAM)

  // Announce ourselves under the discovery key of our public key.
  swarm.join(crypto.discoveryKey(keyPair.publicKey))
  await swarm.flush()

  console.log('Public key:', appHome.readPk())

  let state = await setState(store)
  let { announcedRefs, repositories, drives } = state
  let oldAnnouncedRefs = Object.keys({ ...announcedRefs }).sort().join(',')

  logRepos(repositories)

  let rpc = new RPC(announcedRefs, repositories, drives)

  // Rebuild state whenever something under APP_HOME changes, but only
  // swap the RPC surface when the set of announced refs changed.
  appHome.watch(async (event, path) => {
    state = await setState(store, drives)
    announcedRefs = state.announcedRefs
    repositories = state.repositories
    drives = state.drives

    const newAnnouncedRefs = Object.keys({ ...announcedRefs }).sort().join(',')
    if (oldAnnouncedRefs === newAnnouncedRefs) return
    oldAnnouncedRefs = newAnnouncedRefs

    logRepos(repositories)

    rpc = new RPC(announcedRefs, repositories, drives)
  })

  swarm.on('connection', (socket, peerInfo) => {
    socket.on('error', console.error)
    store.replicate(socket)
    rpc.setHandlers(socket, peerInfo)
  })
})()
|
||||
|
||||
// Print every announced ref as "<sha> \t <ref> \t <repo>".
function logRepos (repositories) {
  for (const [repo, refs] of Object.entries(repositories)) {
    for (const ref of Object.keys(refs)) {
      console.log(refs[ref], '\t', ref, '\t', repo)
    }
  }
}
|
||||
38
src/rpc.js
Normal file
38
src/rpc.js
Normal file
@@ -0,0 +1,38 @@
|
||||
const ProtomuxRPC = require('protomux-rpc')
|
||||
|
||||
module.exports = class RPC {
|
||||
constructor (announcedRefs, repositories, drives) {
|
||||
this.connections = {}
|
||||
this.announcedRefs = announcedRefs
|
||||
this.repositories = repositories
|
||||
this.drives = drives
|
||||
}
|
||||
|
||||
async setHandlers (socket, peerInfo) {
|
||||
if (this.connections[peerInfo.publicKey]) return this.connections[peerInfo.publicKey]
|
||||
|
||||
const rpc = new ProtomuxRPC(socket)
|
||||
// XXX: handshaking can be used for access and permission management
|
||||
// for example check of peerInfo.publicKey is in a list of allowed keys
|
||||
// which can in turn be stored in a .git-daemon-export-ok file
|
||||
|
||||
rpc.respond('get-repos', req => this.getReposHandler(req))
|
||||
rpc.respond('get-refs', async req => await this.getRefsHandler(req))
|
||||
|
||||
this.connections[peerInfo.publicKey] = rpc
|
||||
}
|
||||
|
||||
getReposHandler (_req) {
|
||||
const res = {}
|
||||
for (const repo in this.repositories) {
|
||||
res[repo] = this.drives[repo].key.toString('hex')
|
||||
}
|
||||
return Buffer.from(JSON.stringify(res))
|
||||
}
|
||||
|
||||
getRefsHandler (req) {
|
||||
const res = this.repositories[req.toString()]
|
||||
|
||||
return Buffer.from(JSON.stringify(res))
|
||||
}
|
||||
}
|
||||
32
src/state.js
Normal file
32
src/state.js
Normal file
@@ -0,0 +1,32 @@
|
||||
const Hyperdrive = require('hyperdrive')
|
||||
|
||||
const git = require('./git.js')
|
||||
const appHome = require('./appHome.js')
|
||||
|
||||
module.exports = async function setState (store, drives = {}) {
|
||||
const repos = appHome.list(true)
|
||||
|
||||
const announcedRefs = {}
|
||||
const repositories = {}
|
||||
|
||||
for (const repo of repos) {
|
||||
if (!drives[repo]) {
|
||||
drives[repo] = new Hyperdrive(store.namespace(repo))
|
||||
await drives[repo].ready()
|
||||
}
|
||||
|
||||
const ls = await git.lsPromise(appHome.getCodePath(repo))
|
||||
|
||||
repositories[repo] = {}
|
||||
for (const ref in ls) {
|
||||
repositories[repo][ref] = ls[ref]
|
||||
announcedRefs[ls[ref]] = repo
|
||||
|
||||
const localPackStream = git.uploadPack(appHome.getCodePath(repo), ls[ref])
|
||||
const driveStream = drives[repo].createWriteStream(`/packs/${ls[ref]}.pack`)
|
||||
localPackStream.on('ready', () => localPackStream.stdout.pipe(driveStream))
|
||||
}
|
||||
}
|
||||
|
||||
return { announcedRefs, repositories, drives }
|
||||
}
|
||||
68
test/appHome.test.js
Normal file
68
test/appHome.test.js
Normal file
@@ -0,0 +1,68 @@
|
||||
const { test } = require('brittle')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
const appHome = require('../src/appHome')
|
||||
|
||||
test('getAppHome', t => {
  t.ok(appHome.APP_HOME)
})

test('createAppFolder, share, is shared, unshare, isInitialized, list, getCodePath', t => {
  const name = 'appHome-test'
  const sentinel = path.join(appHome.APP_HOME, name, '.git-daemon-export-ok')

  appHome.createAppFolder(name)

  t.ok(fs.existsSync(path.join(appHome.APP_HOME, name, 'code')))

  // Fresh folder: not shared yet.
  t.absent(appHome.isShared(name))
  t.absent(fs.existsSync(sentinel))

  appHome.shareAppFolder(name)

  t.ok(appHome.isShared(name))
  t.ok(fs.existsSync(sentinel))

  appHome.unshareAppFolder(name)

  t.absent(appHome.isShared(name))
  t.absent(fs.existsSync(sentinel))

  // No bare repo was created, so the folder is not "initialized".
  t.absent(appHome.isInitialized(name))
  t.ok(appHome.isInitialized('foo'))

  t.alike(new Set(appHome.list()), new Set(['foo', 'bar', 'zar', name]))
  t.alike(new Set(appHome.list(true)), new Set(['foo', 'bar', 'zar']))

  t.alike(path.resolve(appHome.getCodePath(name)), path.resolve(path.join(appHome.APP_HOME, name, 'code')))

  t.teardown(() => {
    fs.rmdirSync(path.join(appHome.APP_HOME, name, 'code'), { recursive: true })
  })
})

test('readPk, getKeyPair', t => {
  t.ok(appHome.readPk())
  t.ok(appHome.getKeyPair())
})

test('getOutStream, getErrStream', t => {
  const outLog = path.join(appHome.APP_HOME, 'out.log')
  const errLog = path.join(appHome.APP_HOME, 'err.log')

  t.absent(fs.existsSync(outLog))
  t.ok(appHome.getOutStream())
  t.ok(fs.existsSync(outLog))

  t.absent(fs.existsSync(errLog))
  t.ok(appHome.getErrStream())
  t.ok(fs.existsSync(errLog))

  t.teardown(() => {
    fs.unlinkSync(outLog)
    fs.unlinkSync(errLog)
  })
})

test('getDaemonPid, removeDaemonPid', t => {
  t.absent(appHome.getDaemonPid())
  appHome.storeDaemonPid(123)
  t.alike(appHome.getDaemonPid(), 123)
  appHome.removeDaemonPid()
  t.absent(appHome.getDaemonPid())
})
|
||||
65
test/git.test.js
Normal file
65
test/git.test.js
Normal file
@@ -0,0 +1,65 @@
|
||||
const test = require('brittle')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
|
||||
const appHome = require('../src/appHome.js')
|
||||
|
||||
const git = require('../src/git.js')
|
||||
|
||||
test('git - lsPromise', async t => {
  const refs = await git.lsPromise('./')

  t.ok(refs)
  t.ok(refs.HEAD)
  t.is(Buffer.from(refs.HEAD, 'hex').length, 20)
  // Every non-HEAD entry should be a fully qualified ref with a sha-1.
  for (const [ref, sha] of Object.entries(refs)) {
    if (ref === 'HEAD') continue

    t.ok(ref.startsWith('refs/'))
    t.is(Buffer.from(sha, 'hex').length, 20)
  }
})

test('git - uploadPack (wo have)', async t => {
  t.plan(3)
  const { HEAD } = await git.lsPromise('./')
  t.ok(HEAD)

  const upload = git.uploadPack('./', HEAD)
  upload.on('exit', (code) => t.ok(code === 0))
  upload.on('ready', () => {
    // Drain the pack stream; we only care that it completes.
    const sink = fs.createWriteStream('/dev/null')
    upload.stdout.pipe(sink)
    sink.on('close', () => t.pass())
  })
})

test('git - uploadPack (w have)', { skip: true }, async t => {
  t.plan(3)
  const SECOND_COMMIT = ''
  const { HEAD } = await git.lsPromise('./')
  t.ok(HEAD)

  const upload = git.uploadPack('./', HEAD, SECOND_COMMIT)

  upload.on('exit', (code) => t.ok(code === 0))
  upload.on('ready', () => {
    const sink = fs.createWriteStream('/dev/null')
    upload.stdout.pipe(sink)
    sink.on('close', () => t.pass())
  })
})

test('git - createBareRepo', async t => {
  const repoDir = path.join(appHome.APP_HOME, 'test-git')

  t.absent(fs.existsSync(path.join(repoDir, 'code')))
  appHome.createAppFolder('test-git')

  t.absent(fs.existsSync(path.join(repoDir, 'code', 'HEAD')))
  await git.createBareRepo('test-git')

  t.ok(fs.existsSync(path.join(repoDir, 'code', 'HEAD')))

  t.teardown(() => {
    fs.rmdirSync(repoDir, { recursive: true })
  })
})
|
||||
80
test/rpc.test.js
Normal file
80
test/rpc.test.js
Normal file
@@ -0,0 +1,80 @@
|
||||
const test = require('brittle')
|
||||
const RAM = require('random-access-memory')
|
||||
const createTestnet = require('@hyperswarm/testnet')
|
||||
const Corestore = require('corestore')
|
||||
const Hyperswarm = require('hyperswarm')
|
||||
const Hyperdrive = require('hyperdrive')
|
||||
const ProtomuxRPC = require('protomux-rpc')
|
||||
|
||||
const RPC = require('../src/rpc.js')
|
||||
const setState = require('../src/state.js')
|
||||
|
||||
test('constructor', async t => {
  const rpc = new RPC('announcedRefs', 'repositories', 'drives')
  t.ok(rpc)

  t.is(rpc.announcedRefs, 'announcedRefs')
  t.is(rpc.repositories, 'repositories')
  t.is(rpc.drives, 'drives')
  t.alike(rpc.connections, {})
})

test('e2e', async t => {
  t.plan(3)
  const testnet = await createTestnet(3, t)

  const { rpc, store } = await getRPC()
  const clientStore = new Corestore(RAM)
  const topic = Buffer.alloc(32).fill('pear 2 pear')

  // Server side: replicate the store and expose RPC handlers per socket.
  const serverSwarm = new Hyperswarm(testnet)
  serverSwarm.on('connection', (socket, details) => {
    store.replicate(socket)
    rpc.setHandlers(socket, details)
  })
  serverSwarm.join(topic)
  await serverSwarm.flush()

  // Client side: request the repo list, sync the drive, check the pack.
  const clientSwarm = new Hyperswarm(testnet)
  clientSwarm.on('connection', async (socket) => {
    clientStore.replicate(socket)
    const clientRpc = new ProtomuxRPC(socket)

    const reposRes = await clientRpc.request('get-repos')
    const reposJSON = JSON.parse(reposRes.toString())

    const driveKey = Buffer.from(reposJSON.foo, 'hex')
    t.ok(driveKey)

    const drive = new Hyperdrive(clientStore.namespace('foo'), driveKey)
    await drive.ready()
    clientSwarm.join(drive.discoveryKey, { server: false, client: true })
    await clientSwarm.flush()

    await drive.core.update({ wait: true })

    const refsRes = await clientRpc.request('get-refs', Buffer.from('foo'))
    t.ok(refsRes)

    const want = Object.values(JSON.parse(refsRes.toString()))[0]

    const exists = await drive.exists(`/packs/${want}.pack`)
    t.ok(exists)
  })

  clientSwarm.join(topic, { server: false, client: true })

  t.teardown(async () => {
    await serverSwarm.destroy()
    await clientSwarm.destroy()
  })
})

// Build an RPC instance backed by freshly computed daemon state.
async function getRPC () {
  const store = new Corestore(RAM)
  const { announcedRefs, repositories, drives } = await setState(store)
  return {
    rpc: new RPC(announcedRefs, repositories, drives),
    store
  }
}
|
||||
23
test/state.test.js
Normal file
23
test/state.test.js
Normal file
@@ -0,0 +1,23 @@
|
||||
const test = require('brittle')
|
||||
const setState = require('../src/state.js')
|
||||
const Corestore = require('corestore')
|
||||
const RAM = require('random-access-memory')
|
||||
|
||||
// Repositories expected in the unpacked .test_home fixture.
const repoNames = ['foo', 'bar', 'zar']

test('setState', async t => {
  const state = await setState(new Corestore(RAM))

  t.ok(state.announcedRefs)
  t.alike(new Set(Object.values(state.announcedRefs)), new Set(repoNames))

  t.ok(state.repositories)
  t.alike(new Set(Object.keys(state.repositories)), new Set(repoNames))

  t.ok(state.drives)

  for (const repo in state.repositories) {
    t.ok(state.repositories[repo])
    t.ok(state.drives[repo].key)
  }
})
|
||||
BIN
test_home.tar.gz
Normal file
BIN
test_home.tar.gz
Normal file
Binary file not shown.
Reference in New Issue
Block a user