mirror of
https://github.com/aljazceru/goose.git
synced 2026-01-31 20:24:27 +01:00
Mnovich/temporal foreground tasks (#2895)
Co-authored-by: Carlos M. Lopez <carlopez@squareup.com>
This commit is contained in:
9
.github/workflows/build-cli.yml
vendored
9
.github/workflows/build-cli.yml
vendored
@@ -83,7 +83,7 @@ jobs:
|
||||
echo "Building with explicit PROTOC path..."
|
||||
cross build --release --target ${TARGET} -p goose-cli -vv
|
||||
|
||||
- name: Build temporal-service for target platform
|
||||
- name: Build temporal-service for target platform using build.sh script
|
||||
run: |
|
||||
source ./bin/activate-hermit
|
||||
export TARGET="${{ matrix.architecture }}-${{ matrix.target-suffix }}"
|
||||
@@ -116,9 +116,12 @@ jobs:
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Building temporal-service for ${GOOS}/${GOARCH}..."
|
||||
echo "Building temporal-service for ${GOOS}/${GOARCH} using build.sh script..."
|
||||
cd temporal-service
|
||||
go build -o "../target/${TARGET}/release/${BINARY_NAME}" main.go
|
||||
# Run build.sh with cross-compilation environment
|
||||
GOOS="${GOOS}" GOARCH="${GOARCH}" ./build.sh
|
||||
# Move the built binary to the expected location
|
||||
mv "${BINARY_NAME}" "../target/${TARGET}/release/${BINARY_NAME}"
|
||||
echo "temporal-service built successfully for ${TARGET}"
|
||||
|
||||
- name: Package CLI with temporal-service
|
||||
|
||||
7
.github/workflows/bundle-desktop-intel.yml
vendored
7
.github/workflows/bundle-desktop-intel.yml
vendored
@@ -150,13 +150,12 @@ jobs:
|
||||
rustup target add x86_64-apple-darwin
|
||||
cargo build --release -p goose-server --target x86_64-apple-darwin
|
||||
|
||||
# Build temporal-service
|
||||
# Build temporal-service using build.sh script
|
||||
- name: Build temporal-service
|
||||
run: |
|
||||
echo "Building temporal-service..."
|
||||
echo "Building temporal-service using build.sh script..."
|
||||
cd temporal-service
|
||||
go build -o temporal-service main.go
|
||||
chmod +x temporal-service
|
||||
./build.sh
|
||||
echo "temporal-service built successfully"
|
||||
|
||||
# Install and prepare temporal CLI
|
||||
|
||||
40
.github/workflows/bundle-desktop-linux.yml
vendored
40
.github/workflows/bundle-desktop-linux.yml
vendored
@@ -142,7 +142,21 @@ jobs:
|
||||
restore-keys: |
|
||||
${{ runner.os }}-cargo-build-
|
||||
|
||||
# 8) Build the Rust goosed binary
|
||||
# 8) Set up Go for building temporal-service
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # pin@v5
|
||||
with:
|
||||
go-version: '1.21'
|
||||
|
||||
# 9) Build temporal-service using build.sh script
|
||||
- name: Build temporal-service
|
||||
run: |
|
||||
echo "Building temporal-service using build.sh script..."
|
||||
cd temporal-service
|
||||
./build.sh
|
||||
echo "temporal-service built successfully"
|
||||
|
||||
# 10) Build the Rust goosed binary
|
||||
- name: Build goosed binary
|
||||
run: |
|
||||
echo "Building goosed binary for Linux..."
|
||||
@@ -150,7 +164,7 @@ jobs:
|
||||
ls -la target/release/
|
||||
file target/release/goosed
|
||||
|
||||
# 9) Clean up build artifacts to save space
|
||||
# 11) Clean up build artifacts to save space
|
||||
- name: Clean up build artifacts
|
||||
run: |
|
||||
echo "Cleaning up to save disk space..."
|
||||
@@ -167,16 +181,18 @@ jobs:
|
||||
# Check disk space
|
||||
df -h
|
||||
|
||||
# 10) Copy binary to Electron folder
|
||||
- name: Copy binary into Electron folder
|
||||
# 12) Copy binaries to Electron folder
|
||||
- name: Copy binaries into Electron folder
|
||||
run: |
|
||||
echo "Copying goosed binary to ui/desktop/src/bin/"
|
||||
echo "Copying binaries to ui/desktop/src/bin/"
|
||||
mkdir -p ui/desktop/src/bin
|
||||
cp target/release/goosed ui/desktop/src/bin/
|
||||
cp temporal-service/temporal-service ui/desktop/src/bin/
|
||||
chmod +x ui/desktop/src/bin/goosed
|
||||
chmod +x ui/desktop/src/bin/temporal-service
|
||||
ls -la ui/desktop/src/bin/
|
||||
|
||||
# 10a) Final cleanup before npm build
|
||||
# 13) Final cleanup before npm build
|
||||
- name: Final cleanup before npm build
|
||||
run: |
|
||||
echo "Final cleanup before npm build..."
|
||||
@@ -188,7 +204,7 @@ jobs:
|
||||
# Check final disk space
|
||||
df -h
|
||||
|
||||
# 12) Install npm dependencies
|
||||
# 14) Install npm dependencies
|
||||
- name: Install npm dependencies
|
||||
run: |
|
||||
cd ui/desktop
|
||||
@@ -199,7 +215,7 @@ jobs:
|
||||
# Verify installation
|
||||
ls -la node_modules/.bin/ | head -5
|
||||
|
||||
# 13) Build Electron app with Linux makers (.deb and .rpm)
|
||||
# 15) Build Electron app with Linux makers (.deb and .rpm)
|
||||
- name: Build Linux packages
|
||||
run: |
|
||||
cd ui/desktop
|
||||
@@ -212,7 +228,7 @@ jobs:
|
||||
ls -la out/
|
||||
find out/ -name "*.deb" -o -name "*.rpm" | head -10
|
||||
|
||||
# 14) List all generated files for debugging
|
||||
# 16) List all generated files for debugging
|
||||
- name: List generated files
|
||||
run: |
|
||||
echo "=== All files in out/ directory ==="
|
||||
@@ -224,7 +240,7 @@ jobs:
|
||||
echo "=== File sizes ==="
|
||||
find ui/desktop/out/ -name "*.deb" -o -name "*.rpm" -exec ls -lh {} \;
|
||||
|
||||
# 15) Upload .deb package
|
||||
# 17) Upload .deb package
|
||||
- name: Upload .deb package
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -232,7 +248,7 @@ jobs:
|
||||
path: ui/desktop/out/make/deb/x64/*.deb
|
||||
if-no-files-found: error
|
||||
|
||||
# 16) Upload .rpm package
|
||||
# 18) Upload .rpm package
|
||||
- name: Upload .rpm package
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -240,7 +256,7 @@ jobs:
|
||||
path: ui/desktop/out/make/rpm/x64/*.rpm
|
||||
if-no-files-found: error
|
||||
|
||||
# 17) Create combined artifact with both packages
|
||||
# 19) Create combined artifact with both packages
|
||||
- name: Upload combined Linux packages
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
|
||||
11
.github/workflows/bundle-desktop-windows.yml
vendored
11
.github/workflows/bundle-desktop-windows.yml
vendored
@@ -196,15 +196,20 @@ jobs:
|
||||
ls -la ./target/x86_64-pc-windows-gnu/release/goosed.exe
|
||||
ls -la ./target/x86_64-pc-windows-gnu/release/*.dll
|
||||
|
||||
# 4.5) Build temporal-service for Windows
|
||||
# 4.5) Build temporal-service for Windows using build.sh script
|
||||
- name: Build temporal-service for Windows
|
||||
run: |
|
||||
echo "Building temporal-service for Windows..."
|
||||
echo "Building temporal-service for Windows using build.sh script..."
|
||||
docker run --rm \
|
||||
-v "$(pwd)":/usr/src/myapp \
|
||||
-w /usr/src/myapp/temporal-service \
|
||||
golang:latest \
|
||||
sh -c "GOOS=windows GOARCH=amd64 go build -o temporal-service.exe main.go"
|
||||
sh -c "
|
||||
# Make build.sh executable
|
||||
chmod +x build.sh
|
||||
# Set Windows build environment and run build script
|
||||
GOOS=windows GOARCH=amd64 ./build.sh
|
||||
"
|
||||
echo "temporal-service.exe built successfully"
|
||||
|
||||
# 4.6) Download temporal CLI for Windows
|
||||
|
||||
7
.github/workflows/bundle-desktop.yml
vendored
7
.github/workflows/bundle-desktop.yml
vendored
@@ -190,13 +190,12 @@ jobs:
|
||||
- name: Build goosed
|
||||
run: source ./bin/activate-hermit && cargo build --release -p goose-server
|
||||
|
||||
# Build temporal-service
|
||||
# Build temporal-service using build.sh script
|
||||
- name: Build temporal-service
|
||||
run: |
|
||||
echo "Building temporal-service..."
|
||||
echo "Building temporal-service using build.sh script..."
|
||||
cd temporal-service
|
||||
go build -o temporal-service main.go
|
||||
chmod +x temporal-service
|
||||
./build.sh
|
||||
echo "temporal-service built successfully"
|
||||
|
||||
# Install and prepare temporal CLI
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -50,3 +50,8 @@ benchconf.json
|
||||
scripts/fake.sh
|
||||
do_not_version/
|
||||
/ui/desktop/src/bin/temporal
|
||||
/temporal-service/temporal.db
|
||||
/ui/desktop/src/bin/temporal.db
|
||||
/temporal.db
|
||||
/ui/desktop/src/bin/goose-scheduler-executor
|
||||
/ui/desktop/src/bin/goose
|
||||
|
||||
16
Cargo.lock
generated
16
Cargo.lock
generated
@@ -3639,22 +3639,6 @@ dependencies = [
|
||||
"xcap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "goose-scheduler-executor"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.5.31",
|
||||
"futures",
|
||||
"goose",
|
||||
"mcp-core",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "goose-server"
|
||||
version = "1.0.29"
|
||||
|
||||
98
Justfile
98
Justfile
@@ -59,6 +59,13 @@ copy-binary BUILD_MODE="release":
|
||||
echo "Binary not found in target/{{BUILD_MODE}}"; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ -f ./target/{{BUILD_MODE}}/goose ]; then \
|
||||
echo "Copying goose CLI binary from target/{{BUILD_MODE}}..."; \
|
||||
cp -p ./target/{{BUILD_MODE}}/goose ./ui/desktop/src/bin/; \
|
||||
else \
|
||||
echo "Goose CLI binary not found in target/{{BUILD_MODE}}"; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ -f ./temporal-service/temporal-service ]; then \
|
||||
echo "Copying temporal-service binary..."; \
|
||||
cp -p ./temporal-service/temporal-service ./ui/desktop/src/bin/; \
|
||||
@@ -83,6 +90,13 @@ copy-binary-intel:
|
||||
echo "Intel release binary not found."; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ -f ./target/x86_64-apple-darwin/release/goose ]; then \
|
||||
echo "Copying Intel goose CLI binary to ui/desktop/src/bin..."; \
|
||||
cp -p ./target/x86_64-apple-darwin/release/goose ./ui/desktop/src/bin/; \
|
||||
else \
|
||||
echo "Intel goose CLI binary not found."; \
|
||||
exit 1; \
|
||||
fi
|
||||
@if [ -f ./temporal-service/temporal-service ]; then \
|
||||
echo "Copying temporal-service binary..."; \
|
||||
cp -p ./temporal-service/temporal-service ./ui/desktop/src/bin/; \
|
||||
@@ -108,6 +122,12 @@ copy-binary-windows:
|
||||
Write-Host 'Windows binary not found.' -ForegroundColor Red; \
|
||||
exit 1; \
|
||||
}"
|
||||
@powershell.exe -Command "if (Test-Path ./target/x86_64-pc-windows-gnu/release/goose-scheduler-executor.exe) { \
|
||||
Write-Host 'Copying Windows goose-scheduler-executor binary...'; \
|
||||
Copy-Item -Path './target/x86_64-pc-windows-gnu/release/goose-scheduler-executor.exe' -Destination './ui/desktop/src/bin/' -Force; \
|
||||
} else { \
|
||||
Write-Host 'Windows goose-scheduler-executor binary not found.' -ForegroundColor Yellow; \
|
||||
}"
|
||||
@if [ -f ./temporal-service/temporal-service.exe ]; then \
|
||||
echo "Copying Windows temporal-service binary..."; \
|
||||
cp -p ./temporal-service/temporal-service.exe ./ui/desktop/src/bin/; \
|
||||
@@ -134,6 +154,12 @@ run-ui-alpha temporal="true":
|
||||
@echo "Running UI with {{ if temporal == "true" { "Temporal" } else { "Legacy" } }} scheduler..."
|
||||
cd ui/desktop && npm install && ALPHA=true GOOSE_SCHEDULER_TYPE={{ if temporal == "true" { "temporal" } else { "legacy" } }} npm run start-alpha-gui
|
||||
|
||||
# Run UI with alpha changes using legacy scheduler (no Temporal dependency)
|
||||
run-ui-alpha-legacy:
|
||||
@just release-binary
|
||||
@echo "Running UI with Legacy scheduler (no Temporal required)..."
|
||||
cd ui/desktop && npm install && ALPHA=true GOOSE_SCHEDULER_TYPE=legacy npm run start-alpha-gui
|
||||
|
||||
# Run UI with latest (Windows version)
|
||||
run-ui-windows:
|
||||
@just release-windows
|
||||
@@ -160,6 +186,11 @@ make-ui:
|
||||
@just release-binary
|
||||
cd ui/desktop && npm run bundle:default
|
||||
|
||||
# make GUI with latest binary and alpha features enabled
|
||||
make-ui-alpha:
|
||||
@just release-binary
|
||||
cd ui/desktop && npm run bundle:alpha
|
||||
|
||||
# make GUI with latest Windows binary
|
||||
make-ui-windows:
|
||||
@just release-windows
|
||||
@@ -172,25 +203,8 @@ make-ui-windows:
|
||||
echo "Copying Windows binary and DLLs..." && \
|
||||
cp -f ./target/x86_64-pc-windows-gnu/release/goosed.exe ./ui/desktop/src/bin/ && \
|
||||
cp -f ./target/x86_64-pc-windows-gnu/release/*.dll ./ui/desktop/src/bin/ && \
|
||||
if [ -d "./ui/desktop/src/platform/windows/bin" ]; then \
|
||||
echo "Copying Windows platform files..." && \
|
||||
for file in ./ui/desktop/src/platform/windows/bin/*.{exe,dll,cmd}; do \
|
||||
if [ -f "$file" ] && [ "$(basename "$file")" != "goosed.exe" ]; then \
|
||||
cp -f "$file" ./ui/desktop/src/bin/; \
|
||||
fi; \
|
||||
done && \
|
||||
if [ -d "./ui/desktop/src/platform/windows/bin/goose-npm" ]; then \
|
||||
echo "Setting up npm environment..." && \
|
||||
rsync -a --delete ./ui/desktop/src/platform/windows/bin/goose-npm/ ./ui/desktop/src/bin/goose-npm/; \
|
||||
fi && \
|
||||
echo "Windows-specific files copied successfully"; \
|
||||
fi && \
|
||||
echo "Starting Windows package build..." && \
|
||||
(cd ui/desktop && echo "In desktop directory, running npm bundle:windows..." && npm run bundle:windows) && \
|
||||
echo "Creating resources directory..." && \
|
||||
(cd ui/desktop && mkdir -p out/Goose-win32-x64/resources/bin) && \
|
||||
echo "Copying final binaries..." && \
|
||||
(cd ui/desktop && rsync -av src/bin/ out/Goose-win32-x64/resources/bin/) && \
|
||||
(cd ui/desktop && npm run bundle:windows) && \
|
||||
echo "Windows package build complete!"; \
|
||||
else \
|
||||
echo "Windows binary not found."; \
|
||||
@@ -202,10 +216,50 @@ make-ui-intel:
|
||||
@just release-intel
|
||||
cd ui/desktop && npm run bundle:intel
|
||||
|
||||
# Setup langfuse server
|
||||
langfuse-server:
|
||||
#!/usr/bin/env bash
|
||||
./scripts/setup_langfuse.sh
|
||||
# Start Temporal services (server and temporal-service)
|
||||
start-temporal:
|
||||
@echo "Starting Temporal server..."
|
||||
@if ! pgrep -f "temporal server start-dev" > /dev/null; then \
|
||||
echo "Starting Temporal server in background..."; \
|
||||
nohup temporal server start-dev --db-filename temporal.db --port 7233 --ui-port 8233 --log-level warn > temporal-server.log 2>&1 & \
|
||||
echo "Waiting for Temporal server to start..."; \
|
||||
sleep 5; \
|
||||
else \
|
||||
echo "Temporal server is already running"; \
|
||||
fi
|
||||
@echo "Starting temporal-service..."
|
||||
@if ! pgrep -f "temporal-service" > /dev/null; then \
|
||||
echo "Starting temporal-service in background..."; \
|
||||
cd temporal-service && nohup ./temporal-service > temporal-service.log 2>&1 & \
|
||||
echo "Waiting for temporal-service to start..."; \
|
||||
sleep 3; \
|
||||
else \
|
||||
echo "temporal-service is already running"; \
|
||||
fi
|
||||
@echo "Temporal services started. Check logs: temporal-server.log, temporal-service/temporal-service.log"
|
||||
|
||||
# Stop Temporal services
|
||||
stop-temporal:
|
||||
@echo "Stopping Temporal services..."
|
||||
@pkill -f "temporal server start-dev" || echo "Temporal server was not running"
|
||||
@pkill -f "temporal-service" || echo "temporal-service was not running"
|
||||
@echo "Temporal services stopped"
|
||||
|
||||
# Check status of Temporal services
|
||||
status-temporal:
|
||||
@echo "Checking Temporal services status..."
|
||||
@if pgrep -f "temporal server start-dev" > /dev/null; then \
|
||||
echo "✓ Temporal server is running"; \
|
||||
else \
|
||||
echo "✗ Temporal server is not running"; \
|
||||
fi
|
||||
@if pgrep -f "temporal-service" > /dev/null; then \
|
||||
echo "✓ temporal-service is running"; \
|
||||
else \
|
||||
echo "✗ temporal-service is not running"; \
|
||||
fi
|
||||
@echo "Testing temporal-service health..."
|
||||
@curl -s http://localhost:8080/health > /dev/null && echo "✓ temporal-service is responding" || echo "✗ temporal-service is not responding"
|
||||
|
||||
# Run UI with debug build
|
||||
run-dev:
|
||||
|
||||
@@ -28,6 +28,7 @@ tokio = { version = "1.43", features = ["full"] }
|
||||
futures = "0.3"
|
||||
serde = { version = "1.0", features = ["derive"] } # For serialization
|
||||
serde_yaml = "0.9"
|
||||
tempfile = "3"
|
||||
etcetera = "0.8.0"
|
||||
reqwest = { version = "0.12.9", features = [
|
||||
"rustls-tls-native-roots",
|
||||
|
||||
@@ -509,6 +509,16 @@ enum Command {
|
||||
help = "Quiet mode. Suppress non-response output, printing only the model response to stdout"
|
||||
)]
|
||||
quiet: bool,
|
||||
|
||||
/// Scheduled job ID (used internally for scheduled executions)
|
||||
#[arg(
|
||||
long = "scheduled-job-id",
|
||||
value_name = "ID",
|
||||
help = "ID of the scheduled job that triggered this execution (internal use)",
|
||||
long_help = "Internal parameter used when this run command is executed by a scheduled job. This associates the session with the schedule for tracking purposes.",
|
||||
hide = true
|
||||
)]
|
||||
scheduled_job_id: Option<String>,
|
||||
},
|
||||
|
||||
/// Recipe utilities for validation and deeplinking
|
||||
@@ -662,6 +672,7 @@ pub async fn cli() -> Result<()> {
|
||||
settings: None,
|
||||
debug,
|
||||
max_tool_repetitions,
|
||||
scheduled_job_id: None,
|
||||
interactive: true,
|
||||
quiet: false,
|
||||
})
|
||||
@@ -709,6 +720,7 @@ pub async fn cli() -> Result<()> {
|
||||
params,
|
||||
explain,
|
||||
render_recipe,
|
||||
scheduled_job_id,
|
||||
quiet,
|
||||
}) => {
|
||||
let (input_config, session_settings) = match (
|
||||
@@ -808,6 +820,7 @@ pub async fn cli() -> Result<()> {
|
||||
settings: session_settings,
|
||||
debug,
|
||||
max_tool_repetitions,
|
||||
scheduled_job_id,
|
||||
interactive, // Use the interactive flag from the Run command
|
||||
quiet,
|
||||
})
|
||||
@@ -925,6 +938,7 @@ pub async fn cli() -> Result<()> {
|
||||
settings: None::<SessionSettings>,
|
||||
debug: false,
|
||||
max_tool_repetitions: None,
|
||||
scheduled_job_id: None,
|
||||
interactive: true, // Default case is always interactive
|
||||
quiet: false,
|
||||
})
|
||||
|
||||
@@ -44,6 +44,7 @@ pub async fn agent_generator(
|
||||
debug: false,
|
||||
max_tool_repetitions: None,
|
||||
interactive: false, // Benchmarking is non-interactive
|
||||
scheduled_job_id: None,
|
||||
quiet: false,
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -99,6 +99,7 @@ pub async fn handle_schedule_add(
|
||||
paused: false,
|
||||
current_session_id: None,
|
||||
process_start_time: None,
|
||||
execution_mode: Some("background".to_string()), // Default to background for CLI
|
||||
};
|
||||
|
||||
let scheduler_storage_path =
|
||||
|
||||
@@ -464,6 +464,7 @@ async fn process_message_streaming(
|
||||
id: session::Identifier::Path(session_file.clone()),
|
||||
working_dir: std::env::current_dir()?,
|
||||
schedule_id: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
// Get response from agent
|
||||
|
||||
@@ -40,6 +40,8 @@ pub struct SessionBuilderConfig {
|
||||
pub debug: bool,
|
||||
/// Maximum number of consecutive identical tool calls allowed
|
||||
pub max_tool_repetitions: Option<u32>,
|
||||
/// ID of the scheduled job that triggered this session (if any)
|
||||
pub scheduled_job_id: Option<String>,
|
||||
/// Whether this session will be used interactively (affects debugging prompts)
|
||||
pub interactive: bool,
|
||||
/// Quiet mode - suppress non-response output
|
||||
@@ -115,7 +117,7 @@ async fn offer_extension_debugging_help(
|
||||
std::env::temp_dir().join(format!("goose_debug_extension_{}.jsonl", extension_name));
|
||||
|
||||
// Create the debugging session
|
||||
let mut debug_session = Session::new(debug_agent, temp_session_file.clone(), false);
|
||||
let mut debug_session = Session::new(debug_agent, temp_session_file.clone(), false, None);
|
||||
|
||||
// Process the debugging request
|
||||
println!("{}", style("Analyzing the extension failure...").yellow());
|
||||
@@ -341,7 +343,12 @@ pub async fn build_session(session_config: SessionBuilderConfig) -> Session {
|
||||
}
|
||||
|
||||
// Create new session
|
||||
let mut session = Session::new(agent, session_file.clone(), session_config.debug);
|
||||
let mut session = Session::new(
|
||||
agent,
|
||||
session_file.clone(),
|
||||
session_config.debug,
|
||||
session_config.scheduled_job_id.clone(),
|
||||
);
|
||||
|
||||
// Add extensions if provided
|
||||
for extension_str in session_config.extensions {
|
||||
@@ -490,6 +497,7 @@ mod tests {
|
||||
settings: None,
|
||||
debug: true,
|
||||
max_tool_repetitions: Some(5),
|
||||
scheduled_job_id: None,
|
||||
interactive: true,
|
||||
quiet: false,
|
||||
};
|
||||
@@ -499,6 +507,7 @@ mod tests {
|
||||
assert_eq!(config.builtins.len(), 1);
|
||||
assert!(config.debug);
|
||||
assert_eq!(config.max_tool_repetitions, Some(5));
|
||||
assert!(config.scheduled_job_id.is_none());
|
||||
assert!(config.interactive);
|
||||
assert!(!config.quiet);
|
||||
}
|
||||
@@ -517,6 +526,7 @@ mod tests {
|
||||
assert!(config.additional_system_prompt.is_none());
|
||||
assert!(!config.debug);
|
||||
assert!(config.max_tool_repetitions.is_none());
|
||||
assert!(config.scheduled_job_id.is_none());
|
||||
assert!(!config.interactive);
|
||||
assert!(!config.quiet);
|
||||
}
|
||||
|
||||
@@ -51,6 +51,7 @@ pub struct Session {
|
||||
completion_cache: Arc<std::sync::RwLock<CompletionCache>>,
|
||||
debug: bool, // New field for debug mode
|
||||
run_mode: RunMode,
|
||||
scheduled_job_id: Option<String>, // ID of the scheduled job that triggered this session
|
||||
}
|
||||
|
||||
// Cache structure for completion data
|
||||
@@ -107,7 +108,12 @@ pub async fn classify_planner_response(
|
||||
}
|
||||
|
||||
impl Session {
|
||||
pub fn new(agent: Agent, session_file: PathBuf, debug: bool) -> Self {
|
||||
pub fn new(
|
||||
agent: Agent,
|
||||
session_file: PathBuf,
|
||||
debug: bool,
|
||||
scheduled_job_id: Option<String>,
|
||||
) -> Self {
|
||||
let messages = match session::read_messages(&session_file) {
|
||||
Ok(msgs) => msgs,
|
||||
Err(e) => {
|
||||
@@ -123,6 +129,7 @@ impl Session {
|
||||
completion_cache: Arc::new(std::sync::RwLock::new(CompletionCache::new())),
|
||||
debug,
|
||||
run_mode: RunMode::Normal,
|
||||
scheduled_job_id,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -307,7 +314,13 @@ impl Session {
|
||||
let provider = self.agent.provider().await?;
|
||||
|
||||
// Persist messages with provider for automatic description generation
|
||||
session::persist_messages(&self.session_file, &self.messages, Some(provider)).await?;
|
||||
session::persist_messages_with_schedule_id(
|
||||
&self.session_file,
|
||||
&self.messages,
|
||||
Some(provider),
|
||||
self.scheduled_job_id.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Track the current directory and last instruction in projects.json
|
||||
let session_id = self
|
||||
@@ -413,10 +426,11 @@ impl Session {
|
||||
let provider = self.agent.provider().await?;
|
||||
|
||||
// Persist messages with provider for automatic description generation
|
||||
session::persist_messages(
|
||||
session::persist_messages_with_schedule_id(
|
||||
&self.session_file,
|
||||
&self.messages,
|
||||
Some(provider),
|
||||
self.scheduled_job_id.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -600,10 +614,11 @@ impl Session {
|
||||
self.messages = summarized_messages;
|
||||
|
||||
// Persist the summarized messages
|
||||
session::persist_messages(
|
||||
session::persist_messages_with_schedule_id(
|
||||
&self.session_file,
|
||||
&self.messages,
|
||||
Some(provider),
|
||||
self.scheduled_job_id.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -727,7 +742,8 @@ impl Session {
|
||||
id: session_id.clone(),
|
||||
working_dir: std::env::current_dir()
|
||||
.expect("failed to get current session working directory"),
|
||||
schedule_id: None,
|
||||
schedule_id: self.scheduled_job_id.clone(),
|
||||
execution_mode: None,
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
@@ -776,7 +792,7 @@ impl Session {
|
||||
Err(ToolError::ExecutionError("Tool call cancelled by user".to_string()))
|
||||
));
|
||||
self.messages.push(response_message);
|
||||
session::persist_messages(&self.session_file, &self.messages, None).await?;
|
||||
session::persist_messages_with_schedule_id(&self.session_file, &self.messages, None, self.scheduled_job_id.clone()).await?;
|
||||
|
||||
drop(stream);
|
||||
break;
|
||||
@@ -862,7 +878,8 @@ impl Session {
|
||||
id: session_id.clone(),
|
||||
working_dir: std::env::current_dir()
|
||||
.expect("failed to get current session working directory"),
|
||||
schedule_id: None,
|
||||
schedule_id: self.scheduled_job_id.clone(),
|
||||
execution_mode: None,
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
@@ -872,7 +889,7 @@ impl Session {
|
||||
self.messages.push(message.clone());
|
||||
|
||||
// No need to update description on assistant messages
|
||||
session::persist_messages(&self.session_file, &self.messages, None).await?;
|
||||
session::persist_messages_with_schedule_id(&self.session_file, &self.messages, None, self.scheduled_job_id.clone()).await?;
|
||||
|
||||
if interactive {output::hide_thinking()};
|
||||
let _ = progress_bars.hide();
|
||||
@@ -1006,7 +1023,13 @@ impl Session {
|
||||
self.messages.push(response_message);
|
||||
|
||||
// No need for description update here
|
||||
session::persist_messages(&self.session_file, &self.messages, None).await?;
|
||||
session::persist_messages_with_schedule_id(
|
||||
&self.session_file,
|
||||
&self.messages,
|
||||
None,
|
||||
self.scheduled_job_id.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let prompt = format!(
|
||||
"The existing call to {} was interrupted. How would you like to proceed?",
|
||||
@@ -1015,7 +1038,13 @@ impl Session {
|
||||
self.messages.push(Message::assistant().with_text(&prompt));
|
||||
|
||||
// No need for description update here
|
||||
session::persist_messages(&self.session_file, &self.messages, None).await?;
|
||||
session::persist_messages_with_schedule_id(
|
||||
&self.session_file,
|
||||
&self.messages,
|
||||
None,
|
||||
self.scheduled_job_id.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
output::render_message(&Message::assistant().with_text(&prompt), self.debug);
|
||||
} else {
|
||||
@@ -1029,8 +1058,13 @@ impl Session {
|
||||
self.messages.push(Message::assistant().with_text(prompt));
|
||||
|
||||
// No need for description update here
|
||||
session::persist_messages(&self.session_file, &self.messages, None)
|
||||
.await?;
|
||||
session::persist_messages_with_schedule_id(
|
||||
&self.session_file,
|
||||
&self.messages,
|
||||
None,
|
||||
self.scheduled_job_id.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
output::render_message(
|
||||
&Message::assistant().with_text(prompt),
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
[package]
|
||||
name = "goose-scheduler-executor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
goose = { path = "../goose" }
|
||||
mcp-core = { path = "../mcp-core" }
|
||||
anyhow = "1.0"
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
clap = { version = "4.0", features = ["derive"] }
|
||||
futures = "0.3"
|
||||
serde_json = "1.0"
|
||||
serde_yaml = "0.9"
|
||||
@@ -1,215 +0,0 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use clap::Parser;
|
||||
use goose::agents::{Agent, SessionConfig};
|
||||
use goose::config::Config;
|
||||
use goose::message::Message;
|
||||
use goose::providers::create;
|
||||
use goose::recipe::Recipe;
|
||||
use goose::session;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use tracing::info;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
struct Args {
|
||||
/// Job ID for the scheduled job
|
||||
job_id: String,
|
||||
|
||||
/// Path to the recipe file to execute
|
||||
recipe_path: String,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
// Initialize tracing
|
||||
tracing_subscriber::fmt()
|
||||
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
|
||||
.init();
|
||||
|
||||
let args = Args::parse();
|
||||
|
||||
info!("Starting goose-scheduler-executor for job: {}", args.job_id);
|
||||
info!("Recipe path: {}", args.recipe_path);
|
||||
|
||||
// Execute the recipe and get session ID
|
||||
let session_id = execute_recipe(&args.job_id, &args.recipe_path).await?;
|
||||
|
||||
// Output session ID to stdout (this is what the Go service expects)
|
||||
println!("{}", session_id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn execute_recipe(job_id: &str, recipe_path: &str) -> Result<String> {
|
||||
let recipe_path_buf = Path::new(recipe_path);
|
||||
|
||||
// Check if recipe file exists
|
||||
if !recipe_path_buf.exists() {
|
||||
return Err(anyhow!("Recipe file not found: {}", recipe_path));
|
||||
}
|
||||
|
||||
// Read and parse recipe
|
||||
let recipe_content = fs::read_to_string(recipe_path_buf)?;
|
||||
let recipe: Recipe = {
|
||||
let extension = recipe_path_buf
|
||||
.extension()
|
||||
.and_then(|os_str| os_str.to_str())
|
||||
.unwrap_or("yaml")
|
||||
.to_lowercase();
|
||||
|
||||
match extension.as_str() {
|
||||
"json" | "jsonl" => serde_json::from_str::<Recipe>(&recipe_content)
|
||||
.map_err(|e| anyhow!("Failed to parse JSON recipe '{}': {}", recipe_path, e))?,
|
||||
"yaml" | "yml" => serde_yaml::from_str::<Recipe>(&recipe_content)
|
||||
.map_err(|e| anyhow!("Failed to parse YAML recipe '{}': {}", recipe_path, e))?,
|
||||
_ => {
|
||||
return Err(anyhow!(
|
||||
"Unsupported recipe file extension '{}' for: {}",
|
||||
extension,
|
||||
recipe_path
|
||||
));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Create agent
|
||||
let agent = Agent::new();
|
||||
|
||||
// Get provider configuration
|
||||
let global_config = Config::global();
|
||||
let provider_name: String = global_config.get_param("GOOSE_PROVIDER").map_err(|_| {
|
||||
anyhow!("GOOSE_PROVIDER not configured. Run 'goose configure' or set env var.")
|
||||
})?;
|
||||
let model_name: String = global_config.get_param("GOOSE_MODEL").map_err(|_| {
|
||||
anyhow!("GOOSE_MODEL not configured. Run 'goose configure' or set env var.")
|
||||
})?;
|
||||
|
||||
let model_config = goose::model::ModelConfig::new(model_name);
|
||||
let provider = create(&provider_name, model_config)
|
||||
.map_err(|e| anyhow!("Failed to create provider '{}': {}", provider_name, e))?;
|
||||
|
||||
// Set provider on agent
|
||||
agent
|
||||
.update_provider(provider)
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to set provider on agent: {}", e))?;
|
||||
|
||||
info!(
|
||||
"Agent configured with provider '{}' for job '{}'",
|
||||
provider_name, job_id
|
||||
);
|
||||
|
||||
// Generate session ID
|
||||
let session_id = session::generate_session_id();
|
||||
|
||||
// Check if recipe has a prompt
|
||||
let Some(prompt_text) = recipe.prompt else {
|
||||
info!(
|
||||
"Recipe '{}' has no prompt to execute for job '{}'",
|
||||
recipe_path, job_id
|
||||
);
|
||||
|
||||
// Create empty session for consistency
|
||||
let session_file_path = goose::session::storage::get_path(
|
||||
goose::session::storage::Identifier::Name(session_id.clone()),
|
||||
);
|
||||
|
||||
let metadata = goose::session::storage::SessionMetadata {
|
||||
working_dir: env::current_dir().unwrap_or_default(),
|
||||
description: "Empty job - no prompt".to_string(),
|
||||
schedule_id: Some(job_id.to_string()),
|
||||
message_count: 0,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
goose::session::storage::save_messages_with_metadata(&session_file_path, &metadata, &[])
|
||||
.map_err(|e| anyhow!("Failed to persist metadata for empty job: {}", e))?;
|
||||
|
||||
return Ok(session_id);
|
||||
};
|
||||
|
||||
// Create session configuration
|
||||
let current_dir =
|
||||
env::current_dir().map_err(|e| anyhow!("Failed to get current directory: {}", e))?;
|
||||
|
||||
let session_config = SessionConfig {
|
||||
id: goose::session::storage::Identifier::Name(session_id.clone()),
|
||||
working_dir: current_dir.clone(),
|
||||
schedule_id: Some(job_id.to_string()),
|
||||
};
|
||||
|
||||
// Execute the recipe
|
||||
let mut messages = vec![Message::user().with_text(prompt_text)];
|
||||
|
||||
info!("Executing recipe for job '{}' with prompt", job_id);
|
||||
|
||||
let mut stream = agent
|
||||
.reply(&messages, Some(session_config))
|
||||
.await
|
||||
.map_err(|e| anyhow!("Agent failed to reply for recipe '{}': {}", recipe_path, e))?;
|
||||
|
||||
// Process the response stream
|
||||
use futures::StreamExt;
|
||||
use goose::agents::AgentEvent;
|
||||
|
||||
while let Some(message_result) = stream.next().await {
|
||||
match message_result {
|
||||
Ok(AgentEvent::Message(msg)) => {
|
||||
if msg.role == mcp_core::role::Role::Assistant {
|
||||
info!("[Job {}] Assistant response received", job_id);
|
||||
}
|
||||
messages.push(msg);
|
||||
}
|
||||
Ok(AgentEvent::McpNotification(_)) => {
|
||||
// Handle notifications if needed
|
||||
}
|
||||
Ok(AgentEvent::ModelChange { .. }) => {
|
||||
// Model change events are informational, just continue
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(anyhow!("Error receiving message from agent: {}", e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Save session
|
||||
let session_file_path = goose::session::storage::get_path(
|
||||
goose::session::storage::Identifier::Name(session_id.clone()),
|
||||
);
|
||||
|
||||
// Try to read updated metadata, or create fallback
|
||||
match goose::session::storage::read_metadata(&session_file_path) {
|
||||
Ok(mut updated_metadata) => {
|
||||
updated_metadata.message_count = messages.len();
|
||||
goose::session::storage::save_messages_with_metadata(
|
||||
&session_file_path,
|
||||
&updated_metadata,
|
||||
&messages,
|
||||
)
|
||||
.map_err(|e| anyhow!("Failed to persist final messages: {}", e))?;
|
||||
}
|
||||
Err(_) => {
|
||||
let fallback_metadata = goose::session::storage::SessionMetadata {
|
||||
working_dir: current_dir,
|
||||
description: format!("Scheduled job: {}", job_id),
|
||||
schedule_id: Some(job_id.to_string()),
|
||||
message_count: messages.len(),
|
||||
..Default::default()
|
||||
};
|
||||
goose::session::storage::save_messages_with_metadata(
|
||||
&session_file_path,
|
||||
&fallback_metadata,
|
||||
&messages,
|
||||
)
|
||||
.map_err(|e| anyhow!("Failed to persist messages with fallback metadata: {}", e))?;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Finished executing job '{}', session: {}",
|
||||
job_id, session_id
|
||||
);
|
||||
Ok(session_id)
|
||||
}
|
||||
@@ -40,6 +40,7 @@ struct ChatRequest {
|
||||
messages: Vec<Message>,
|
||||
session_id: Option<String>,
|
||||
session_working_dir: String,
|
||||
scheduled_job_id: Option<String>,
|
||||
}
|
||||
|
||||
pub struct SseResponse {
|
||||
@@ -181,7 +182,8 @@ async fn handler(
|
||||
Some(SessionConfig {
|
||||
id: session::Identifier::Name(session_id.clone()),
|
||||
working_dir: PathBuf::from(session_working_dir),
|
||||
schedule_id: None,
|
||||
schedule_id: request.scheduled_job_id.clone(),
|
||||
execution_mode: None,
|
||||
}),
|
||||
)
|
||||
.await
|
||||
@@ -303,6 +305,7 @@ struct AskRequest {
|
||||
prompt: String,
|
||||
session_id: Option<String>,
|
||||
session_working_dir: String,
|
||||
scheduled_job_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -339,7 +342,8 @@ async fn ask_handler(
|
||||
Some(SessionConfig {
|
||||
id: session::Identifier::Name(session_id.clone()),
|
||||
working_dir: PathBuf::from(session_working_dir),
|
||||
schedule_id: None,
|
||||
schedule_id: request.scheduled_job_id.clone(),
|
||||
execution_mode: None,
|
||||
}),
|
||||
)
|
||||
.await
|
||||
@@ -578,6 +582,7 @@ mod tests {
|
||||
prompt: "test prompt".to_string(),
|
||||
session_id: Some("test-session".to_string()),
|
||||
session_working_dir: "test-working-dir".to_string(),
|
||||
scheduled_job_id: None,
|
||||
})
|
||||
.unwrap(),
|
||||
))
|
||||
|
||||
@@ -19,6 +19,8 @@ pub struct CreateScheduleRequest {
|
||||
id: String,
|
||||
recipe_source: String,
|
||||
cron: String,
|
||||
#[serde(default)]
|
||||
execution_mode: Option<String>, // "foreground" or "background"
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, utoipa::ToSchema)]
|
||||
@@ -124,6 +126,7 @@ async fn create_schedule(
|
||||
paused: false,
|
||||
current_session_id: None,
|
||||
process_start_time: None,
|
||||
execution_mode: req.execution_mode.or(Some("background".to_string())), // Default to background
|
||||
};
|
||||
scheduler
|
||||
.add_scheduled_job(job.clone())
|
||||
|
||||
@@ -574,7 +574,25 @@ impl Agent {
|
||||
let (mut tools, mut toolshim_tools, mut system_prompt) =
|
||||
self.prepare_tools_and_prompt().await?;
|
||||
|
||||
let goose_mode = config.get_param("GOOSE_MODE").unwrap_or("auto".to_string());
|
||||
// Get goose_mode from config, but override with execution_mode if provided in session config
|
||||
let mut goose_mode = config.get_param("GOOSE_MODE").unwrap_or("auto".to_string());
|
||||
|
||||
// If this is a scheduled job with an execution_mode, override the goose_mode
|
||||
if let Some(session_config) = &session {
|
||||
if let Some(execution_mode) = &session_config.execution_mode {
|
||||
// Map "foreground" to "auto" and "background" to "chat"
|
||||
goose_mode = match execution_mode.as_str() {
|
||||
"foreground" => "auto".to_string(),
|
||||
"background" => "chat".to_string(),
|
||||
_ => goose_mode,
|
||||
};
|
||||
tracing::info!(
|
||||
"Using execution_mode '{}' which maps to goose_mode '{}'",
|
||||
execution_mode,
|
||||
goose_mode
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let (tools_with_readonly_annotation, tools_without_annotation) =
|
||||
Self::categorize_tools_by_annotation(&tools);
|
||||
|
||||
@@ -144,6 +144,7 @@ pub fn manage_schedule_tool() -> Tool {
|
||||
"job_id": {"type": "string", "description": "Job identifier for operations on existing jobs"},
|
||||
"recipe_path": {"type": "string", "description": "Path to recipe file for create action"},
|
||||
"cron_expression": {"type": "string", "description": "A six field cron expression for create action"},
|
||||
"execution_mode": {"type": "string", "description": "Execution mode for create action: 'foreground' or 'background'", "enum": ["foreground", "background"], "default": "background"},
|
||||
"limit": {"type": "integer", "description": "Limit for sessions list", "default": 50},
|
||||
"session_id": {"type": "string", "description": "Session identifier for session_content action"}
|
||||
}
|
||||
|
||||
@@ -94,6 +94,20 @@ impl Agent {
|
||||
ToolError::ExecutionError("Missing 'cron_expression' parameter".to_string())
|
||||
})?;
|
||||
|
||||
// Get the execution_mode parameter, defaulting to "background" if not provided
|
||||
let execution_mode = arguments
|
||||
.get("execution_mode")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("background");
|
||||
|
||||
// Validate execution_mode is either "foreground" or "background"
|
||||
if execution_mode != "foreground" && execution_mode != "background" {
|
||||
return Err(ToolError::ExecutionError(format!(
|
||||
"Invalid execution_mode: {}. Must be 'foreground' or 'background'",
|
||||
execution_mode
|
||||
)));
|
||||
}
|
||||
|
||||
// Validate recipe file exists and is readable
|
||||
if !std::path::Path::new(recipe_path).exists() {
|
||||
return Err(ToolError::ExecutionError(format!(
|
||||
@@ -135,12 +149,13 @@ impl Agent {
|
||||
paused: false,
|
||||
current_session_id: None,
|
||||
process_start_time: None,
|
||||
execution_mode: Some(execution_mode.to_string()),
|
||||
};
|
||||
|
||||
match scheduler.add_scheduled_job(job).await {
|
||||
Ok(()) => Ok(vec![Content::text(format!(
|
||||
"Successfully created scheduled job '{}' for recipe '{}' with cron expression '{}'",
|
||||
job_id, recipe_path, cron_expression
|
||||
"Successfully created scheduled job '{}' for recipe '{}' with cron expression '{}' in {} mode",
|
||||
job_id, recipe_path, cron_expression, execution_mode
|
||||
))]),
|
||||
Err(e) => Err(ToolError::ExecutionError(format!(
|
||||
"Failed to create job: {}",
|
||||
|
||||
@@ -23,5 +23,7 @@ pub struct SessionConfig {
|
||||
/// Working directory for the session
|
||||
pub working_dir: PathBuf,
|
||||
/// ID of the schedule that triggered this session, if any
|
||||
pub schedule_id: Option<String>, // NEW
|
||||
pub schedule_id: Option<String>,
|
||||
/// Execution mode for scheduled jobs: "foreground" or "background"
|
||||
pub execution_mode: Option<String>,
|
||||
}
|
||||
|
||||
@@ -122,6 +122,8 @@ pub struct ScheduledJob {
|
||||
pub current_session_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub process_start_time: Option<DateTime<Utc>>,
|
||||
#[serde(default)]
|
||||
pub execution_mode: Option<String>, // "foreground" or "background"
|
||||
}
|
||||
|
||||
async fn persist_jobs_from_arc(
|
||||
@@ -1059,6 +1061,10 @@ async fn run_scheduled_job_internal(
|
||||
}
|
||||
tracing::info!("Agent configured with provider for job '{}'", job.id);
|
||||
|
||||
// Log the execution mode
|
||||
let execution_mode = job.execution_mode.as_deref().unwrap_or("background");
|
||||
tracing::info!("Job '{}' running in {} mode", job.id, execution_mode);
|
||||
|
||||
let session_id_for_return = session::generate_session_id();
|
||||
|
||||
// Update the job with the session ID if we have access to the jobs arc
|
||||
@@ -1091,6 +1097,7 @@ async fn run_scheduled_job_internal(
|
||||
id: crate::session::storage::Identifier::Name(session_id_for_return.clone()),
|
||||
working_dir: current_dir.clone(),
|
||||
schedule_id: Some(job.id.clone()),
|
||||
execution_mode: job.execution_mode.clone(),
|
||||
};
|
||||
|
||||
match agent
|
||||
@@ -1323,6 +1330,7 @@ mod tests {
|
||||
paused: false,
|
||||
current_session_id: None,
|
||||
process_start_time: None,
|
||||
execution_mode: Some("background".to_string()), // Default for test
|
||||
};
|
||||
|
||||
// Create the mock provider instance for the test
|
||||
|
||||
@@ -15,41 +15,33 @@ impl SchedulerType {
|
||||
pub fn from_config() -> Self {
|
||||
let config = Config::global();
|
||||
|
||||
// First check if alpha features are enabled
|
||||
// If not, always use legacy scheduler regardless of GOOSE_SCHEDULER_TYPE
|
||||
match config.get_param::<String>("ALPHA") {
|
||||
Ok(alpha_value) => {
|
||||
// Only proceed with temporal if alpha is explicitly enabled
|
||||
if alpha_value.to_lowercase() != "true" {
|
||||
tracing::info!("Alpha features disabled, using legacy scheduler");
|
||||
return SchedulerType::Legacy;
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
// No ALPHA env var means alpha features are disabled
|
||||
tracing::info!("No ALPHA environment variable found, using legacy scheduler");
|
||||
return SchedulerType::Legacy;
|
||||
}
|
||||
}
|
||||
// Debug logging to help troubleshoot environment variable issues
|
||||
tracing::debug!("Checking scheduler configuration...");
|
||||
|
||||
// Alpha is enabled, now check scheduler type preference
|
||||
// Check scheduler type preference from GOOSE_SCHEDULER_TYPE
|
||||
match config.get_param::<String>("GOOSE_SCHEDULER_TYPE") {
|
||||
Ok(scheduler_type) => match scheduler_type.to_lowercase().as_str() {
|
||||
"temporal" => SchedulerType::Temporal,
|
||||
"legacy" => SchedulerType::Legacy,
|
||||
_ => {
|
||||
tracing::warn!(
|
||||
"Unknown scheduler type '{}', defaulting to legacy scheduler",
|
||||
scheduler_type
|
||||
);
|
||||
SchedulerType::Legacy
|
||||
Ok(scheduler_type) => {
|
||||
tracing::debug!(
|
||||
"Found GOOSE_SCHEDULER_TYPE environment variable: '{}'",
|
||||
scheduler_type
|
||||
);
|
||||
match scheduler_type.to_lowercase().as_str() {
|
||||
"temporal" => SchedulerType::Temporal,
|
||||
"legacy" => SchedulerType::Legacy,
|
||||
_ => {
|
||||
tracing::warn!(
|
||||
"Unknown scheduler type '{}', defaulting to legacy scheduler",
|
||||
scheduler_type
|
||||
);
|
||||
SchedulerType::Legacy
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
Err(_) => {
|
||||
// When alpha is enabled but no explicit scheduler type is set,
|
||||
// default to temporal scheduler
|
||||
tracing::info!("Alpha enabled, defaulting to temporal scheduler");
|
||||
SchedulerType::Temporal
|
||||
tracing::debug!("GOOSE_SCHEDULER_TYPE environment variable not found");
|
||||
// When no explicit scheduler type is set, default to legacy scheduler
|
||||
tracing::info!("No scheduler type specified, defaulting to legacy scheduler");
|
||||
SchedulerType::Legacy
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -123,62 +115,38 @@ mod tests {
|
||||
use temp_env::with_vars;
|
||||
|
||||
#[test]
|
||||
fn test_scheduler_type_no_alpha_env() {
|
||||
// Test that without ALPHA env var, we always get Legacy scheduler
|
||||
with_vars(
|
||||
[
|
||||
("ALPHA", None::<&str>),
|
||||
("GOOSE_SCHEDULER_TYPE", Some("temporal")),
|
||||
],
|
||||
|| {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
},
|
||||
);
|
||||
fn test_scheduler_type_no_env() {
|
||||
// Test that without GOOSE_SCHEDULER_TYPE env var, we get Legacy scheduler
|
||||
with_vars([("GOOSE_SCHEDULER_TYPE", None::<&str>)], || {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduler_type_alpha_false() {
|
||||
// Test that with ALPHA=false, we always get Legacy scheduler
|
||||
with_vars(
|
||||
[
|
||||
("ALPHA", Some("false")),
|
||||
("GOOSE_SCHEDULER_TYPE", Some("temporal")),
|
||||
],
|
||||
|| {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
},
|
||||
);
|
||||
fn test_scheduler_type_legacy() {
|
||||
// Test that with GOOSE_SCHEDULER_TYPE=legacy, we get Legacy scheduler
|
||||
with_vars([("GOOSE_SCHEDULER_TYPE", Some("legacy"))], || {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduler_type_alpha_true_legacy() {
|
||||
// Test that with ALPHA=true and GOOSE_SCHEDULER_TYPE=legacy, we get Legacy scheduler
|
||||
with_vars(
|
||||
[
|
||||
("ALPHA", Some("true")),
|
||||
("GOOSE_SCHEDULER_TYPE", Some("legacy")),
|
||||
],
|
||||
|| {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
},
|
||||
);
|
||||
fn test_scheduler_type_temporal() {
|
||||
// Test that with GOOSE_SCHEDULER_TYPE=temporal, we get Temporal scheduler
|
||||
with_vars([("GOOSE_SCHEDULER_TYPE", Some("temporal"))], || {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Temporal));
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scheduler_type_alpha_true_unknown_scheduler_type() {
|
||||
// Test that with ALPHA=true and unknown scheduler type, we default to Legacy
|
||||
with_vars(
|
||||
[
|
||||
("ALPHA", Some("true")),
|
||||
("GOOSE_SCHEDULER_TYPE", Some("unknown")),
|
||||
],
|
||||
|| {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
},
|
||||
);
|
||||
fn test_scheduler_type_unknown() {
|
||||
// Test that with unknown scheduler type, we default to Legacy
|
||||
with_vars([("GOOSE_SCHEDULER_TYPE", Some("unknown"))], || {
|
||||
let scheduler_type = SchedulerType::from_config();
|
||||
assert!(matches!(scheduler_type, SchedulerType::Legacy));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,9 +3,10 @@ pub mod storage;
|
||||
|
||||
// Re-export common session types and functions
|
||||
pub use storage::{
|
||||
ensure_session_dir, generate_description, generate_session_id, get_most_recent_session,
|
||||
get_path, list_sessions, persist_messages, read_messages, read_metadata, update_metadata,
|
||||
Identifier, SessionMetadata,
|
||||
ensure_session_dir, generate_description, generate_description_with_schedule_id,
|
||||
generate_session_id, get_most_recent_session, get_path, list_sessions, persist_messages,
|
||||
persist_messages_with_schedule_id, read_messages, read_metadata, update_metadata, Identifier,
|
||||
SessionMetadata,
|
||||
};
|
||||
|
||||
pub use info::{get_session_info, SessionInfo};
|
||||
|
||||
@@ -437,6 +437,19 @@ pub async fn persist_messages(
|
||||
session_file: &Path,
|
||||
messages: &[Message],
|
||||
provider: Option<Arc<dyn Provider>>,
|
||||
) -> Result<()> {
|
||||
persist_messages_with_schedule_id(session_file, messages, provider, None).await
|
||||
}
|
||||
|
||||
/// Write messages to a session file with metadata, including an optional scheduled job ID
|
||||
///
|
||||
/// Overwrites the file with metadata as the first line, followed by all messages in JSONL format.
|
||||
/// If a provider is supplied, it will automatically generate a description when appropriate.
|
||||
pub async fn persist_messages_with_schedule_id(
|
||||
session_file: &Path,
|
||||
messages: &[Message],
|
||||
provider: Option<Arc<dyn Provider>>,
|
||||
schedule_id: Option<String>,
|
||||
) -> Result<()> {
|
||||
// Count user messages
|
||||
let user_message_count = messages
|
||||
@@ -448,11 +461,16 @@ pub async fn persist_messages(
|
||||
match provider {
|
||||
Some(provider) if user_message_count < 4 => {
|
||||
//generate_description is responsible for writing the messages
|
||||
generate_description(session_file, messages, provider).await
|
||||
generate_description_with_schedule_id(session_file, messages, provider, schedule_id)
|
||||
.await
|
||||
}
|
||||
_ => {
|
||||
// Read existing metadata
|
||||
let metadata = read_metadata(session_file)?;
|
||||
let mut metadata = read_metadata(session_file)?;
|
||||
// Update the schedule_id if provided
|
||||
if schedule_id.is_some() {
|
||||
metadata.schedule_id = schedule_id;
|
||||
}
|
||||
// Write the file with metadata and messages
|
||||
save_messages_with_metadata(session_file, &metadata, messages)
|
||||
}
|
||||
@@ -492,6 +510,19 @@ pub async fn generate_description(
|
||||
session_file: &Path,
|
||||
messages: &[Message],
|
||||
provider: Arc<dyn Provider>,
|
||||
) -> Result<()> {
|
||||
generate_description_with_schedule_id(session_file, messages, provider, None).await
|
||||
}
|
||||
|
||||
/// Generate a description for the session using the provider, including an optional scheduled job ID
|
||||
///
|
||||
/// This function is called when appropriate to generate a short description
|
||||
/// of the session based on the conversation history.
|
||||
pub async fn generate_description_with_schedule_id(
|
||||
session_file: &Path,
|
||||
messages: &[Message],
|
||||
provider: Arc<dyn Provider>,
|
||||
schedule_id: Option<String>,
|
||||
) -> Result<()> {
|
||||
// Create a special message asking for a 3-word description
|
||||
let mut description_prompt = "Based on the conversation so far, provide a concise description of this session in 4 words or less. This will be used for finding the session later in a UI with limited space - reply *ONLY* with the description".to_string();
|
||||
@@ -527,8 +558,11 @@ pub async fn generate_description(
|
||||
// Read current metadata
|
||||
let mut metadata = read_metadata(session_file)?;
|
||||
|
||||
// Update description
|
||||
// Update description and schedule_id
|
||||
metadata.description = description;
|
||||
if schedule_id.is_some() {
|
||||
metadata.schedule_id = schedule_id;
|
||||
}
|
||||
|
||||
// Update the file with the new metadata and existing messages
|
||||
save_messages_with_metadata(session_file, &metadata, messages)
|
||||
|
||||
@@ -16,8 +16,9 @@ use crate::session::storage::SessionMetadata;
|
||||
const TEMPORAL_SERVICE_STARTUP_TIMEOUT: Duration = Duration::from_secs(15);
|
||||
const TEMPORAL_SERVICE_HEALTH_CHECK_INTERVAL: Duration = Duration::from_millis(500);
|
||||
|
||||
// Default ports to try when discovering the service
|
||||
const DEFAULT_HTTP_PORTS: &[u16] = &[8080, 8081, 8082, 8083, 8084, 8085];
|
||||
// Default ports to try when discovering the service - using high, obscure ports
|
||||
// to avoid conflicts with common services
|
||||
const DEFAULT_HTTP_PORTS: &[u16] = &[58080, 58081, 58082, 58083, 58084, 58085];
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
struct JobRequest {
|
||||
@@ -25,6 +26,7 @@ struct JobRequest {
|
||||
job_id: Option<String>,
|
||||
cron: Option<String>,
|
||||
recipe_path: Option<String>,
|
||||
execution_mode: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -45,6 +47,7 @@ struct TemporalJobStatus {
|
||||
currently_running: bool,
|
||||
paused: bool,
|
||||
created_at: String,
|
||||
execution_mode: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -52,13 +55,14 @@ struct RunNowResponse {
|
||||
session_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct PortConfig {
|
||||
http_port: u16,
|
||||
temporal_port: u16,
|
||||
ui_port: u16,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TemporalScheduler {
|
||||
http_client: Client,
|
||||
service_url: String,
|
||||
@@ -107,50 +111,61 @@ impl TemporalScheduler {
|
||||
port_config,
|
||||
});
|
||||
|
||||
// Start the status monitor to keep job statuses in sync
|
||||
if let Err(e) = final_scheduler.start_status_monitor().await {
|
||||
tracing::warn!("Failed to start status monitor: {}", e);
|
||||
}
|
||||
|
||||
info!("TemporalScheduler initialized successfully");
|
||||
Ok(final_scheduler)
|
||||
}
|
||||
|
||||
async fn discover_http_port(_http_client: &Client) -> Result<u16, SchedulerError> {
|
||||
// First, try to find a running service using pgrep and lsof
|
||||
if let Ok(port) = Self::find_temporal_service_port_from_processes() {
|
||||
info!(
|
||||
"Found Temporal service port {} from running processes",
|
||||
port
|
||||
);
|
||||
return Ok(port);
|
||||
}
|
||||
|
||||
// If no running service found, we need to find a free port to start the service on
|
||||
info!("No running Temporal service found, finding free port to start service");
|
||||
async fn discover_http_port(http_client: &Client) -> Result<u16, SchedulerError> {
|
||||
info!("Discovering Temporal service port...");
|
||||
|
||||
// Check PORT environment variable first
|
||||
if let Ok(port_str) = std::env::var("PORT") {
|
||||
if let Ok(port) = port_str.parse::<u16>() {
|
||||
if Self::is_port_free(port).await {
|
||||
info!("Using PORT environment variable: {}", port);
|
||||
if Self::is_temporal_service_running(http_client, port).await {
|
||||
info!(
|
||||
"Found running Temporal service on PORT environment variable: {}",
|
||||
port
|
||||
);
|
||||
return Ok(port);
|
||||
} else if Self::is_port_free(port).await {
|
||||
info!("Using PORT environment variable for new service: {}", port);
|
||||
return Ok(port);
|
||||
} else {
|
||||
warn!(
|
||||
"PORT environment variable {} is not free, finding alternative",
|
||||
"PORT environment variable {} is occupied by non-Temporal service",
|
||||
port
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try to find a free port from the default list
|
||||
// Try to find an existing Temporal service on default ports
|
||||
for &port in DEFAULT_HTTP_PORTS {
|
||||
if Self::is_temporal_service_running(http_client, port).await {
|
||||
info!("Found existing Temporal service on port {}", port);
|
||||
return Ok(port);
|
||||
}
|
||||
}
|
||||
|
||||
// If no existing service found, find a free port to start a new one
|
||||
info!("No existing Temporal service found, finding free port to start new service");
|
||||
|
||||
for &port in DEFAULT_HTTP_PORTS {
|
||||
if Self::is_port_free(port).await {
|
||||
info!("Found free port {} for Temporal service", port);
|
||||
info!("Found free port {} for new Temporal service", port);
|
||||
return Ok(port);
|
||||
}
|
||||
}
|
||||
|
||||
// If all default ports are taken, find any free port in a reasonable range
|
||||
for port in 8086..8200 {
|
||||
for port in 58086..58200 {
|
||||
if Self::is_port_free(port).await {
|
||||
info!("Found free port {} for Temporal service", port);
|
||||
info!("Found free port {} for new Temporal service", port);
|
||||
return Ok(port);
|
||||
}
|
||||
}
|
||||
@@ -160,112 +175,51 @@ impl TemporalScheduler {
|
||||
))
|
||||
}
|
||||
|
||||
async fn is_port_free(port: u16) -> bool {
|
||||
use std::net::{SocketAddr, TcpListener};
|
||||
use std::time::Duration;
|
||||
/// Check if a Temporal service is running and responding on the given port
|
||||
async fn is_temporal_service_running(http_client: &Client, port: u16) -> bool {
|
||||
let health_url = format!("http://127.0.0.1:{}/health", port);
|
||||
|
||||
let addr: SocketAddr = format!("127.0.0.1:{}", port).parse().unwrap();
|
||||
|
||||
// First, try to bind to the port
|
||||
let listener_result = TcpListener::bind(addr);
|
||||
match listener_result {
|
||||
Ok(listener) => {
|
||||
// Successfully bound, so port was free
|
||||
drop(listener); // Release the port immediately
|
||||
|
||||
// Double-check by trying to connect to see if anything is actually listening
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(Duration::from_millis(500))
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let test_url = format!("http://127.0.0.1:{}", port);
|
||||
match client.get(&test_url).send().await {
|
||||
Ok(_) => {
|
||||
// Something responded, so port is actually in use
|
||||
warn!(
|
||||
"Port {} appeared free but something is listening on it",
|
||||
port
|
||||
);
|
||||
false
|
||||
}
|
||||
Err(_) => {
|
||||
// Nothing responded, port is truly free
|
||||
true
|
||||
}
|
||||
}
|
||||
match http_client
|
||||
.get(&health_url)
|
||||
.timeout(Duration::from_millis(1000))
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(response) if response.status().is_success() => {
|
||||
info!("Confirmed Temporal service is running on port {}", port);
|
||||
true
|
||||
}
|
||||
Ok(response) => {
|
||||
info!(
|
||||
"Port {} is responding but not a healthy Temporal service (status: {})",
|
||||
port,
|
||||
response.status()
|
||||
);
|
||||
false
|
||||
}
|
||||
Err(_) => {
|
||||
// Could not bind, port is definitely in use
|
||||
// Port might be free or occupied by something else
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn find_temporal_service_port_from_processes() -> Result<u16, SchedulerError> {
|
||||
// Use pgrep to find temporal-service processes
|
||||
let pgrep_output = Command::new("pgrep")
|
||||
.arg("-f")
|
||||
.arg("temporal-service")
|
||||
.output()
|
||||
.map_err(|e| SchedulerError::SchedulerInternalError(format!("pgrep failed: {}", e)))?;
|
||||
async fn is_port_free(port: u16) -> bool {
|
||||
use std::net::{SocketAddr, TcpListener};
|
||||
|
||||
if !pgrep_output.status.success() {
|
||||
return Err(SchedulerError::SchedulerInternalError(
|
||||
"No temporal-service processes found".to_string(),
|
||||
));
|
||||
}
|
||||
let addr: SocketAddr = format!("127.0.0.1:{}", port).parse().unwrap();
|
||||
|
||||
let pids_str = String::from_utf8_lossy(&pgrep_output.stdout);
|
||||
let pids: Vec<&str> = pids_str
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect();
|
||||
|
||||
for pid in pids {
|
||||
// Use lsof to find listening ports for this PID
|
||||
let lsof_output = Command::new("lsof")
|
||||
.arg("-p")
|
||||
.arg(pid)
|
||||
.arg("-i")
|
||||
.arg("tcp")
|
||||
.arg("-P") // Show port numbers instead of service names
|
||||
.arg("-n") // Show IP addresses instead of hostnames
|
||||
.output();
|
||||
|
||||
if let Ok(output) = lsof_output {
|
||||
let lsof_str = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// Look for HTTP API port (typically 8080-8999 range)
|
||||
for line in lsof_str.lines() {
|
||||
if line.contains("LISTEN") && line.contains("temporal-") {
|
||||
// Parse lines like: "temporal-service 12345 user 6u IPv4 0x... 0t0 TCP *:8081 (LISTEN)"
|
||||
let parts: Vec<&str> = line.split_whitespace().collect();
|
||||
|
||||
// Find the TCP part which contains the port
|
||||
for part in &parts {
|
||||
if part.starts_with("TCP") && part.contains(':') {
|
||||
// Extract port from TCP *:8081 or TCP 127.0.0.1:8081
|
||||
if let Some(port_str) = part.split(':').next_back() {
|
||||
if let Ok(port) = port_str.parse::<u16>() {
|
||||
// HTTP API ports are typically in 8080-8999 range
|
||||
if (8080..9000).contains(&port) {
|
||||
info!("Found HTTP API port {} for PID {}", port, pid);
|
||||
return Ok(port);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Try to bind to the port
|
||||
match TcpListener::bind(addr) {
|
||||
Ok(_listener) => {
|
||||
// Successfully bound, so port is free
|
||||
true
|
||||
}
|
||||
Err(_) => {
|
||||
// Could not bind, port is in use
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
Err(SchedulerError::SchedulerInternalError(
|
||||
"Could not find HTTP API port from temporal-service processes".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
async fn fetch_port_config(&self) -> Result<PortConfig, SchedulerError> {
|
||||
@@ -305,7 +259,7 @@ impl TemporalScheduler {
|
||||
self.port_config.temporal_port
|
||||
}
|
||||
|
||||
/// Get the HTTP API port
|
||||
/// Get the HTTP API port
|
||||
pub fn get_http_port(&self) -> u16 {
|
||||
self.port_config.http_port
|
||||
}
|
||||
@@ -366,7 +320,7 @@ impl TemporalScheduler {
|
||||
command.process_group(0);
|
||||
}
|
||||
|
||||
let child = command.spawn().map_err(|e| {
|
||||
let mut child = command.spawn().map_err(|e| {
|
||||
SchedulerError::SchedulerInternalError(format!(
|
||||
"Failed to start Go temporal service: {}",
|
||||
e
|
||||
@@ -379,9 +333,6 @@ impl TemporalScheduler {
|
||||
pid, self.port_config.http_port
|
||||
);
|
||||
|
||||
// Don't wait for the child process - let it run independently
|
||||
std::mem::forget(child);
|
||||
|
||||
// Give the process a moment to start up
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
|
||||
@@ -410,6 +361,12 @@ impl TemporalScheduler {
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the child process by not waiting for it
|
||||
// This allows it to continue running independently
|
||||
std::thread::spawn(move || {
|
||||
let _ = child.wait();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -535,6 +492,7 @@ impl TemporalScheduler {
|
||||
job_id: Some(job.id.clone()),
|
||||
cron: Some(job.cron.clone()),
|
||||
recipe_path: Some(job.source.clone()),
|
||||
execution_mode: job.execution_mode.clone(),
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
@@ -554,6 +512,7 @@ impl TemporalScheduler {
|
||||
job_id: None,
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
@@ -572,6 +531,7 @@ impl TemporalScheduler {
|
||||
paused: tj.paused,
|
||||
current_session_id: None, // Not provided by Temporal service
|
||||
process_start_time: None, // Not provided by Temporal service
|
||||
execution_mode: tj.execution_mode,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
@@ -587,6 +547,7 @@ impl TemporalScheduler {
|
||||
job_id: Some(id.to_string()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
@@ -605,6 +566,7 @@ impl TemporalScheduler {
|
||||
job_id: Some(id.to_string()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
@@ -623,6 +585,7 @@ impl TemporalScheduler {
|
||||
job_id: Some(id.to_string()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
@@ -642,6 +605,7 @@ impl TemporalScheduler {
|
||||
job_id: Some(id.to_string()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
@@ -717,20 +681,172 @@ impl TemporalScheduler {
|
||||
|
||||
pub async fn update_schedule(
|
||||
&self,
|
||||
_sched_id: &str,
|
||||
_new_cron: String,
|
||||
sched_id: &str,
|
||||
new_cron: String,
|
||||
) -> Result<(), SchedulerError> {
|
||||
warn!("update_schedule() method not implemented for TemporalScheduler - delete and recreate job instead");
|
||||
Err(SchedulerError::SchedulerInternalError(
|
||||
"update_schedule not supported - delete and recreate job instead".to_string(),
|
||||
))
|
||||
tracing::info!(
|
||||
"TemporalScheduler: update_schedule() called for job '{}' with cron '{}'",
|
||||
sched_id,
|
||||
new_cron
|
||||
);
|
||||
|
||||
let request = JobRequest {
|
||||
action: "update".to_string(),
|
||||
job_id: Some(sched_id.to_string()),
|
||||
cron: Some(new_cron),
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
|
||||
if response.success {
|
||||
info!("Successfully updated scheduled job: {}", sched_id);
|
||||
Ok(())
|
||||
} else {
|
||||
Err(SchedulerError::SchedulerInternalError(response.message))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn kill_running_job(&self, _sched_id: &str) -> Result<(), SchedulerError> {
|
||||
warn!("kill_running_job() method not implemented for TemporalScheduler");
|
||||
Err(SchedulerError::SchedulerInternalError(
|
||||
"kill_running_job not supported by TemporalScheduler".to_string(),
|
||||
))
|
||||
pub async fn kill_running_job(&self, sched_id: &str) -> Result<(), SchedulerError> {
|
||||
tracing::info!(
|
||||
"TemporalScheduler: kill_running_job() called for job '{}'",
|
||||
sched_id
|
||||
);
|
||||
|
||||
let request = JobRequest {
|
||||
action: "kill_job".to_string(),
|
||||
job_id: Some(sched_id.to_string()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
let response = self.make_request(request).await?;
|
||||
|
||||
if response.success {
|
||||
info!("Successfully killed running job: {}", sched_id);
|
||||
Ok(())
|
||||
} else {
|
||||
Err(SchedulerError::SchedulerInternalError(response.message))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn update_job_status_from_sessions(&self) -> Result<(), SchedulerError> {
|
||||
tracing::info!("TemporalScheduler: Checking job status based on session activity");
|
||||
|
||||
let jobs = self.list_scheduled_jobs().await?;
|
||||
|
||||
for job in jobs {
|
||||
if job.currently_running {
|
||||
// First, check with the Temporal service directly for the most accurate status
|
||||
let request = JobRequest {
|
||||
action: "status".to_string(),
|
||||
job_id: Some(job.id.clone()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
match self.make_request(request).await {
|
||||
Ok(response) => {
|
||||
if response.success {
|
||||
if let Some(jobs) = response.jobs {
|
||||
if let Some(temporal_job) = jobs.iter().find(|j| j.id == job.id) {
|
||||
// If Temporal service says it's not running, trust that
|
||||
if !temporal_job.currently_running {
|
||||
tracing::info!(
|
||||
"Temporal service reports job '{}' is not running",
|
||||
job.id
|
||||
);
|
||||
continue; // Job is already marked as not running by Temporal
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!(
|
||||
"Failed to get status from Temporal service for job '{}': {}",
|
||||
job.id,
|
||||
e
|
||||
);
|
||||
// Fall back to session-based checking if Temporal service is unavailable
|
||||
}
|
||||
}
|
||||
|
||||
// Secondary check: look for recent session activity (more lenient timing)
|
||||
let recent_sessions = self.sessions(&job.id, 3).await?;
|
||||
let mut has_active_session = false;
|
||||
|
||||
for (session_name, _) in recent_sessions {
|
||||
let session_path = crate::session::storage::get_path(
|
||||
crate::session::storage::Identifier::Name(session_name),
|
||||
);
|
||||
|
||||
// Check if session file was modified recently (within last 5 minutes instead of 2)
|
||||
if let Ok(metadata) = std::fs::metadata(&session_path) {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
let modified_dt: DateTime<Utc> = modified.into();
|
||||
let now = Utc::now();
|
||||
let time_diff = now.signed_duration_since(modified_dt);
|
||||
|
||||
// Increased tolerance to 5 minutes to reduce false positives
|
||||
if time_diff.num_minutes() < 5 {
|
||||
has_active_session = true;
|
||||
tracing::debug!(
|
||||
"Found active session for job '{}' modified {} minutes ago",
|
||||
job.id,
|
||||
time_diff.num_minutes()
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only mark as completed if both Temporal service check failed AND no recent session activity
|
||||
if !has_active_session {
|
||||
tracing::info!(
|
||||
"No active sessions found for job '{}' in the last 5 minutes, marking as completed",
|
||||
job.id
|
||||
);
|
||||
|
||||
let request = JobRequest {
|
||||
action: "mark_completed".to_string(),
|
||||
job_id: Some(job.id.clone()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
if let Err(e) = self.make_request(request).await {
|
||||
tracing::warn!("Failed to mark job '{}' as completed: {}", job.id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Periodically check and update job statuses based on session activity
|
||||
pub async fn start_status_monitor(&self) -> Result<(), SchedulerError> {
|
||||
let scheduler_clone = self.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(Duration::from_secs(60)); // Check every 60 seconds instead of 30
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
if let Err(e) = scheduler_clone.update_job_status_from_sessions().await {
|
||||
tracing::warn!("Failed to update job statuses: {}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_running_job_info(
|
||||
@@ -742,24 +858,62 @@ impl TemporalScheduler {
|
||||
sched_id
|
||||
);
|
||||
|
||||
// First check if the job is marked as currently running
|
||||
let jobs = self.list_scheduled_jobs().await?;
|
||||
let job = jobs.iter().find(|j| j.id == sched_id);
|
||||
// Get the current job status from Temporal service
|
||||
let request = JobRequest {
|
||||
action: "status".to_string(),
|
||||
job_id: Some(sched_id.to_string()),
|
||||
cron: None,
|
||||
recipe_path: None,
|
||||
execution_mode: None,
|
||||
};
|
||||
|
||||
if let Some(job) = job {
|
||||
if job.currently_running {
|
||||
// For now, we'll return a placeholder session ID and current time
|
||||
// In a more complete implementation, we would track the actual session ID
|
||||
// and start time from the Temporal workflow execution
|
||||
let session_id =
|
||||
format!("temporal-{}-{}", sched_id, chrono::Utc::now().timestamp());
|
||||
let start_time = chrono::Utc::now(); // This should be the actual start time
|
||||
Ok(Some((session_id, start_time)))
|
||||
let response = self.make_request(request).await?;
|
||||
|
||||
if response.success {
|
||||
if let Some(jobs) = response.jobs {
|
||||
if let Some(job) = jobs.iter().find(|j| j.id == sched_id) {
|
||||
if job.currently_running {
|
||||
// Try to get the actual session ID from recent sessions
|
||||
let recent_sessions = self.sessions(sched_id, 1).await?;
|
||||
|
||||
if let Some((session_name, _session_metadata)) = recent_sessions.first() {
|
||||
// Check if this session is still active by looking at the session file
|
||||
let session_path = crate::session::storage::get_path(
|
||||
crate::session::storage::Identifier::Name(session_name.clone()),
|
||||
);
|
||||
|
||||
// If the session file was modified recently (within last 5 minutes),
|
||||
// consider it as the current running session
|
||||
if let Ok(metadata) = std::fs::metadata(&session_path) {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
let modified_dt: DateTime<Utc> = modified.into();
|
||||
let now = Utc::now();
|
||||
let time_diff = now.signed_duration_since(modified_dt);
|
||||
|
||||
if time_diff.num_minutes() < 5 {
|
||||
// This looks like an active session
|
||||
return Ok(Some((session_name.clone(), modified_dt)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: return a temporal session ID with current time
|
||||
let session_id =
|
||||
format!("temporal-{}-{}", sched_id, Utc::now().timestamp());
|
||||
let start_time = Utc::now();
|
||||
Ok(Some((session_id, start_time)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
} else {
|
||||
Err(SchedulerError::JobNotFound(sched_id.to_string()))
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
Err(SchedulerError::JobNotFound(sched_id.to_string()))
|
||||
}
|
||||
} else {
|
||||
Err(SchedulerError::JobNotFound(sched_id.to_string()))
|
||||
Err(SchedulerError::SchedulerInternalError(response.message))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1021,17 +1175,43 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sessions_method_signature() {
|
||||
// This test verifies the method signature is correct at compile time
|
||||
// We just need to verify the method exists and can be called
|
||||
fn test_job_status_detection_improvements() {
|
||||
// Test that the new job status detection methods compile and work correctly
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
// This will fail to compile if the method doesn't exist or has wrong signature
|
||||
let _test_fn = |scheduler: &TemporalScheduler, id: &str, limit: usize| {
|
||||
// This is a compile-time check - we don't actually call it
|
||||
let _future = scheduler.sessions(id, limit);
|
||||
};
|
||||
let rt = Runtime::new().unwrap();
|
||||
rt.block_on(async {
|
||||
// This test verifies the improved job status detection compiles
|
||||
match TemporalScheduler::new().await {
|
||||
Ok(scheduler) => {
|
||||
// Test the new status update method
|
||||
match scheduler.update_job_status_from_sessions().await {
|
||||
Ok(()) => {
|
||||
println!("✅ update_job_status_from_sessions() works correctly");
|
||||
}
|
||||
Err(e) => {
|
||||
println!("⚠️ update_job_status_from_sessions() returned error (expected if no jobs): {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
println!("✅ sessions() method signature is correct");
|
||||
// Test the improved get_running_job_info method
|
||||
match scheduler.get_running_job_info("test-job").await {
|
||||
Ok(None) => {
|
||||
println!("✅ get_running_job_info() correctly returns None for non-existent job");
|
||||
}
|
||||
Ok(Some((session_id, start_time))) => {
|
||||
println!("✅ get_running_job_info() returned session info: {} at {}", session_id, start_time);
|
||||
}
|
||||
Err(e) => {
|
||||
println!("⚠️ get_running_job_info() returned error (expected): {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
println!("⚠️ Temporal services not running - method signature test passed: {}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -361,6 +361,7 @@ impl ScheduleToolTestBuilder {
|
||||
paused: false,
|
||||
current_session_id: None,
|
||||
process_start_time: None,
|
||||
execution_mode: Some("background".to_string()),
|
||||
};
|
||||
{
|
||||
let mut jobs = self.scheduler.jobs.lock().await;
|
||||
|
||||
@@ -14,22 +14,40 @@ if [ ! -f "go.sum" ]; then
|
||||
go mod tidy
|
||||
fi
|
||||
|
||||
# Build the service
|
||||
echo "Compiling Go binary..."
|
||||
go build -o temporal-service main.go
|
||||
# Determine binary name based on target OS
|
||||
BINARY_NAME="temporal-service"
|
||||
if [ "${GOOS:-}" = "windows" ]; then
|
||||
BINARY_NAME="temporal-service.exe"
|
||||
fi
|
||||
|
||||
# Make it executable
|
||||
chmod +x temporal-service
|
||||
# Build the service with cross-compilation support
|
||||
echo "Compiling Go binary..."
|
||||
if [ -n "${GOOS:-}" ] && [ -n "${GOARCH:-}" ]; then
|
||||
echo "Cross-compiling for ${GOOS}/${GOARCH}..."
|
||||
GOOS="${GOOS}" GOARCH="${GOARCH}" go build -o "${BINARY_NAME}" .
|
||||
else
|
||||
echo "Building for current platform..."
|
||||
go build -o "${BINARY_NAME}" .
|
||||
fi
|
||||
|
||||
# Make it executable (skip on Windows as it's not needed)
|
||||
if [ "${GOOS:-}" != "windows" ]; then
|
||||
chmod +x "${BINARY_NAME}"
|
||||
fi
|
||||
|
||||
echo "Build completed successfully!"
|
||||
echo "Binary location: $(pwd)/temporal-service"
|
||||
echo ""
|
||||
echo "Prerequisites:"
|
||||
echo " 1. Install Temporal CLI: brew install temporal"
|
||||
echo " 2. Start Temporal server: temporal server start-dev"
|
||||
echo ""
|
||||
echo "To run the service:"
|
||||
echo " ./temporal-service"
|
||||
echo ""
|
||||
echo "Environment variables:"
|
||||
echo " PORT - HTTP port (default: 8080)"
|
||||
echo "Binary location: $(pwd)/${BINARY_NAME}"
|
||||
|
||||
# Only show usage info if not cross-compiling
|
||||
if [ -z "${GOOS:-}" ] || [ "${GOOS}" = "$(go env GOOS)" ]; then
|
||||
echo ""
|
||||
echo "Prerequisites:"
|
||||
echo " 1. Install Temporal CLI: brew install temporal"
|
||||
echo " 2. Start Temporal server: temporal server start-dev"
|
||||
echo ""
|
||||
echo "To run the service:"
|
||||
echo " ./${BINARY_NAME}"
|
||||
echo ""
|
||||
echo "Environment variables:"
|
||||
echo " PORT - HTTP port (default: 8080)"
|
||||
fi
|
||||
578
temporal-service/goose_workflow.go
Normal file
578
temporal-service/goose_workflow.go
Normal file
@@ -0,0 +1,578 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"go.temporal.io/sdk/activity"
|
||||
"go.temporal.io/sdk/workflow"
|
||||
"go.temporal.io/sdk/temporal"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// Recipe represents the structure we need from recipe files
|
||||
type Recipe struct {
|
||||
Title string `json:"title" yaml:"title"`
|
||||
Description string `json:"description" yaml:"description"`
|
||||
Instructions *string `json:"instructions" yaml:"instructions"`
|
||||
Prompt *string `json:"prompt" yaml:"prompt"`
|
||||
}
|
||||
|
||||
// Workflow definition for executing Goose recipes
|
||||
func GooseJobWorkflow(ctx workflow.Context, jobID, recipePath string) (string, error) {
|
||||
logger := workflow.GetLogger(ctx)
|
||||
logger.Info("Starting Goose job workflow", "jobID", jobID, "recipePath", recipePath)
|
||||
|
||||
ao := workflow.ActivityOptions{
|
||||
StartToCloseTimeout: 2 * time.Hour, // Allow up to 2 hours for job execution
|
||||
RetryPolicy: &temporal.RetryPolicy{
|
||||
InitialInterval: time.Second,
|
||||
BackoffCoefficient: 2.0,
|
||||
MaximumInterval: time.Minute,
|
||||
MaximumAttempts: 3,
|
||||
NonRetryableErrorTypes: []string{"InvalidRecipeError"},
|
||||
},
|
||||
}
|
||||
ctx = workflow.WithActivityOptions(ctx, ao)
|
||||
|
||||
var sessionID string
|
||||
err := workflow.ExecuteActivity(ctx, ExecuteGooseRecipe, jobID, recipePath).Get(ctx, &sessionID)
|
||||
if err != nil {
|
||||
logger.Error("Goose job workflow failed", "jobID", jobID, "error", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
logger.Info("Goose job workflow completed", "jobID", jobID, "sessionID", sessionID)
|
||||
return sessionID, nil
|
||||
}
|
||||
|
||||
// Activity definition for executing Goose recipes with proper cancellation handling
|
||||
func ExecuteGooseRecipe(ctx context.Context, jobID, recipePath string) (string, error) {
|
||||
logger := activity.GetLogger(ctx)
|
||||
logger.Info("Executing Goose recipe", "jobID", jobID, "recipePath", recipePath)
|
||||
|
||||
// Mark job as running at the start
|
||||
if globalService != nil {
|
||||
globalService.markJobAsRunning(jobID)
|
||||
// Ensure we mark it as not running when we're done
|
||||
defer globalService.markJobAsNotRunning(jobID)
|
||||
}
|
||||
|
||||
// Resolve the actual recipe path (might be embedded in metadata)
|
||||
actualRecipePath, err := resolveRecipePath(jobID, recipePath)
|
||||
if err != nil {
|
||||
return "", temporal.NewNonRetryableApplicationError(
|
||||
fmt.Sprintf("failed to resolve recipe: %v", err),
|
||||
"InvalidRecipeError",
|
||||
err,
|
||||
)
|
||||
}
|
||||
|
||||
// Check if recipe file exists
|
||||
if _, err := os.Stat(actualRecipePath); os.IsNotExist(err) {
|
||||
return "", temporal.NewNonRetryableApplicationError(
|
||||
fmt.Sprintf("recipe file not found: %s", actualRecipePath),
|
||||
"InvalidRecipeError",
|
||||
err,
|
||||
)
|
||||
}
|
||||
|
||||
// Create a cancellable context for the subprocess
|
||||
subCtx, cancel := context.WithCancel(ctx)
|
||||
defer cancel()
|
||||
|
||||
// Monitor for activity cancellation
|
||||
go func() {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
logger.Info("Activity cancelled, killing process for job", "jobID", jobID)
|
||||
globalProcessManager.KillProcess(jobID)
|
||||
case <-subCtx.Done():
|
||||
// Normal completion
|
||||
}
|
||||
}()
|
||||
|
||||
// Check if this is a foreground job
|
||||
if isForegroundJob(actualRecipePath) {
|
||||
logger.Info("Executing foreground job with cancellation support", "jobID", jobID)
|
||||
return executeForegroundJobWithCancellation(subCtx, jobID, actualRecipePath)
|
||||
}
|
||||
|
||||
// For background jobs, execute with cancellation support
|
||||
logger.Info("Executing background job with cancellation support", "jobID", jobID)
|
||||
return executeBackgroundJobWithCancellation(subCtx, jobID, actualRecipePath)
|
||||
}
|
||||
|
||||
// resolveRecipePath resolves the actual recipe path, handling embedded recipes
|
||||
func resolveRecipePath(jobID, recipePath string) (string, error) {
|
||||
// If the recipe path exists as-is, use it
|
||||
if _, err := os.Stat(recipePath); err == nil {
|
||||
return recipePath, nil
|
||||
}
|
||||
|
||||
// Try to get embedded recipe content from schedule metadata
|
||||
if globalService != nil {
|
||||
if recipeContent, err := globalService.getEmbeddedRecipeContent(jobID); err == nil && recipeContent != "" {
|
||||
// Create a temporary file with the embedded content
|
||||
tempPath := filepath.Join(globalService.recipesDir, fmt.Sprintf("%s-temp.yaml", jobID))
|
||||
if err := os.WriteFile(tempPath, []byte(recipeContent), 0644); err != nil {
|
||||
return "", fmt.Errorf("failed to write temporary recipe file: %w", err)
|
||||
}
|
||||
log.Printf("Created temporary recipe file for job %s: %s", jobID, tempPath)
|
||||
return tempPath, nil
|
||||
}
|
||||
}
|
||||
|
||||
// If no embedded content and original path doesn't exist, return error
|
||||
return "", fmt.Errorf("recipe not found: %s (and no embedded content available)", recipePath)
|
||||
}
|
||||
|
||||
// executeBackgroundJobWithCancellation handles background job execution with proper process management
|
||||
func executeBackgroundJobWithCancellation(ctx context.Context, jobID, recipePath string) (string, error) {
|
||||
log.Printf("Executing background job %s using recipe file: %s", jobID, recipePath)
|
||||
|
||||
// Find the goose CLI binary
|
||||
goosePath, err := findGooseBinary()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to find goose CLI binary: %w", err)
|
||||
}
|
||||
|
||||
// Generate session name for this scheduled job
|
||||
sessionName := fmt.Sprintf("scheduled-%s", jobID)
|
||||
|
||||
// Create command with context for cancellation
|
||||
cmd := exec.CommandContext(ctx, goosePath, "run",
|
||||
"--recipe", recipePath,
|
||||
"--name", sessionName,
|
||||
"--scheduled-job-id", jobID,
|
||||
)
|
||||
|
||||
// Set up process group for proper cleanup
|
||||
cmd.SysProcAttr = &syscall.SysProcAttr{
|
||||
Setpgid: true, // Create new process group
|
||||
}
|
||||
|
||||
// Set up environment
|
||||
cmd.Env = append(os.Environ(),
|
||||
fmt.Sprintf("GOOSE_JOB_ID=%s", jobID),
|
||||
)
|
||||
|
||||
log.Printf("Starting background CLI job %s with session %s", jobID, sessionName)
|
||||
|
||||
// Start the process
|
||||
if err := cmd.Start(); err != nil {
|
||||
return "", fmt.Errorf("failed to start background CLI execution: %w", err)
|
||||
}
|
||||
|
||||
// Register the process with the process manager
|
||||
_, cancel := context.WithCancel(ctx)
|
||||
globalProcessManager.AddProcess(jobID, cmd.Process, cancel)
|
||||
|
||||
// Ensure cleanup
|
||||
defer func() {
|
||||
globalProcessManager.RemoveProcess(jobID)
|
||||
cancel()
|
||||
}()
|
||||
|
||||
// Wait for completion or cancellation
|
||||
done := make(chan error, 1)
|
||||
go func() {
|
||||
done <- cmd.Wait()
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
// Context cancelled - kill the process
|
||||
log.Printf("Background job %s cancelled, killing process", jobID)
|
||||
globalProcessManager.KillProcess(jobID)
|
||||
return "", ctx.Err()
|
||||
case err := <-done:
|
||||
if err != nil {
|
||||
log.Printf("Background CLI job %s failed: %v", jobID, err)
|
||||
return "", fmt.Errorf("background CLI execution failed: %w", err)
|
||||
}
|
||||
log.Printf("Background CLI job %s completed successfully with session %s", jobID, sessionName)
|
||||
return sessionName, nil
|
||||
}
|
||||
}
|
||||
|
||||
// executeForegroundJobWithCancellation handles foreground job execution with proper process management
|
||||
func executeForegroundJobWithCancellation(ctx context.Context, jobID, recipePath string) (string, error) {
|
||||
log.Printf("Executing foreground job %s with recipe %s", jobID, recipePath)
|
||||
|
||||
// Parse the recipe file first
|
||||
recipe, err := parseRecipeFile(recipePath)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to parse recipe file: %w", err)
|
||||
}
|
||||
|
||||
// Check if desktop app is running
|
||||
if isDesktopAppRunning() {
|
||||
log.Printf("Desktop app is running, using GUI mode for job %s", jobID)
|
||||
return executeForegroundJobGUIWithCancellation(ctx, jobID, recipe)
|
||||
}
|
||||
|
||||
// Desktop app not running, fall back to CLI
|
||||
log.Printf("Desktop app not running, falling back to CLI mode for job %s", jobID)
|
||||
return executeForegroundJobCLIWithCancellation(ctx, jobID, recipe, recipePath)
|
||||
}
|
||||
|
||||
// executeForegroundJobGUIWithCancellation handles GUI execution with cancellation
|
||||
func executeForegroundJobGUIWithCancellation(ctx context.Context, jobID string, recipe *Recipe) (string, error) {
|
||||
// Generate session name for this scheduled job
|
||||
sessionName := fmt.Sprintf("scheduled-%s", jobID)
|
||||
|
||||
// Generate deep link with session name
|
||||
deepLink, err := generateDeepLink(recipe, jobID, sessionName)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to generate deep link: %w", err)
|
||||
}
|
||||
|
||||
// Open the deep link
|
||||
if err := openDeepLink(deepLink); err != nil {
|
||||
return "", fmt.Errorf("failed to open deep link: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Foreground GUI job %s initiated with session %s, waiting for completion...", jobID, sessionName)
|
||||
|
||||
// Wait for session completion with cancellation support
|
||||
err = waitForSessionCompletionWithCancellation(ctx, sessionName, 2*time.Hour)
|
||||
if err != nil {
|
||||
if ctx.Err() != nil {
|
||||
log.Printf("GUI session %s cancelled", sessionName)
|
||||
return "", ctx.Err()
|
||||
}
|
||||
return "", fmt.Errorf("GUI session failed or timed out: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Foreground GUI job %s completed successfully with session %s", jobID, sessionName)
|
||||
return sessionName, nil
|
||||
}
|
||||
|
||||
// executeForegroundJobCLIWithCancellation handles CLI execution with cancellation
|
||||
func executeForegroundJobCLIWithCancellation(ctx context.Context, jobID string, recipe *Recipe, recipePath string) (string, error) {
|
||||
log.Printf("Executing job %s via CLI fallback using recipe file: %s", jobID, recipePath)
|
||||
// Find the goose CLI binary
|
||||
goosePath, err := findGooseBinary()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to find goose CLI binary: %w", err)
|
||||
}
|
||||
|
||||
// Generate session name for this scheduled job
|
||||
sessionName := fmt.Sprintf("scheduled-%s", jobID)
|
||||
// Create command with context for cancellation
|
||||
cmd := exec.CommandContext(ctx, goosePath, "run",
|
||||
"--recipe", recipePath,
|
||||
"--name", sessionName,
|
||||
"--scheduled-job-id", jobID,
|
||||
)
|
||||
|
||||
// Set up process group for proper cleanup
|
||||
cmd.SysProcAttr = &syscall.SysProcAttr{
|
||||
Setpgid: true, // Create new process group
|
||||
}
|
||||
|
||||
// Set up environment
|
||||
cmd.Env = append(os.Environ(),
|
||||
fmt.Sprintf("GOOSE_JOB_ID=%s", jobID),
|
||||
)
|
||||
|
||||
log.Printf("Starting foreground CLI job %s with session %s", jobID, sessionName)
|
||||
|
||||
// Start the process
|
||||
if err := cmd.Start(); err != nil {
|
||||
return "", fmt.Errorf("failed to start foreground CLI execution: %w", err)
|
||||
}
|
||||
|
||||
// Register the process with the process manager
|
||||
_, cancel := context.WithCancel(ctx)
|
||||
globalProcessManager.AddProcess(jobID, cmd.Process, cancel)
|
||||
|
||||
// Ensure cleanup
|
||||
defer func() {
|
||||
globalProcessManager.RemoveProcess(jobID)
|
||||
cancel()
|
||||
}()
|
||||
|
||||
// Wait for completion or cancellation
|
||||
done := make(chan error, 1)
|
||||
go func() {
|
||||
done <- cmd.Wait()
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
// Context cancelled - kill the process
|
||||
log.Printf("Foreground CLI job %s cancelled, killing process", jobID)
|
||||
globalProcessManager.KillProcess(jobID)
|
||||
return "", ctx.Err()
|
||||
case err := <-done:
|
||||
if err != nil {
|
||||
log.Printf("Foreground CLI job %s failed: %v", jobID, err)
|
||||
return "", fmt.Errorf("foreground CLI execution failed: %w", err)
|
||||
}
|
||||
log.Printf("Foreground CLI job %s completed successfully with session %s", jobID, sessionName)
|
||||
return sessionName, nil
|
||||
}
|
||||
}
|
||||
|
||||
// findGooseBinary locates the goose CLI binary
|
||||
func findGooseBinary() (string, error) {
|
||||
// Try different possible locations
|
||||
possiblePaths := []string{
|
||||
"goose", // In PATH
|
||||
"./goose", // Current directory
|
||||
"../goose", // Parent directory
|
||||
}
|
||||
|
||||
// Also try relative to the current executable
|
||||
if exePath, err := os.Executable(); err == nil {
|
||||
exeDir := filepath.Dir(exePath)
|
||||
possiblePaths = append(possiblePaths,
|
||||
filepath.Join(exeDir, "goose"),
|
||||
filepath.Join(exeDir, "..", "goose"),
|
||||
)
|
||||
}
|
||||
|
||||
for _, path := range possiblePaths {
|
||||
if _, err := exec.LookPath(path); err == nil {
|
||||
return path, nil
|
||||
}
|
||||
// Also check if file exists directly
|
||||
if _, err := os.Stat(path); err == nil {
|
||||
return path, nil
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("goose CLI binary not found in any of: %v", possiblePaths)
|
||||
}
|
||||
|
||||
// isDesktopAppRunning checks if the Goose desktop app is currently running
|
||||
func isDesktopAppRunning() bool {
|
||||
log.Println("Checking if desktop app is running...")
|
||||
|
||||
var cmd *exec.Cmd
|
||||
switch runtime.GOOS {
|
||||
case "darwin":
|
||||
cmd = exec.Command("pgrep", "-f", "Goose.app")
|
||||
case "windows":
|
||||
cmd = exec.Command("tasklist", "/FI", "IMAGENAME eq Goose.exe")
|
||||
case "linux":
|
||||
cmd = exec.Command("pgrep", "-f", "goose")
|
||||
default:
|
||||
log.Printf("Unsupported OS: %s", runtime.GOOS)
|
||||
return false
|
||||
}
|
||||
|
||||
output, err := cmd.Output()
|
||||
if err != nil {
|
||||
log.Printf("Failed to check if desktop app is running: %v", err)
|
||||
return false
|
||||
}
|
||||
|
||||
var isRunning bool
|
||||
switch runtime.GOOS {
|
||||
case "darwin", "linux":
|
||||
isRunning = len(output) > 0
|
||||
case "windows":
|
||||
isRunning = strings.Contains(string(output), "Goose.exe")
|
||||
}
|
||||
|
||||
log.Printf("Desktop app running: %v", isRunning)
|
||||
return isRunning
|
||||
}
|
||||
|
||||
// parseRecipeFile parses a recipe file (YAML or JSON)
|
||||
func parseRecipeFile(recipePath string) (*Recipe, error) {
|
||||
content, err := os.ReadFile(recipePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var recipe Recipe
|
||||
|
||||
// Try YAML first, then JSON
|
||||
if err := yaml.Unmarshal(content, &recipe); err != nil {
|
||||
if err := json.Unmarshal(content, &recipe); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse as YAML or JSON: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return &recipe, nil
|
||||
}
|
||||
|
||||
// generateDeepLink creates a deep link for the recipe with session name
|
||||
func generateDeepLink(recipe *Recipe, jobID, sessionName string) (string, error) {
|
||||
// Create the recipe config for the deep link
|
||||
recipeConfig := map[string]interface{}{
|
||||
"id": jobID,
|
||||
"title": recipe.Title,
|
||||
"description": recipe.Description,
|
||||
"instructions": recipe.Instructions,
|
||||
"activities": []string{}, // Empty activities array
|
||||
"prompt": recipe.Prompt,
|
||||
"sessionName": sessionName, // Include session name for proper tracking
|
||||
}
|
||||
|
||||
// Encode the config as JSON then base64
|
||||
configJSON, err := json.Marshal(recipeConfig)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
configBase64 := base64.StdEncoding.EncodeToString(configJSON)
|
||||
|
||||
// Create the deep link URL with scheduled job ID parameter
|
||||
deepLink := fmt.Sprintf("goose://recipe?config=%s&scheduledJob=%s", configBase64, jobID)
|
||||
|
||||
log.Printf("Generated deep link for job %s with session %s (length: %d)", jobID, sessionName, len(deepLink))
|
||||
return deepLink, nil
|
||||
}
|
||||
|
||||
// openDeepLink opens a deep link using the system's default protocol handler
|
||||
func openDeepLink(deepLink string) error {
|
||||
log.Printf("Opening deep link: %s", deepLink)
|
||||
|
||||
var cmd *exec.Cmd
|
||||
switch runtime.GOOS {
|
||||
case "darwin":
|
||||
cmd = exec.Command("open", deepLink)
|
||||
case "windows":
|
||||
cmd = exec.Command("cmd", "/c", "start", "", deepLink)
|
||||
case "linux":
|
||||
cmd = exec.Command("xdg-open", deepLink)
|
||||
default:
|
||||
return fmt.Errorf("unsupported OS: %s", runtime.GOOS)
|
||||
}
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
return fmt.Errorf("failed to open deep link: %w", err)
|
||||
}
|
||||
|
||||
log.Println("Deep link opened successfully")
|
||||
return nil
|
||||
}
|
||||
|
||||
// waitForSessionCompletionWithCancellation polls for session completion with cancellation support
|
||||
func waitForSessionCompletionWithCancellation(ctx context.Context, sessionName string, timeout time.Duration) error {
|
||||
log.Printf("Waiting for session %s to complete (timeout: %v)", sessionName, timeout)
|
||||
|
||||
start := time.Now()
|
||||
ticker := time.NewTicker(10 * time.Second) // Check every 10 seconds
|
||||
defer ticker.Stop()
|
||||
|
||||
timeoutCtx, cancel := context.WithTimeout(ctx, timeout)
|
||||
defer cancel()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-timeoutCtx.Done():
|
||||
if timeoutCtx.Err() == context.DeadlineExceeded {
|
||||
return fmt.Errorf("session %s timed out after %v", sessionName, timeout)
|
||||
}
|
||||
return timeoutCtx.Err() // Cancelled
|
||||
case <-ticker.C:
|
||||
elapsed := time.Since(start)
|
||||
log.Printf("Checking session %s status (elapsed: %v)", sessionName, elapsed)
|
||||
|
||||
// Check if session exists and is complete
|
||||
complete, err := isSessionComplete(sessionName)
|
||||
if err != nil {
|
||||
log.Printf("Error checking session %s status: %v", sessionName, err)
|
||||
// Continue polling - session might not be created yet
|
||||
continue
|
||||
}
|
||||
|
||||
if complete {
|
||||
log.Printf("Session %s completed after %v", sessionName, elapsed)
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("Session %s still running (elapsed: %v)", sessionName, elapsed)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// isSessionComplete checks if a session is complete by querying the Goose sessions API
|
||||
func isSessionComplete(sessionName string) (bool, error) {
|
||||
// Try to find the goose CLI binary to query session status
|
||||
goosePath, err := findGooseBinary()
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to find goose CLI binary: %w", err)
|
||||
}
|
||||
|
||||
// Use goose CLI to list sessions and check if our session exists and is complete
|
||||
cmd := exec.Command(goosePath, "sessions", "list", "--format", "json")
|
||||
|
||||
output, err := cmd.Output()
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to list sessions: %w", err)
|
||||
}
|
||||
|
||||
// Parse the JSON output to find our session
|
||||
var sessions []map[string]interface{}
|
||||
if err := json.Unmarshal(output, &sessions); err != nil {
|
||||
return false, fmt.Errorf("failed to parse sessions JSON: %w", err)
|
||||
}
|
||||
|
||||
// Look for our session by name
|
||||
for _, session := range sessions {
|
||||
if name, ok := session["name"].(string); ok && name == sessionName {
|
||||
// Session exists, check if it's complete
|
||||
// A session is considered complete if it's not currently active
|
||||
// We can check this by looking for an "active" field or similar
|
||||
if active, ok := session["active"].(bool); ok {
|
||||
return !active, nil // Complete if not active
|
||||
}
|
||||
|
||||
// If no active field, check for completion indicators
|
||||
// This might vary based on the actual Goose CLI output format
|
||||
if status, ok := session["status"].(string); ok {
|
||||
return status == "completed" || status == "finished" || status == "done", nil
|
||||
}
|
||||
|
||||
// If we found the session but can't determine status, assume it's still running
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Session not found - it might not be created yet, so not complete
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// isForegroundJob checks if a recipe is configured for foreground execution
|
||||
func isForegroundJob(recipePath string) bool {
|
||||
// Simple struct to just check the schedule.foreground field
|
||||
type ScheduleConfig struct {
|
||||
Foreground bool `json:"foreground" yaml:"foreground"`
|
||||
}
|
||||
type MinimalRecipe struct {
|
||||
Schedule *ScheduleConfig `json:"schedule" yaml:"schedule"`
|
||||
}
|
||||
|
||||
content, err := os.ReadFile(recipePath)
|
||||
if err != nil {
|
||||
return false // Default to background if we can't read
|
||||
}
|
||||
|
||||
var recipe MinimalRecipe
|
||||
|
||||
// Try YAML first, then JSON
|
||||
if err := yaml.Unmarshal(content, &recipe); err != nil {
|
||||
if err := json.Unmarshal(content, &recipe); err != nil {
|
||||
return false // Default to background if we can't parse
|
||||
}
|
||||
}
|
||||
|
||||
return recipe.Schedule != nil && recipe.Schedule.Foreground
|
||||
}
|
||||
@@ -2,7 +2,6 @@ package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net"
|
||||
@@ -11,17 +10,14 @@ import (
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"go.temporal.io/api/workflowservice/v1"
|
||||
"go.temporal.io/sdk/activity"
|
||||
"go.temporal.io/sdk/client"
|
||||
"go.temporal.io/sdk/temporal"
|
||||
"go.temporal.io/sdk/worker"
|
||||
"go.temporal.io/sdk/workflow"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -36,6 +32,48 @@ type PortConfig struct {
|
||||
HTTPPort int // HTTP API port
|
||||
}
|
||||
|
||||
// getManagedRecipesDir returns the proper directory for storing managed recipes
|
||||
func getManagedRecipesDir() (string, error) {
|
||||
var baseDir string
|
||||
|
||||
switch runtime.GOOS {
|
||||
case "darwin":
|
||||
// macOS: ~/Library/Application Support/temporal/managed-recipes
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get user home directory: %w", err)
|
||||
}
|
||||
baseDir = filepath.Join(homeDir, "Library", "Application Support", "temporal", "managed-recipes")
|
||||
case "linux":
|
||||
// Linux: ~/.local/share/temporal/managed-recipes
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get user home directory: %w", err)
|
||||
}
|
||||
baseDir = filepath.Join(homeDir, ".local", "share", "temporal", "managed-recipes")
|
||||
case "windows":
|
||||
// Windows: %APPDATA%\temporal\managed-recipes
|
||||
appDataDir := os.Getenv("APPDATA")
|
||||
if appDataDir == "" {
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get user home directory: %w", err)
|
||||
}
|
||||
appDataDir = filepath.Join(homeDir, "AppData", "Roaming")
|
||||
}
|
||||
baseDir = filepath.Join(appDataDir, "temporal", "managed-recipes")
|
||||
default:
|
||||
// Fallback for unknown OS
|
||||
homeDir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get user home directory: %w", err)
|
||||
}
|
||||
baseDir = filepath.Join(homeDir, ".local", "share", "temporal", "managed-recipes")
|
||||
}
|
||||
|
||||
return baseDir, nil
|
||||
}
|
||||
|
||||
// findAvailablePort finds an available port starting from the given port
|
||||
func findAvailablePort(startPort int) (int, error) {
|
||||
for port := startPort; port < startPort+100; port++ {
|
||||
@@ -55,12 +93,12 @@ func findAvailablePorts() (*PortConfig, error) {
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find available port for Temporal server: %w", err)
|
||||
}
|
||||
|
||||
|
||||
uiPort, err := findAvailablePort(8233)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find available port for Temporal UI: %w", err)
|
||||
}
|
||||
|
||||
|
||||
// For HTTP port, check environment variable first
|
||||
httpPort := 8080
|
||||
if portEnv := os.Getenv("PORT"); portEnv != "" {
|
||||
@@ -68,13 +106,13 @@ func findAvailablePorts() (*PortConfig, error) {
|
||||
httpPort = parsed
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Verify HTTP port is available, find alternative if not
|
||||
finalHTTPPort, err := findAvailablePort(httpPort)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find available port for HTTP server: %w", err)
|
||||
}
|
||||
|
||||
|
||||
return &PortConfig{
|
||||
TemporalPort: temporalPort,
|
||||
UIPort: uiPort,
|
||||
@@ -82,92 +120,6 @@ func findAvailablePorts() (*PortConfig, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Global service instance for activities to access
|
||||
var globalService *TemporalService
|
||||
|
||||
// Request/Response types for HTTP API
|
||||
type JobRequest struct {
|
||||
Action string `json:"action"` // create, delete, pause, unpause, list, run_now
|
||||
JobID string `json:"job_id"`
|
||||
CronExpr string `json:"cron"`
|
||||
RecipePath string `json:"recipe_path"`
|
||||
}
|
||||
|
||||
type JobResponse struct {
|
||||
Success bool `json:"success"`
|
||||
Message string `json:"message"`
|
||||
Jobs []JobStatus `json:"jobs,omitempty"`
|
||||
Data interface{} `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
type JobStatus struct {
|
||||
ID string `json:"id"`
|
||||
CronExpr string `json:"cron"`
|
||||
RecipePath string `json:"recipe_path"`
|
||||
LastRun *string `json:"last_run,omitempty"`
|
||||
NextRun *string `json:"next_run,omitempty"`
|
||||
CurrentlyRunning bool `json:"currently_running"`
|
||||
Paused bool `json:"paused"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
}
|
||||
|
||||
type RunNowResponse struct {
|
||||
SessionID string `json:"session_id"`
|
||||
}
|
||||
|
||||
// ensureTemporalServerRunning checks if Temporal server is running and starts it if needed
|
||||
func ensureTemporalServerRunning(ports *PortConfig) error {
|
||||
log.Println("Checking if Temporal server is running...")
|
||||
|
||||
// Check if Temporal server is already running by trying to connect
|
||||
if isTemporalServerRunning(ports.TemporalPort) {
|
||||
log.Printf("Temporal server is already running on port %d", ports.TemporalPort)
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("Temporal server not running, attempting to start it on port %d...", ports.TemporalPort)
|
||||
|
||||
// Find the temporal CLI binary
|
||||
temporalCmd, err := findTemporalCLI()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not find temporal CLI: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Using Temporal CLI at: %s", temporalCmd)
|
||||
|
||||
// Start Temporal server in background
|
||||
cmd := exec.Command(temporalCmd, "server", "start-dev",
|
||||
"--db-filename", "temporal.db",
|
||||
"--port", strconv.Itoa(ports.TemporalPort),
|
||||
"--ui-port", strconv.Itoa(ports.UIPort),
|
||||
"--log-level", "warn")
|
||||
|
||||
// Start the process in background
|
||||
if err := cmd.Start(); err != nil {
|
||||
return fmt.Errorf("failed to start Temporal server: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Temporal server started with PID: %d (port: %d, UI port: %d)",
|
||||
cmd.Process.Pid, ports.TemporalPort, ports.UIPort)
|
||||
|
||||
// Wait for server to be ready (with timeout)
|
||||
timeout := time.After(30 * time.Second)
|
||||
ticker := time.NewTicker(2 * time.Second)
|
||||
defer ticker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-timeout:
|
||||
return fmt.Errorf("timeout waiting for Temporal server to start")
|
||||
case <-ticker.C:
|
||||
if isTemporalServerRunning(ports.TemporalPort) {
|
||||
log.Printf("Temporal server is now ready on port %d", ports.TemporalPort)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// isTemporalServerRunning checks if Temporal server is accessible
|
||||
func isTemporalServerRunning(port int) bool {
|
||||
// Try to create a client connection to check if server is running
|
||||
@@ -179,26 +131,36 @@ func isTemporalServerRunning(port int) bool {
|
||||
return false
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
|
||||
// Try a simple operation to verify the connection works
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
|
||||
|
||||
_, err = c.WorkflowService().GetSystemInfo(ctx, &workflowservice.GetSystemInfoRequest{})
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// findTemporalCLI attempts to find the temporal CLI binary
|
||||
func findTemporalCLI() (string, error) {
|
||||
log.Println("Looking for temporal CLI binary...")
|
||||
|
||||
// First, try to find temporal in PATH using exec.LookPath
|
||||
log.Println("Checking PATH for temporal CLI...")
|
||||
if path, err := exec.LookPath("temporal"); err == nil {
|
||||
log.Printf("Found temporal in PATH at: %s", path)
|
||||
// Verify it's the correct temporal CLI by checking version
|
||||
log.Println("Verifying temporal CLI version...")
|
||||
cmd := exec.Command(path, "--version")
|
||||
if err := cmd.Run(); err == nil {
|
||||
log.Printf("Successfully verified temporal CLI at: %s", path)
|
||||
return path, nil
|
||||
} else {
|
||||
log.Printf("Failed to verify temporal CLI at %s: %v", path, err)
|
||||
}
|
||||
} else {
|
||||
log.Printf("temporal not found in PATH: %v", err)
|
||||
}
|
||||
|
||||
|
||||
// Try using 'which' command to find temporal
|
||||
cmd := exec.Command("which", "temporal")
|
||||
if output, err := cmd.Output(); err == nil {
|
||||
@@ -211,512 +173,128 @@ func findTemporalCLI() (string, error) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// If not found in PATH, try different possible locations for the temporal CLI
|
||||
log.Println("Checking bundled/local locations for temporal CLI...")
|
||||
possiblePaths := []string{
|
||||
"./temporal", // Current directory
|
||||
"./temporal", // Current directory
|
||||
}
|
||||
|
||||
|
||||
// Also try relative to the current executable (most important for bundled apps)
|
||||
if exePath, err := os.Executable(); err == nil {
|
||||
exeDir := filepath.Dir(exePath)
|
||||
possiblePaths = append(possiblePaths,
|
||||
log.Printf("Executable directory: %s", exeDir)
|
||||
additionalPaths := []string{
|
||||
filepath.Join(exeDir, "temporal"),
|
||||
filepath.Join(exeDir, "temporal.exe"), // Windows
|
||||
// Also try one level up (for development)
|
||||
filepath.Join(exeDir, "..", "temporal"),
|
||||
filepath.Join(exeDir, "..", "temporal.exe"),
|
||||
)
|
||||
}
|
||||
possiblePaths = append(possiblePaths, additionalPaths...)
|
||||
log.Printf("Will check these additional paths: %v", additionalPaths)
|
||||
} else {
|
||||
log.Printf("Failed to get executable path: %v", err)
|
||||
}
|
||||
|
||||
|
||||
log.Printf("Checking %d possible paths for temporal CLI", len(possiblePaths))
|
||||
|
||||
// Test each possible path
|
||||
for _, path := range possiblePaths {
|
||||
for i, path := range possiblePaths {
|
||||
log.Printf("Checking path %d/%d: %s", i+1, len(possiblePaths), path)
|
||||
if _, err := os.Stat(path); err == nil {
|
||||
log.Printf("File exists at: %s", path)
|
||||
// File exists, test if it's executable and the right binary
|
||||
cmd := exec.Command(path, "--version")
|
||||
if err := cmd.Run(); err == nil {
|
||||
log.Printf("Successfully verified temporal CLI at: %s", path)
|
||||
return path, nil
|
||||
} else {
|
||||
log.Printf("Failed to verify temporal CLI at %s: %v", path, err)
|
||||
}
|
||||
} else {
|
||||
log.Printf("File does not exist at %s: %v", path, err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return "", fmt.Errorf("temporal CLI not found in PATH or any of the expected locations: %v", possiblePaths)
|
||||
}
|
||||
|
||||
// TemporalService manages the Temporal client and provides HTTP API
|
||||
type TemporalService struct {
|
||||
client client.Client
|
||||
worker worker.Worker
|
||||
scheduleJobs map[string]*JobStatus // In-memory job tracking
|
||||
runningJobs map[string]bool // Track which jobs are currently running
|
||||
ports *PortConfig // Port configuration
|
||||
}
|
||||
// ensureTemporalServerRunning checks if Temporal server is running and starts it if needed
|
||||
func ensureTemporalServerRunning(ports *PortConfig) error {
|
||||
log.Println("Checking if Temporal server is running...")
|
||||
|
||||
// NewTemporalService creates a new Temporal service and ensures Temporal server is running
|
||||
func NewTemporalService() (*TemporalService, error) {
|
||||
// First, find available ports
|
||||
ports, err := findAvailablePorts()
|
||||
// Check if Temporal server is already running by trying to connect
|
||||
if isTemporalServerRunning(ports.TemporalPort) {
|
||||
log.Printf("Temporal server is already running on port %d", ports.TemporalPort)
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Printf("Temporal server not running, attempting to start it on port %d...", ports.TemporalPort)
|
||||
|
||||
// Find the temporal CLI binary
|
||||
temporalCmd, err := findTemporalCLI()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find available ports: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Using ports - Temporal: %d, UI: %d, HTTP: %d",
|
||||
ports.TemporalPort, ports.UIPort, ports.HTTPPort)
|
||||
|
||||
// Ensure Temporal server is running
|
||||
if err := ensureTemporalServerRunning(ports); err != nil {
|
||||
return nil, fmt.Errorf("failed to ensure Temporal server is running: %w", err)
|
||||
log.Printf("ERROR: Could not find temporal CLI: %v", err)
|
||||
return fmt.Errorf("could not find temporal CLI: %w", err)
|
||||
}
|
||||
|
||||
// Create client (Temporal server should now be running)
|
||||
c, err := client.Dial(client.Options{
|
||||
HostPort: fmt.Sprintf("127.0.0.1:%d", ports.TemporalPort),
|
||||
Namespace: Namespace,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create temporal client: %w", err)
|
||||
log.Printf("Using Temporal CLI at: %s", temporalCmd)
|
||||
|
||||
// Start Temporal server in background
|
||||
args := []string{"server", "start-dev",
|
||||
"--db-filename", "temporal.db",
|
||||
"--port", strconv.Itoa(ports.TemporalPort),
|
||||
"--ui-port", strconv.Itoa(ports.UIPort),
|
||||
"--log-level", "warn"}
|
||||
|
||||
log.Printf("Starting Temporal server with command: %s %v", temporalCmd, args)
|
||||
|
||||
cmd := exec.Command(temporalCmd, args...)
|
||||
|
||||
// Properly detach the process so it survives when the parent exits
|
||||
cmd.SysProcAttr = &syscall.SysProcAttr{
|
||||
Setpgid: true, // Create new process group
|
||||
Pgid: 0, // Use process ID as group ID
|
||||
}
|
||||
|
||||
// Create worker
|
||||
w := worker.New(c, TaskQueueName, worker.Options{})
|
||||
w.RegisterWorkflow(GooseJobWorkflow)
|
||||
w.RegisterActivity(ExecuteGooseRecipe)
|
||||
// Redirect stdin/stdout/stderr to avoid hanging
|
||||
cmd.Stdin = nil
|
||||
cmd.Stdout = nil
|
||||
cmd.Stderr = nil
|
||||
|
||||
if err := w.Start(); err != nil {
|
||||
c.Close()
|
||||
return nil, fmt.Errorf("failed to start worker: %w", err)
|
||||
// Start the process
|
||||
if err := cmd.Start(); err != nil {
|
||||
log.Printf("ERROR: Failed to start Temporal server: %v", err)
|
||||
return fmt.Errorf("failed to start Temporal server: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Connected to Temporal server successfully on port %d", ports.TemporalPort)
|
||||
log.Printf("Temporal server started with PID: %d (port: %d, UI port: %d)",
|
||||
cmd.Process.Pid, ports.TemporalPort, ports.UIPort)
|
||||
|
||||
service := &TemporalService{
|
||||
client: c,
|
||||
worker: w,
|
||||
scheduleJobs: make(map[string]*JobStatus),
|
||||
runningJobs: make(map[string]bool),
|
||||
ports: ports,
|
||||
}
|
||||
|
||||
// Set global service for activities
|
||||
globalService = service
|
||||
// Wait for server to be ready (with timeout)
|
||||
log.Println("Waiting for Temporal server to be ready...")
|
||||
timeout := time.After(30 * time.Second)
|
||||
ticker := time.NewTicker(2 * time.Second)
|
||||
defer ticker.Stop()
|
||||
|
||||
return service, nil
|
||||
}
|
||||
|
||||
// Stop gracefully shuts down the Temporal service
|
||||
func (ts *TemporalService) Stop() {
|
||||
log.Println("Shutting down Temporal service...")
|
||||
if ts.worker != nil {
|
||||
ts.worker.Stop()
|
||||
}
|
||||
if ts.client != nil {
|
||||
ts.client.Close()
|
||||
}
|
||||
log.Println("Temporal service stopped")
|
||||
}
|
||||
|
||||
// GetHTTPPort returns the port the service's HTTP API server listens on.
func (ts *TemporalService) GetHTTPPort() int {
	return ts.ports.HTTPPort
}
|
||||
|
||||
// GetTemporalPort returns the gRPC port of the backing Temporal server.
func (ts *TemporalService) GetTemporalPort() int {
	return ts.ports.TemporalPort
}
|
||||
|
||||
// GetUIPort returns the port the Temporal web UI is served on.
func (ts *TemporalService) GetUIPort() int {
	return ts.ports.UIPort
}
|
||||
|
||||
// Workflow definition for executing Goose recipes
|
||||
func GooseJobWorkflow(ctx workflow.Context, jobID, recipePath string) (string, error) {
|
||||
logger := workflow.GetLogger(ctx)
|
||||
logger.Info("Starting Goose job workflow", "jobID", jobID, "recipePath", recipePath)
|
||||
|
||||
ao := workflow.ActivityOptions{
|
||||
StartToCloseTimeout: 2 * time.Hour, // Allow up to 2 hours for job execution
|
||||
RetryPolicy: &temporal.RetryPolicy{
|
||||
InitialInterval: time.Second,
|
||||
BackoffCoefficient: 2.0,
|
||||
MaximumInterval: time.Minute,
|
||||
MaximumAttempts: 3,
|
||||
NonRetryableErrorTypes: []string{"InvalidRecipeError"},
|
||||
},
|
||||
}
|
||||
ctx = workflow.WithActivityOptions(ctx, ao)
|
||||
|
||||
var sessionID string
|
||||
err := workflow.ExecuteActivity(ctx, ExecuteGooseRecipe, jobID, recipePath).Get(ctx, &sessionID)
|
||||
if err != nil {
|
||||
logger.Error("Goose job workflow failed", "jobID", jobID, "error", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
logger.Info("Goose job workflow completed", "jobID", jobID, "sessionID", sessionID)
|
||||
return sessionID, nil
|
||||
}
|
||||
|
||||
// ExecuteGooseRecipe is the Temporal activity that runs a Goose recipe via
// the external "goose-scheduler-executor" binary (resolved through PATH) and
// returns the session ID the executor prints to stdout.
//
// NOTE(review): markJobAsRunning/markJobAsNotRunning mutate the service's
// runningJobs map from activity goroutines while HTTP handlers read it, with
// no visible synchronization — this looks like a data race; consider guarding
// the map with a sync.Mutex on TemporalService.
func ExecuteGooseRecipe(ctx context.Context, jobID, recipePath string) (string, error) {
	logger := activity.GetLogger(ctx)
	logger.Info("Executing Goose recipe", "jobID", jobID, "recipePath", recipePath)

	// Mark job as running at the start
	if globalService != nil {
		globalService.markJobAsRunning(jobID)
		// Ensure we mark it as not running when we're done
		defer globalService.markJobAsNotRunning(jobID)
	}

	// A missing recipe file cannot succeed on retry, so fail with the
	// non-retryable InvalidRecipeError type (see GooseJobWorkflow's policy).
	if _, err := os.Stat(recipePath); os.IsNotExist(err) {
		return "", temporal.NewNonRetryableApplicationError(
			fmt.Sprintf("recipe file not found: %s", recipePath),
			"InvalidRecipeError",
			err,
		)
	}

	// Execute the Goose recipe via the executor binary; using CommandContext
	// means activity cancellation kills the child process.
	cmd := exec.CommandContext(ctx, "goose-scheduler-executor", jobID, recipePath)
	cmd.Env = append(os.Environ(), fmt.Sprintf("GOOSE_JOB_ID=%s", jobID))

	output, err := cmd.Output()
	if err != nil {
		// cmd.Output captures stderr on ExitError; surface it for diagnosis.
		if exitError, ok := err.(*exec.ExitError); ok {
			logger.Error("Recipe execution failed", "jobID", jobID, "stderr", string(exitError.Stderr))
			return "", fmt.Errorf("recipe execution failed: %s", string(exitError.Stderr))
		}
		return "", fmt.Errorf("failed to execute recipe: %w", err)
	}

	// The executor prints the session ID as its stdout output.
	sessionID := strings.TrimSpace(string(output))
	logger.Info("Recipe executed successfully", "jobID", jobID, "sessionID", sessionID)
	return sessionID, nil
}
|
||||
|
||||
// HTTP API handlers
|
||||
|
||||
func (ts *TemporalService) handleJobs(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
if r.Method != http.MethodPost {
|
||||
ts.writeErrorResponse(w, http.StatusMethodNotAllowed, "Method not allowed")
|
||||
return
|
||||
}
|
||||
|
||||
var req JobRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
ts.writeErrorResponse(w, http.StatusBadRequest, fmt.Sprintf("Invalid JSON: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
var resp JobResponse
|
||||
|
||||
switch req.Action {
|
||||
case "create":
|
||||
resp = ts.createSchedule(req)
|
||||
case "delete":
|
||||
resp = ts.deleteSchedule(req)
|
||||
case "pause":
|
||||
resp = ts.pauseSchedule(req)
|
||||
case "unpause":
|
||||
resp = ts.unpauseSchedule(req)
|
||||
case "list":
|
||||
resp = ts.listSchedules()
|
||||
case "run_now":
|
||||
resp = ts.runNow(req)
|
||||
default:
|
||||
resp = JobResponse{Success: false, Message: fmt.Sprintf("Unknown action: %s", req.Action)}
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
json.NewEncoder(w).Encode(resp)
|
||||
}
|
||||
|
||||
// createSchedule registers a Temporal cron schedule for a new job and records
// it in the in-memory scheduleJobs map.
//
// Required request fields: job_id, cron, recipe_path. Fails if the job ID is
// already tracked or the recipe file does not exist on disk.
//
// NOTE(review): scheduleJobs is read and written here from concurrent
// net/http handler goroutines with no visible lock — likely data race;
// consider protecting the map with a sync.Mutex.
func (ts *TemporalService) createSchedule(req JobRequest) JobResponse {
	if req.JobID == "" || req.CronExpr == "" || req.RecipePath == "" {
		return JobResponse{Success: false, Message: "Missing required fields: job_id, cron, recipe_path"}
	}

	// Check if job already exists
	if _, exists := ts.scheduleJobs[req.JobID]; exists {
		return JobResponse{Success: false, Message: fmt.Sprintf("Job with ID '%s' already exists", req.JobID)}
	}

	// Validate recipe file exists
	if _, err := os.Stat(req.RecipePath); os.IsNotExist(err) {
		return JobResponse{Success: false, Message: fmt.Sprintf("Recipe file not found: %s", req.RecipePath)}
	}

	scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)

	// Create Temporal schedule: each firing starts a GooseJobWorkflow whose
	// workflow ID is made unique by the scheduled timestamp template.
	schedule := client.ScheduleOptions{
		ID: scheduleID,
		Spec: client.ScheduleSpec{
			CronExpressions: []string{req.CronExpr},
		},
		Action: &client.ScheduleWorkflowAction{
			ID:        fmt.Sprintf("workflow-%s-{{.ScheduledTime.Unix}}", req.JobID),
			Workflow:  GooseJobWorkflow,
			Args:      []interface{}{req.JobID, req.RecipePath},
			TaskQueue: TaskQueueName,
		},
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	_, err := ts.client.ScheduleClient().Create(ctx, schedule)
	if err != nil {
		return JobResponse{Success: false, Message: fmt.Sprintf("Failed to create schedule: %v", err)}
	}

	// Track job in memory
	jobStatus := &JobStatus{
		ID:               req.JobID,
		CronExpr:         req.CronExpr,
		RecipePath:       req.RecipePath,
		CurrentlyRunning: false,
		Paused:           false,
		CreatedAt:        time.Now(),
	}
	ts.scheduleJobs[req.JobID] = jobStatus

	log.Printf("Created schedule for job: %s", req.JobID)
	return JobResponse{Success: true, Message: "Schedule created successfully"}
}
|
||||
|
||||
// deleteSchedule removes the Temporal schedule for the given job and drops it
// from the in-memory tracking map.
//
// NOTE(review): ts.scheduleJobs is mutated without synchronization while
// other handler goroutines may access it — likely data race; consider a
// sync.Mutex.
func (ts *TemporalService) deleteSchedule(req JobRequest) JobResponse {
	if req.JobID == "" {
		return JobResponse{Success: false, Message: "Missing job_id"}
	}

	scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)
	err := handle.Delete(ctx)
	if err != nil {
		return JobResponse{Success: false, Message: fmt.Sprintf("Failed to delete schedule: %v", err)}
	}

	// Remove from memory
	delete(ts.scheduleJobs, req.JobID)

	log.Printf("Deleted schedule for job: %s", req.JobID)
	return JobResponse{Success: true, Message: "Schedule deleted successfully"}
}
|
||||
|
||||
// pauseSchedule pauses the Temporal schedule for the given job so it stops
// firing until unpaused, and mirrors the paused state into the in-memory map.
//
// NOTE(review): ts.scheduleJobs entries are mutated here without a lock while
// other goroutines may read them — likely data race; consider a sync.Mutex.
func (ts *TemporalService) pauseSchedule(req JobRequest) JobResponse {
	if req.JobID == "" {
		return JobResponse{Success: false, Message: "Missing job_id"}
	}

	scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)
	err := handle.Pause(ctx, client.SchedulePauseOptions{
		Note: "Paused via API",
	})
	if err != nil {
		return JobResponse{Success: false, Message: fmt.Sprintf("Failed to pause schedule: %v", err)}
	}

	// Update in memory
	if job, exists := ts.scheduleJobs[req.JobID]; exists {
		job.Paused = true
	}

	log.Printf("Paused schedule for job: %s", req.JobID)
	return JobResponse{Success: true, Message: "Schedule paused successfully"}
}
|
||||
|
||||
// unpauseSchedule resumes a previously paused Temporal schedule and mirrors
// the state into the in-memory map. Counterpart of pauseSchedule.
//
// NOTE(review): ts.scheduleJobs entries are mutated here without a lock while
// other goroutines may read them — likely data race; consider a sync.Mutex.
func (ts *TemporalService) unpauseSchedule(req JobRequest) JobResponse {
	if req.JobID == "" {
		return JobResponse{Success: false, Message: "Missing job_id"}
	}

	scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)
	err := handle.Unpause(ctx, client.ScheduleUnpauseOptions{
		Note: "Unpaused via API",
	})
	if err != nil {
		return JobResponse{Success: false, Message: fmt.Sprintf("Failed to unpause schedule: %v", err)}
	}

	// Update in memory
	if job, exists := ts.scheduleJobs[req.JobID]; exists {
		job.Paused = false
	}

	log.Printf("Unpaused schedule for job: %s", req.JobID)
	return JobResponse{Success: true, Message: "Schedule unpaused successfully"}
}
|
||||
|
||||
func (ts *TemporalService) listSchedules() JobResponse {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// List all schedules from Temporal
|
||||
iter, err := ts.client.ScheduleClient().List(ctx, client.ScheduleListOptions{})
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to list schedules: %v", err)}
|
||||
}
|
||||
|
||||
var jobs []JobStatus
|
||||
for iter.HasNext() {
|
||||
schedule, err := iter.Next()
|
||||
if err != nil {
|
||||
log.Printf("Error listing schedules: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract job ID from schedule ID
|
||||
if strings.HasPrefix(schedule.ID, "goose-job-") {
|
||||
jobID := strings.TrimPrefix(schedule.ID, "goose-job-")
|
||||
|
||||
// Get additional details from in-memory tracking
|
||||
var jobStatus JobStatus
|
||||
if tracked, exists := ts.scheduleJobs[jobID]; exists {
|
||||
jobStatus = *tracked
|
||||
attemptCount := 0
|
||||
for {
|
||||
select {
|
||||
case <-timeout:
|
||||
log.Printf("ERROR: Timeout waiting for Temporal server to start after %d attempts", attemptCount)
|
||||
return fmt.Errorf("timeout waiting for Temporal server to start")
|
||||
case <-ticker.C:
|
||||
attemptCount++
|
||||
log.Printf("Checking if Temporal server is ready (attempt %d)...", attemptCount)
|
||||
if isTemporalServerRunning(ports.TemporalPort) {
|
||||
log.Printf("Temporal server is now ready on port %d", ports.TemporalPort)
|
||||
return nil
|
||||
} else {
|
||||
// Fallback for schedules not in memory
|
||||
jobStatus = JobStatus{
|
||||
ID: jobID,
|
||||
CreatedAt: time.Now(), // We don't have the real creation time
|
||||
}
|
||||
log.Printf("Temporal server not ready yet (attempt %d)", attemptCount)
|
||||
}
|
||||
|
||||
// Update with Temporal schedule info
|
||||
if len(schedule.Spec.CronExpressions) > 0 {
|
||||
jobStatus.CronExpr = schedule.Spec.CronExpressions[0]
|
||||
}
|
||||
|
||||
// Get detailed schedule information including paused state and running status
|
||||
scheduleHandle := ts.client.ScheduleClient().GetHandle(ctx, schedule.ID)
|
||||
if desc, err := scheduleHandle.Describe(ctx); err == nil {
|
||||
jobStatus.Paused = desc.Schedule.State.Paused
|
||||
|
||||
// Check if there are any running workflows for this job
|
||||
jobStatus.CurrentlyRunning = ts.isJobCurrentlyRunning(ctx, jobID)
|
||||
|
||||
// Update last run time if available
|
||||
if len(desc.Info.RecentActions) > 0 {
|
||||
lastAction := desc.Info.RecentActions[len(desc.Info.RecentActions)-1]
|
||||
if !lastAction.ActualTime.IsZero() {
|
||||
lastRunStr := lastAction.ActualTime.Format(time.RFC3339)
|
||||
jobStatus.LastRun = &lastRunStr
|
||||
}
|
||||
}
|
||||
|
||||
// Update next run time if available - this field may not exist in older SDK versions
|
||||
// We'll skip this for now to avoid compilation errors
|
||||
} else {
|
||||
log.Printf("Warning: Could not get detailed info for schedule %s: %v", schedule.ID, err)
|
||||
}
|
||||
|
||||
// Update in-memory tracking with latest info
|
||||
ts.scheduleJobs[jobID] = &jobStatus
|
||||
|
||||
jobs = append(jobs, jobStatus)
|
||||
}
|
||||
}
|
||||
|
||||
return JobResponse{Success: true, Jobs: jobs}
|
||||
}
|
||||
|
||||
// isJobCurrentlyRunning checks if there are any running workflows for the given job ID
|
||||
func (ts *TemporalService) isJobCurrentlyRunning(ctx context.Context, jobID string) bool {
|
||||
// Check our in-memory tracking of running jobs
|
||||
if running, exists := ts.runningJobs[jobID]; exists && running {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// markJobAsRunning records in the in-memory runningJobs map that a workflow
// for jobID is executing; paired with a deferred markJobAsNotRunning in the
// activity.
//
// NOTE(review): runningJobs is written here from activity goroutines and read
// by HTTP handlers without a lock — likely data race; consider a sync.Mutex.
func (ts *TemporalService) markJobAsRunning(jobID string) {
	ts.runningJobs[jobID] = true
	log.Printf("Marked job %s as running", jobID)
}
|
||||
|
||||
// markJobAsNotRunning removes jobID from the in-memory runningJobs map once
// its workflow finishes (deleting the key reads back as "not running").
//
// NOTE(review): runningJobs is mutated here from activity goroutines and read
// by HTTP handlers without a lock — likely data race; consider a sync.Mutex.
func (ts *TemporalService) markJobAsNotRunning(jobID string) {
	delete(ts.runningJobs, jobID)
	log.Printf("Marked job %s as not running", jobID)
}
|
||||
|
||||
func (ts *TemporalService) runNow(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
// Get job details
|
||||
job, exists := ts.scheduleJobs[req.JobID]
|
||||
if !exists {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' not found", req.JobID)}
|
||||
}
|
||||
|
||||
// Execute workflow immediately
|
||||
workflowOptions := client.StartWorkflowOptions{
|
||||
ID: fmt.Sprintf("manual-%s-%d", req.JobID, time.Now().Unix()),
|
||||
TaskQueue: TaskQueueName,
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
we, err := ts.client.ExecuteWorkflow(ctx, workflowOptions, GooseJobWorkflow, req.JobID, job.RecipePath)
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to start workflow: %v", err)}
|
||||
}
|
||||
|
||||
// Don't wait for completion in run_now, just return the workflow ID
|
||||
log.Printf("Manual execution started for job: %s, workflow: %s", req.JobID, we.GetID())
|
||||
return JobResponse{
|
||||
Success: true,
|
||||
Message: "Job execution started",
|
||||
Data: RunNowResponse{SessionID: we.GetID()}, // Return workflow ID as session ID for now
|
||||
}
|
||||
}
|
||||
|
||||
func (ts *TemporalService) writeErrorResponse(w http.ResponseWriter, statusCode int, message string) {
|
||||
w.WriteHeader(statusCode)
|
||||
json.NewEncoder(w).Encode(JobResponse{Success: false, Message: message})
|
||||
}
|
||||
|
||||
func (ts *TemporalService) handleHealth(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
json.NewEncoder(w).Encode(map[string]string{"status": "healthy"})
|
||||
}
|
||||
|
||||
// handlePorts returns the port configuration for this service
|
||||
func (ts *TemporalService) handlePorts(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
|
||||
portInfo := map[string]int{
|
||||
"http_port": ts.ports.HTTPPort,
|
||||
"temporal_port": ts.ports.TemporalPort,
|
||||
"ui_port": ts.ports.UIPort,
|
||||
}
|
||||
|
||||
json.NewEncoder(w).Encode(portInfo)
|
||||
}
|
||||
|
||||
func main() {
|
||||
@@ -755,6 +333,9 @@ func main() {
|
||||
<-sigChan
|
||||
log.Println("Received shutdown signal")
|
||||
|
||||
// Kill all managed processes first
|
||||
globalProcessManager.KillAllProcesses()
|
||||
|
||||
// Shutdown HTTP server
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
@@ -774,4 +355,4 @@ func main() {
|
||||
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
log.Fatalf("HTTP server failed: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
287
temporal-service/process_manager.go
Normal file
287
temporal-service/process_manager.go
Normal file
@@ -0,0 +1,287 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"syscall"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ProcessManager tracks and manages spawned processes
type ProcessManager struct {
	processes map[string]*ManagedProcess // keyed by job ID; at most one tracked process per job
	mutex     sync.RWMutex               // guards processes for concurrent access
}
|
||||
|
||||
// ManagedProcess represents a process being managed by the ProcessManager
type ManagedProcess struct {
	JobID     string             // job this process was spawned for
	Process   *os.Process        // handle to the spawned OS process
	Cancel    context.CancelFunc // cancels the context driving the process; may be nil
	StartTime time.Time          // when the process was registered with the manager
}
|
||||
|
||||
// Global process manager instance
// Shared process registry; safe for concurrent use via its internal mutex.
var globalProcessManager = &ProcessManager{
	processes: make(map[string]*ManagedProcess),
}
|
||||
|
||||
// AddProcess adds a process to be managed
|
||||
func (pm *ProcessManager) AddProcess(jobID string, process *os.Process, cancel context.CancelFunc) {
|
||||
pm.mutex.Lock()
|
||||
defer pm.mutex.Unlock()
|
||||
|
||||
pm.processes[jobID] = &ManagedProcess{
|
||||
JobID: jobID,
|
||||
Process: process,
|
||||
Cancel: cancel,
|
||||
StartTime: time.Now(),
|
||||
}
|
||||
log.Printf("Added process %d for job %s to process manager", process.Pid, jobID)
|
||||
}
|
||||
|
||||
// RemoveProcess removes a process from management
|
||||
func (pm *ProcessManager) RemoveProcess(jobID string) {
|
||||
pm.mutex.Lock()
|
||||
defer pm.mutex.Unlock()
|
||||
|
||||
if mp, exists := pm.processes[jobID]; exists {
|
||||
log.Printf("Removed process %d for job %s from process manager", mp.Process.Pid, jobID)
|
||||
delete(pm.processes, jobID)
|
||||
}
|
||||
}
|
||||
|
||||
// KillProcess kills a specific process and its children
|
||||
func (pm *ProcessManager) KillProcess(jobID string) error {
|
||||
pm.mutex.Lock()
|
||||
defer pm.mutex.Unlock()
|
||||
|
||||
mp, exists := pm.processes[jobID]
|
||||
if !exists {
|
||||
return fmt.Errorf("no process found for job %s", jobID)
|
||||
}
|
||||
|
||||
log.Printf("Killing process %d for job %s", mp.Process.Pid, jobID)
|
||||
|
||||
// Cancel the context first
|
||||
if mp.Cancel != nil {
|
||||
mp.Cancel()
|
||||
}
|
||||
|
||||
// Kill the process and its children
|
||||
if err := killProcessGroup(mp.Process); err != nil {
|
||||
log.Printf("Error killing process group for job %s: %v", jobID, err)
|
||||
return err
|
||||
}
|
||||
|
||||
delete(pm.processes, jobID)
|
||||
return nil
|
||||
}
|
||||
|
||||
// KillAllProcesses kills all managed processes
|
||||
func (pm *ProcessManager) KillAllProcesses() {
|
||||
pm.mutex.Lock()
|
||||
defer pm.mutex.Unlock()
|
||||
|
||||
log.Printf("Killing all %d managed processes", len(pm.processes))
|
||||
|
||||
for jobID, mp := range pm.processes {
|
||||
log.Printf("Killing process %d for job %s", mp.Process.Pid, jobID)
|
||||
|
||||
if mp.Cancel != nil {
|
||||
mp.Cancel()
|
||||
}
|
||||
|
||||
if err := killProcessGroup(mp.Process); err != nil {
|
||||
log.Printf("Error killing process group for job %s: %v", jobID, err)
|
||||
}
|
||||
}
|
||||
|
||||
pm.processes = make(map[string]*ManagedProcess)
|
||||
}
|
||||
|
||||
// ListProcesses returns a copy of the current process map
|
||||
func (pm *ProcessManager) ListProcesses() map[string]*ManagedProcess {
|
||||
pm.mutex.RLock()
|
||||
defer pm.mutex.RUnlock()
|
||||
|
||||
result := make(map[string]*ManagedProcess)
|
||||
for k, v := range pm.processes {
|
||||
result[k] = v
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// killProcessGroup kills a process and all its children
//
// On Windows it delegates to `taskkill /F /T`, which terminates the whole
// process tree and propagates the command's error. On Unix-like systems it
// escalates SIGTERM -> (1s grace) -> SIGKILL, targeting both the process
// group (-pid) and the process itself, with process.Kill() as a final
// fallback; individual signal failures are logged but never returned, so
// the Unix path always reports success.
//
// NOTE(review): syscall.Kill and the Unix signal constants are not defined
// for GOOS=windows, so this file presumably relies on build constraints or
// Unix-only compilation despite the "windows" case — confirm.
// NOTE(review): signaling -pid only reaches children if the child was
// started in its own process group (Setpgid) — confirm at the spawn site.
func killProcessGroup(process *os.Process) error {
	// A nil handle is treated as "already gone".
	if process == nil {
		return nil
	}

	pid := process.Pid
	log.Printf("Attempting to kill process group for PID %d", pid)

	switch runtime.GOOS {
	case "windows":
		// On Windows, kill the process tree
		cmd := exec.Command("taskkill", "/F", "/T", "/PID", fmt.Sprintf("%d", pid))
		if err := cmd.Run(); err != nil {
			log.Printf("Failed to kill Windows process tree for PID %d: %v", pid, err)
			return err
		}
		log.Printf("Successfully killed Windows process tree for PID %d", pid)
		return nil
	default:
		// On Unix-like systems, kill the process group more aggressively
		log.Printf("Killing Unix process group for PID %d", pid)

		// First, try to kill the entire process group with SIGTERM
		// (negative PID addresses the whole group).
		if err := syscall.Kill(-pid, syscall.SIGTERM); err != nil {
			log.Printf("Failed to send SIGTERM to process group -%d: %v", pid, err)
		} else {
			log.Printf("Sent SIGTERM to process group -%d", pid)
		}

		// Also try to kill the main process directly
		if err := syscall.Kill(pid, syscall.SIGTERM); err != nil {
			log.Printf("Failed to send SIGTERM to process %d: %v", pid, err)
		} else {
			log.Printf("Sent SIGTERM to process %d", pid)
		}

		// Give processes a brief moment to terminate gracefully
		time.Sleep(1 * time.Second)

		// Force kill the process group with SIGKILL
		if err := syscall.Kill(-pid, syscall.SIGKILL); err != nil {
			log.Printf("Failed to send SIGKILL to process group -%d: %v", pid, err)
		} else {
			log.Printf("Sent SIGKILL to process group -%d", pid)
		}

		// Force kill the main process with SIGKILL
		if err := syscall.Kill(pid, syscall.SIGKILL); err != nil {
			log.Printf("Failed to send SIGKILL to process %d: %v", pid, err)
		} else {
			log.Printf("Sent SIGKILL to process %d", pid)
		}

		// Also try using the process.Kill() method as a fallback
		if err := process.Kill(); err != nil {
			log.Printf("Failed to kill process using process.Kill(): %v", err)
		} else {
			log.Printf("Successfully killed process using process.Kill()")
		}

		log.Printf("Completed kill attempts for process group %d", pid)
		return nil
	}
}
|
||||
|
||||
// FindAndKillProcessesByPattern finds and kills processes related to a job by searching for patterns
//
// Best-effort sweep used after the tracked process has been killed: it
// scans the process table (pgrep on darwin/linux, tasklist on windows) for
// command lines mentioning the job and signals each match. Returns the
// number of processes that accepted a SIGTERM (Unix) or a successful
// taskkill (Windows); all failures are logged, never returned.
//
// NOTE(review): the bare jobID is one of the pgrep patterns — a short or
// generic job ID could match unrelated processes. Confirm job IDs are
// unique enough (e.g. UUID-like) before relying on this.
func FindAndKillProcessesByPattern(jobID string) int {
	log.Printf("Searching for additional processes to kill for job %s", jobID)

	killedCount := 0

	switch runtime.GOOS {
	case "darwin", "linux":
		// Search for goose processes that might be related to this job
		patterns := []string{
			fmt.Sprintf("scheduled-%s", jobID), // Session name pattern
			fmt.Sprintf("GOOSE_JOB_ID=%s", jobID), // Environment variable pattern
			jobID, // Job ID itself
		}

		for _, pattern := range patterns {
			// Use pgrep to find processes
			cmd := exec.Command("pgrep", "-f", pattern)
			output, err := cmd.Output()
			if err != nil {
				// pgrep exits non-zero when nothing matches; treat as "none found"
				log.Printf("No processes found for pattern '%s': %v", pattern, err)
				continue
			}

			pidStr := strings.TrimSpace(string(output))
			if pidStr == "" {
				continue
			}

			// pgrep prints one PID per line
			pids := strings.Split(pidStr, "\n")
			for _, pidStr := range pids {
				if pidStr == "" {
					continue
				}

				pid, err := strconv.Atoi(pidStr)
				if err != nil {
					log.Printf("Invalid PID '%s': %v", pidStr, err)
					continue
				}

				log.Printf("Found process %d matching pattern '%s' for job %s", pid, pattern, jobID)

				// Kill the process (graceful first; only SIGTERM successes count)
				if err := syscall.Kill(pid, syscall.SIGTERM); err != nil {
					log.Printf("Failed to send SIGTERM to PID %d: %v", pid, err)
				} else {
					log.Printf("Sent SIGTERM to PID %d", pid)
					killedCount++
				}

				// Wait a moment then force kill
				time.Sleep(500 * time.Millisecond)
				if err := syscall.Kill(pid, syscall.SIGKILL); err != nil {
					log.Printf("Failed to send SIGKILL to PID %d: %v", pid, err)
				} else {
					log.Printf("Sent SIGKILL to PID %d", pid)
				}
			}
		}

	case "windows":
		// On Windows, search for goose.exe processes
		sessionPattern := fmt.Sprintf("scheduled-%s", jobID)

		// Use tasklist to find processes
		cmd := exec.Command("tasklist", "/FI", "IMAGENAME eq goose.exe", "/FO", "CSV")
		output, err := cmd.Output()
		if err != nil {
			log.Printf("Failed to list Windows processes: %v", err)
			return killedCount
		}

		lines := strings.Split(string(output), "\n")
		for _, line := range lines {
			if strings.Contains(line, sessionPattern) || strings.Contains(line, jobID) {
				// Extract PID from CSV format (second quoted column)
				fields := strings.Split(line, ",")
				if len(fields) >= 2 {
					pidStr := strings.Trim(fields[1], "\"")
					if pid, err := strconv.Atoi(pidStr); err == nil {
						log.Printf("Found Windows process %d for job %s", pid, jobID)

						// Kill the process
						killCmd := exec.Command("taskkill", "/F", "/PID", fmt.Sprintf("%d", pid))
						if err := killCmd.Run(); err != nil {
							log.Printf("Failed to kill Windows process %d: %v", pid, err)
						} else {
							log.Printf("Killed Windows process %d", pid)
							killedCount++
						}
					}
				}
			}
		}
	}

	log.Printf("Killed %d additional processes for job %s", killedCount, jobID)
	return killedCount
}
|
||||
716
temporal-service/schedule.go
Normal file
716
temporal-service/schedule.go
Normal file
@@ -0,0 +1,716 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"go.temporal.io/sdk/client"
|
||||
)
|
||||
|
||||
// JobStatus is the external representation of a scheduled job, combining
// Temporal schedule state with locally tracked run information.
type JobStatus struct {
	ID               string    `json:"id"`
	CronExpr         string    `json:"cron"`
	RecipePath       string    `json:"recipe_path"`
	LastRun          *string   `json:"last_run,omitempty"` // RFC3339; most recent scheduled or manual run, if any
	NextRun          *string   `json:"next_run,omitempty"` // RFC3339; may be unset
	CurrentlyRunning bool      `json:"currently_running"`
	Paused           bool      `json:"paused"`
	CreatedAt        time.Time `json:"created_at"`
	ExecutionMode    *string   `json:"execution_mode,omitempty"` // "foreground" or "background"
	LastManualRun    *string   `json:"last_manual_run,omitempty"` // Track manual runs separately
}

// Request/Response types for HTTP API

// JobRequest is the request envelope for every job endpoint; which fields
// are required depends on Action.
type JobRequest struct {
	Action        string `json:"action"` // create, delete, pause, unpause, list, run_now, kill_job, update
	JobID         string `json:"job_id"`
	CronExpr      string `json:"cron"`
	RecipePath    string `json:"recipe_path"`
	ExecutionMode string `json:"execution_mode,omitempty"` // "foreground" or "background"
}

// JobResponse is the uniform response envelope for every job endpoint.
type JobResponse struct {
	Success bool        `json:"success"`
	Message string      `json:"message"`
	Jobs    []JobStatus `json:"jobs,omitempty"` // populated by the list action
	Data    interface{} `json:"data,omitempty"` // action-specific payload (e.g. RunNowResponse)
}

// RunNowResponse carries the identifier of a manually started run.
type RunNowResponse struct {
	SessionID string `json:"session_id"` // workflow ID reported as the session ID
}
|
||||
|
||||
// createSchedule handles the creation of a new schedule
|
||||
func (ts *TemporalService) createSchedule(req JobRequest) JobResponse {
|
||||
if req.JobID == "" || req.CronExpr == "" || req.RecipePath == "" {
|
||||
return JobResponse{Success: false, Message: "Missing required fields: job_id, cron, recipe_path"}
|
||||
}
|
||||
|
||||
// Check if job already exists
|
||||
if _, exists := ts.scheduleJobs[req.JobID]; exists {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Job with ID '%s' already exists", req.JobID)}
|
||||
}
|
||||
|
||||
// Validate and copy recipe file to managed storage
|
||||
managedRecipePath, recipeContent, err := ts.storeRecipeForSchedule(req.JobID, req.RecipePath)
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to store recipe: %v", err)}
|
||||
}
|
||||
|
||||
scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)
|
||||
|
||||
// Prepare metadata to store with the schedule as a JSON string in the Note field
|
||||
executionMode := req.ExecutionMode
|
||||
if executionMode == "" {
|
||||
executionMode = "background" // Default to background if not specified
|
||||
}
|
||||
|
||||
scheduleMetadata := map[string]interface{}{
|
||||
"job_id": req.JobID,
|
||||
"cron_expr": req.CronExpr,
|
||||
"recipe_path": managedRecipePath, // Use managed path
|
||||
"original_path": req.RecipePath, // Keep original for reference
|
||||
"execution_mode": executionMode,
|
||||
"created_at": time.Now().Format(time.RFC3339),
|
||||
}
|
||||
|
||||
// For small recipes, embed content directly in metadata
|
||||
if len(recipeContent) < 8192 { // 8KB limit for embedding
|
||||
scheduleMetadata["recipe_content"] = string(recipeContent)
|
||||
log.Printf("Embedded recipe content in metadata for job %s (size: %d bytes)", req.JobID, len(recipeContent))
|
||||
} else {
|
||||
log.Printf("Recipe too large for embedding, using managed file for job %s (size: %d bytes)", req.JobID, len(recipeContent))
|
||||
}
|
||||
|
||||
metadataJSON, err := json.Marshal(scheduleMetadata)
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to encode metadata: %v", err)}
|
||||
}
|
||||
|
||||
// Create Temporal schedule with metadata in Note field
|
||||
schedule := client.ScheduleOptions{
|
||||
ID: scheduleID,
|
||||
Spec: client.ScheduleSpec{
|
||||
CronExpressions: []string{req.CronExpr},
|
||||
},
|
||||
Action: &client.ScheduleWorkflowAction{
|
||||
ID: fmt.Sprintf("workflow-%s-{{.ScheduledTime.Unix}}", req.JobID),
|
||||
Workflow: GooseJobWorkflow,
|
||||
Args: []interface{}{req.JobID, req.RecipePath},
|
||||
TaskQueue: TaskQueueName,
|
||||
},
|
||||
Note: string(metadataJSON), // Store metadata as JSON in the Note field
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
_, err = ts.client.ScheduleClient().Create(ctx, schedule)
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to create schedule: %v", err)}
|
||||
}
|
||||
|
||||
// Track job in memory - ensure execution mode has a default value
|
||||
jobStatus := &JobStatus{
|
||||
ID: req.JobID,
|
||||
CronExpr: req.CronExpr,
|
||||
RecipePath: req.RecipePath,
|
||||
CurrentlyRunning: false,
|
||||
Paused: false,
|
||||
CreatedAt: time.Now(),
|
||||
ExecutionMode: &executionMode,
|
||||
}
|
||||
ts.scheduleJobs[req.JobID] = jobStatus
|
||||
|
||||
log.Printf("Created schedule for job: %s", req.JobID)
|
||||
return JobResponse{Success: true, Message: "Schedule created successfully"}
|
||||
}
|
||||
|
||||
// deleteSchedule handles the deletion of a schedule
|
||||
func (ts *TemporalService) deleteSchedule(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)
|
||||
err := handle.Delete(ctx)
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to delete schedule: %v", err)}
|
||||
}
|
||||
|
||||
// Clean up managed recipe files
|
||||
ts.cleanupManagedRecipe(req.JobID)
|
||||
|
||||
// Remove from memory
|
||||
delete(ts.scheduleJobs, req.JobID)
|
||||
|
||||
log.Printf("Deleted schedule for job: %s", req.JobID)
|
||||
return JobResponse{Success: true, Message: "Schedule deleted successfully"}
|
||||
}
|
||||
|
||||
// pauseSchedule handles pausing a schedule
|
||||
func (ts *TemporalService) pauseSchedule(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)
|
||||
err := handle.Pause(ctx, client.SchedulePauseOptions{
|
||||
Note: "Paused via API",
|
||||
})
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to pause schedule: %v", err)}
|
||||
}
|
||||
|
||||
// Update in memory
|
||||
if job, exists := ts.scheduleJobs[req.JobID]; exists {
|
||||
job.Paused = true
|
||||
}
|
||||
|
||||
log.Printf("Paused schedule for job: %s", req.JobID)
|
||||
return JobResponse{Success: true, Message: "Schedule paused successfully"}
|
||||
}
|
||||
|
||||
// unpauseSchedule handles unpausing a schedule
|
||||
func (ts *TemporalService) unpauseSchedule(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)
|
||||
err := handle.Unpause(ctx, client.ScheduleUnpauseOptions{
|
||||
Note: "Unpaused via API",
|
||||
})
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to unpause schedule: %v", err)}
|
||||
}
|
||||
|
||||
// Update in memory
|
||||
if job, exists := ts.scheduleJobs[req.JobID]; exists {
|
||||
job.Paused = false
|
||||
}
|
||||
|
||||
log.Printf("Unpaused schedule for job: %s", req.JobID)
|
||||
return JobResponse{Success: true, Message: "Schedule unpaused successfully"}
|
||||
}
|
||||
|
||||
// updateSchedule handles updating a schedule
//
// Changes the cron expression of an existing schedule in place, keeping the
// JSON metadata stored in the schedule's Note field in sync. Updates are
// refused while the job is running; the workflow action itself is not
// modified here.
func (ts *TemporalService) updateSchedule(req JobRequest) JobResponse {
	if req.JobID == "" || req.CronExpr == "" {
		return JobResponse{Success: false, Message: "Missing required fields: job_id, cron"}
	}

	// Check if job exists
	job, exists := ts.scheduleJobs[req.JobID]
	if !exists {
		return JobResponse{Success: false, Message: fmt.Sprintf("Job with ID '%s' not found", req.JobID)}
	}

	// Check if job is currently running
	if job.CurrentlyRunning {
		return JobResponse{Success: false, Message: fmt.Sprintf("Cannot update schedule '%s' while it's currently running", req.JobID)}
	}

	scheduleID := fmt.Sprintf("goose-job-%s", req.JobID)

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Get the existing schedule handle
	handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)

	// Update the schedule with new cron expression while preserving metadata
	err := handle.Update(ctx, client.ScheduleUpdateOptions{
		DoUpdate: func(input client.ScheduleUpdateInput) (*client.ScheduleUpdate, error) {
			// Update the cron expression
			input.Description.Schedule.Spec.CronExpressions = []string{req.CronExpr}

			// Update the cron expression in metadata stored in Note field.
			// Parse/marshal failures leave the Note untouched (best-effort sync).
			if input.Description.Schedule.State.Note != "" {
				var metadata map[string]interface{}
				if err := json.Unmarshal([]byte(input.Description.Schedule.State.Note), &metadata); err == nil {
					metadata["cron_expr"] = req.CronExpr
					if updatedMetadataJSON, err := json.Marshal(metadata); err == nil {
						input.Description.Schedule.State.Note = string(updatedMetadataJSON)
					}
				}
			}

			return &client.ScheduleUpdate{
				Schedule: &input.Description.Schedule,
			}, nil
		},
	})

	if err != nil {
		return JobResponse{Success: false, Message: fmt.Sprintf("Failed to update schedule: %v", err)}
	}

	// Update in memory
	job.CronExpr = req.CronExpr

	log.Printf("Updated schedule for job: %s with new cron: %s", req.JobID, req.CronExpr)
	return JobResponse{Success: true, Message: "Schedule updated successfully"}
}
|
||||
|
||||
// listSchedules lists all schedules
|
||||
func (ts *TemporalService) listSchedules() JobResponse {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// List all schedules from Temporal
|
||||
iter, err := ts.client.ScheduleClient().List(ctx, client.ScheduleListOptions{})
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to list schedules: %v", err)}
|
||||
}
|
||||
|
||||
var jobs []JobStatus
|
||||
for iter.HasNext() {
|
||||
schedule, err := iter.Next()
|
||||
if err != nil {
|
||||
log.Printf("Error listing schedules: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract job ID from schedule ID
|
||||
if strings.HasPrefix(schedule.ID, "goose-job-") {
|
||||
jobID := strings.TrimPrefix(schedule.ID, "goose-job-")
|
||||
|
||||
// Get detailed schedule information to access metadata
|
||||
scheduleHandle := ts.client.ScheduleClient().GetHandle(ctx, schedule.ID)
|
||||
desc, err := scheduleHandle.Describe(ctx)
|
||||
if err != nil {
|
||||
log.Printf("Warning: Could not get detailed info for schedule %s: %v", schedule.ID, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Initialize job status with defaults
|
||||
jobStatus := JobStatus{
|
||||
ID: jobID,
|
||||
CurrentlyRunning: ts.isJobCurrentlyRunning(ctx, jobID),
|
||||
Paused: desc.Schedule.State.Paused,
|
||||
CreatedAt: time.Now(), // Fallback if not in metadata
|
||||
}
|
||||
|
||||
// Extract metadata from the schedule's Note field (stored as JSON)
|
||||
if desc.Schedule.State.Note != "" {
|
||||
var metadata map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(desc.Schedule.State.Note), &metadata); err == nil {
|
||||
// Extract cron expression
|
||||
if cronExpr, ok := metadata["cron_expr"].(string); ok {
|
||||
jobStatus.CronExpr = cronExpr
|
||||
} else if len(desc.Schedule.Spec.CronExpressions) > 0 {
|
||||
// Fallback to spec if not in metadata
|
||||
jobStatus.CronExpr = desc.Schedule.Spec.CronExpressions[0]
|
||||
}
|
||||
|
||||
// Extract recipe path
|
||||
if recipePath, ok := metadata["recipe_path"].(string); ok {
|
||||
jobStatus.RecipePath = recipePath
|
||||
}
|
||||
|
||||
// Extract execution mode
|
||||
if executionMode, ok := metadata["execution_mode"].(string); ok {
|
||||
jobStatus.ExecutionMode = &executionMode
|
||||
}
|
||||
|
||||
// Extract creation time
|
||||
if createdAtStr, ok := metadata["created_at"].(string); ok {
|
||||
if createdAt, err := time.Parse(time.RFC3339, createdAtStr); err == nil {
|
||||
jobStatus.CreatedAt = createdAt
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Printf("Failed to parse metadata from Note field for schedule %s: %v", schedule.ID, err)
|
||||
// Fallback to spec values
|
||||
if len(desc.Schedule.Spec.CronExpressions) > 0 {
|
||||
jobStatus.CronExpr = desc.Schedule.Spec.CronExpressions[0]
|
||||
}
|
||||
defaultMode := "background"
|
||||
jobStatus.ExecutionMode = &defaultMode
|
||||
}
|
||||
} else {
|
||||
// Fallback for schedules without metadata (legacy schedules)
|
||||
log.Printf("Schedule %s has no metadata, using fallback values", schedule.ID)
|
||||
if len(desc.Schedule.Spec.CronExpressions) > 0 {
|
||||
jobStatus.CronExpr = desc.Schedule.Spec.CronExpressions[0]
|
||||
}
|
||||
// For legacy schedules, we can't recover recipe path or execution mode
|
||||
defaultMode := "background"
|
||||
jobStatus.ExecutionMode = &defaultMode
|
||||
}
|
||||
|
||||
// Update last run time - use the most recent between scheduled and manual runs
|
||||
var mostRecentRun *string
|
||||
|
||||
// Check scheduled runs from Temporal
|
||||
if len(desc.Info.RecentActions) > 0 {
|
||||
lastAction := desc.Info.RecentActions[len(desc.Info.RecentActions)-1]
|
||||
if !lastAction.ActualTime.IsZero() {
|
||||
scheduledRunStr := lastAction.ActualTime.Format(time.RFC3339)
|
||||
mostRecentRun = &scheduledRunStr
|
||||
log.Printf("Job %s scheduled run: %s", jobID, scheduledRunStr)
|
||||
}
|
||||
}
|
||||
|
||||
// Check manual runs from our in-memory tracking (if available)
|
||||
if tracked, exists := ts.scheduleJobs[jobID]; exists && tracked.LastManualRun != nil {
|
||||
log.Printf("Job %s manual run: %s", jobID, *tracked.LastManualRun)
|
||||
|
||||
// Compare times if we have both
|
||||
if mostRecentRun != nil {
|
||||
scheduledTime, err1 := time.Parse(time.RFC3339, *mostRecentRun)
|
||||
manualTime, err2 := time.Parse(time.RFC3339, *tracked.LastManualRun)
|
||||
|
||||
if err1 == nil && err2 == nil {
|
||||
if manualTime.After(scheduledTime) {
|
||||
mostRecentRun = tracked.LastManualRun
|
||||
log.Printf("Job %s: manual run is more recent", jobID)
|
||||
} else {
|
||||
log.Printf("Job %s: scheduled run is more recent", jobID)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Only manual run available
|
||||
mostRecentRun = tracked.LastManualRun
|
||||
log.Printf("Job %s: only manual run available", jobID)
|
||||
}
|
||||
}
|
||||
|
||||
if mostRecentRun != nil {
|
||||
jobStatus.LastRun = mostRecentRun
|
||||
} else {
|
||||
log.Printf("Job %s has no runs (scheduled or manual)", jobID)
|
||||
}
|
||||
|
||||
// Update in-memory tracking with latest info for manual run tracking
|
||||
ts.scheduleJobs[jobID] = &jobStatus
|
||||
|
||||
jobs = append(jobs, jobStatus)
|
||||
}
|
||||
}
|
||||
|
||||
return JobResponse{Success: true, Jobs: jobs}
|
||||
}
|
||||
|
||||
// runNow executes a job immediately
|
||||
func (ts *TemporalService) runNow(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
// Get job details
|
||||
job, exists := ts.scheduleJobs[req.JobID]
|
||||
if !exists {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' not found", req.JobID)}
|
||||
}
|
||||
|
||||
// Record the manual run time
|
||||
now := time.Now()
|
||||
manualRunStr := now.Format(time.RFC3339)
|
||||
job.LastManualRun = &manualRunStr
|
||||
log.Printf("Recording manual run for job %s at %s", req.JobID, manualRunStr)
|
||||
|
||||
// Execute workflow immediately
|
||||
workflowOptions := client.StartWorkflowOptions{
|
||||
ID: fmt.Sprintf("manual-%s-%d", req.JobID, now.Unix()),
|
||||
TaskQueue: TaskQueueName,
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
we, err := ts.client.ExecuteWorkflow(ctx, workflowOptions, GooseJobWorkflow, req.JobID, job.RecipePath)
|
||||
if err != nil {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Failed to start workflow: %v", err)}
|
||||
}
|
||||
|
||||
// Track the workflow for this job
|
||||
ts.addRunningWorkflow(req.JobID, we.GetID())
|
||||
|
||||
// Don't wait for completion in run_now, just return the workflow ID
|
||||
log.Printf("Manual execution started for job: %s, workflow: %s", req.JobID, we.GetID())
|
||||
return JobResponse{
|
||||
Success: true,
|
||||
Message: "Job execution started",
|
||||
Data: RunNowResponse{SessionID: we.GetID()}, // Return workflow ID as session ID for now
|
||||
}
|
||||
}
|
||||
|
||||
// killJob kills a running job
//
// Four-step teardown: (1) kill the OS process tracked by the global
// process manager, (2) terminate every Temporal workflow registered for
// the job, (3) sweep the process table for stragglers matching the job's
// patterns, (4) clear the in-memory running flag. Individual step failures
// are logged but do not abort the remaining steps; the response summarizes
// what was actually killed.
func (ts *TemporalService) killJob(req JobRequest) JobResponse {
	if req.JobID == "" {
		return JobResponse{Success: false, Message: "Missing job_id"}
	}

	// Check if job exists
	_, exists := ts.scheduleJobs[req.JobID]
	if !exists {
		return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' not found", req.JobID)}
	}

	// Check if job is currently running
	if !ts.isJobCurrentlyRunning(context.Background(), req.JobID) {
		return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' is not currently running", req.JobID)}
	}

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	log.Printf("Starting kill process for job %s", req.JobID)

	// Step 1: Kill managed processes first
	processKilled := false
	if err := globalProcessManager.KillProcess(req.JobID); err != nil {
		// Expected when the job has no tracked OS process (e.g. workflow-only run)
		log.Printf("Failed to kill managed process for job %s: %v", req.JobID, err)
	} else {
		log.Printf("Successfully killed managed process for job %s", req.JobID)
		processKilled = true
	}

	// Step 2: Terminate Temporal workflows
	workflowsKilled := 0
	workflowIDs, exists := ts.runningWorkflows[req.JobID]
	if exists && len(workflowIDs) > 0 {
		for _, workflowID := range workflowIDs {
			// Terminate the workflow (empty run ID targets the latest run)
			err := ts.client.TerminateWorkflow(ctx, workflowID, "", "Killed by user request")
			if err != nil {
				log.Printf("Error terminating workflow %s for job %s: %v", workflowID, req.JobID, err)
				continue
			}
			log.Printf("Terminated workflow %s for job %s", workflowID, req.JobID)
			workflowsKilled++
		}
		log.Printf("Terminated %d workflow(s) for job %s", workflowsKilled, req.JobID)
	}

	// Step 3: Find and kill any remaining processes by name/pattern
	additionalKills := FindAndKillProcessesByPattern(req.JobID)

	// Step 4: Mark job as not running in our tracking
	ts.markJobAsNotRunning(req.JobID)

	// Prepare response message summarizing what was actually done
	var messages []string
	if processKilled {
		messages = append(messages, "killed managed process")
	}
	if workflowsKilled > 0 {
		messages = append(messages, fmt.Sprintf("terminated %d workflow(s)", workflowsKilled))
	}
	if additionalKills > 0 {
		messages = append(messages, fmt.Sprintf("killed %d additional process(es)", additionalKills))
	}

	if len(messages) == 0 {
		messages = append(messages, "no active processes found but marked as not running")
	}

	log.Printf("Killed job: %s (%s)", req.JobID, strings.Join(messages, ", "))
	return JobResponse{
		Success: true,
		Message: fmt.Sprintf("Successfully killed job '%s': %s", req.JobID, strings.Join(messages, ", ")),
	}
}
|
||||
|
||||
// inspectJob inspects a running job.
//
// Resolution order: prefer the managed-process record (gives PID, start time
// and run duration); fall back to workflow tracking alone (gives only a
// session/workflow ID); otherwise report the inconsistent "running but no
// info" state. Only jobs that both exist and are tracked as running are
// inspectable.
func (ts *TemporalService) inspectJob(req JobRequest) JobResponse {
	if req.JobID == "" {
		return JobResponse{Success: false, Message: "Missing job_id"}
	}

	// Check if job exists
	_, exists := ts.scheduleJobs[req.JobID]
	if !exists {
		return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' not found", req.JobID)}
	}

	// Check if job is currently running
	if !ts.isJobCurrentlyRunning(context.Background(), req.JobID) {
		return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' is not currently running", req.JobID)}
	}

	// Get process information from the managed-process table, if present.
	processes := globalProcessManager.ListProcesses()
	if mp, exists := processes[req.JobID]; exists {
		duration := time.Since(mp.StartTime)

		inspectData := map[string]interface{}{
			"job_id":                   req.JobID,
			"process_id":               mp.Process.Pid,
			"running_duration":         duration.String(),
			"running_duration_seconds": int(duration.Seconds()),
			"start_time":               mp.StartTime.Format(time.RFC3339),
		}

		// Try to get session ID from workflow tracking
		if workflowIDs, exists := ts.runningWorkflows[req.JobID]; exists && len(workflowIDs) > 0 {
			inspectData["session_id"] = workflowIDs[0] // Use the first workflow ID as session ID
		}

		return JobResponse{
			Success: true,
			Message: fmt.Sprintf("Job '%s' is running", req.JobID),
			Data:    inspectData,
		}
	}

	// If no managed process found, check workflows only
	if workflowIDs, exists := ts.runningWorkflows[req.JobID]; exists && len(workflowIDs) > 0 {
		inspectData := map[string]interface{}{
			"job_id":     req.JobID,
			"session_id": workflowIDs[0],
			"message":    "Job is running but process information not available",
		}

		return JobResponse{
			Success: true,
			Message: fmt.Sprintf("Job '%s' is running (workflow only)", req.JobID),
			Data:    inspectData,
		}
	}

	// Tracked as running, yet neither a process nor a workflow is known —
	// an inconsistent state surfaced to the caller as a failure.
	return JobResponse{
		Success: false,
		Message: fmt.Sprintf("Job '%s' appears to be running but no process or workflow information found", req.JobID),
	}
}
|
||||
|
||||
// markCompleted marks a job as completed
|
||||
func (ts *TemporalService) markCompleted(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
// Check if job exists
|
||||
_, exists := ts.scheduleJobs[req.JobID]
|
||||
if !exists {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' not found", req.JobID)}
|
||||
}
|
||||
|
||||
log.Printf("Marking job %s as completed (requested by Rust scheduler)", req.JobID)
|
||||
|
||||
// Mark job as not running in our tracking
|
||||
ts.markJobAsNotRunning(req.JobID)
|
||||
|
||||
// Also try to clean up any lingering processes
|
||||
if err := globalProcessManager.KillProcess(req.JobID); err != nil {
|
||||
log.Printf("No process to clean up for job %s: %v", req.JobID, err)
|
||||
}
|
||||
|
||||
return JobResponse{
|
||||
Success: true,
|
||||
Message: fmt.Sprintf("Job '%s' marked as completed", req.JobID),
|
||||
}
|
||||
}
|
||||
|
||||
// getJobStatus gets the status of a job
|
||||
func (ts *TemporalService) getJobStatus(req JobRequest) JobResponse {
|
||||
if req.JobID == "" {
|
||||
return JobResponse{Success: false, Message: "Missing job_id"}
|
||||
}
|
||||
|
||||
// Check if job exists
|
||||
job, exists := ts.scheduleJobs[req.JobID]
|
||||
if !exists {
|
||||
return JobResponse{Success: false, Message: fmt.Sprintf("Job '%s' not found", req.JobID)}
|
||||
}
|
||||
|
||||
// Update the currently running status based on our tracking
|
||||
job.CurrentlyRunning = ts.isJobCurrentlyRunning(context.Background(), req.JobID)
|
||||
|
||||
// Return the job as a single-item array for consistency with list endpoint
|
||||
jobs := []JobStatus{*job}
|
||||
|
||||
return JobResponse{
|
||||
Success: true,
|
||||
Message: fmt.Sprintf("Status for job '%s'", req.JobID),
|
||||
Jobs: jobs,
|
||||
}
|
||||
}
|
||||
|
||||
// storeRecipeForSchedule copies a recipe file to managed storage and returns the managed path and content
|
||||
func (ts *TemporalService) storeRecipeForSchedule(jobID, originalPath string) (string, []byte, error) {
|
||||
// Validate original recipe file exists
|
||||
if _, err := os.Stat(originalPath); os.IsNotExist(err) {
|
||||
return "", nil, fmt.Errorf("recipe file not found: %s", originalPath)
|
||||
}
|
||||
|
||||
// Read the original recipe content
|
||||
recipeContent, err := os.ReadFile(originalPath)
|
||||
if err != nil {
|
||||
return "", nil, fmt.Errorf("failed to read recipe file: %w", err)
|
||||
}
|
||||
|
||||
// Validate it's a valid recipe by trying to parse it
|
||||
if _, err := ts.parseRecipeContent(recipeContent); err != nil {
|
||||
return "", nil, fmt.Errorf("invalid recipe file: %w", err)
|
||||
}
|
||||
|
||||
// Create managed file path
|
||||
originalFilename := filepath.Base(originalPath)
|
||||
ext := filepath.Ext(originalFilename)
|
||||
if ext == "" {
|
||||
ext = ".yaml" // Default to yaml if no extension
|
||||
}
|
||||
|
||||
managedFilename := fmt.Sprintf("%s%s", jobID, ext)
|
||||
managedPath := filepath.Join(ts.recipesDir, managedFilename)
|
||||
|
||||
// Write to managed storage
|
||||
if err := os.WriteFile(managedPath, recipeContent, 0644); err != nil {
|
||||
return "", nil, fmt.Errorf("failed to write managed recipe file: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Stored recipe for job %s: %s -> %s (size: %d bytes)",
|
||||
jobID, originalPath, managedPath, len(recipeContent))
|
||||
|
||||
return managedPath, recipeContent, nil
|
||||
}
|
||||
|
||||
// cleanupManagedRecipe removes managed recipe files for a job
|
||||
func (ts *TemporalService) cleanupManagedRecipe(jobID string) {
|
||||
// Clean up both permanent and temporary files
|
||||
patterns := []string{
|
||||
fmt.Sprintf("%s.*", jobID), // Permanent files (jobID.yaml, jobID.json, etc.)
|
||||
fmt.Sprintf("%s-temp.*", jobID), // Temporary files
|
||||
}
|
||||
|
||||
for _, pattern := range patterns {
|
||||
matches, err := filepath.Glob(filepath.Join(ts.recipesDir, pattern))
|
||||
if err != nil {
|
||||
log.Printf("Error finding recipe files for cleanup: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, filePath := range matches {
|
||||
if err := os.Remove(filePath); err != nil {
|
||||
log.Printf("Warning: Failed to remove recipe file %s: %v", filePath, err)
|
||||
} else {
|
||||
log.Printf("Cleaned up recipe file: %s", filePath)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
283
temporal-service/service.go
Normal file
283
temporal-service/service.go
Normal file
@@ -0,0 +1,283 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"go.temporal.io/sdk/client"
|
||||
"go.temporal.io/sdk/worker"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// Global service instance for activities to access.
// Set once by NewTemporalService before any activity can run.
var globalService *TemporalService

// TemporalService manages the Temporal client and provides HTTP API.
//
// NOTE(review): the three maps below are read and written directly by the
// HTTP handlers with no mutex; net/http serves requests concurrently —
// confirm access is serialized somewhere, or guard them.
type TemporalService struct {
	client           client.Client         // connection to the local Temporal server
	worker           worker.Worker         // worker registered on TaskQueueName
	scheduleJobs     map[string]*JobStatus // In-memory job tracking, keyed by job ID
	runningJobs      map[string]bool       // Track which jobs are currently running
	runningWorkflows map[string][]string   // Track workflow IDs for each job
	recipesDir       string                // Directory for managed recipe storage
	ports            *PortConfig           // Port configuration (HTTP, Temporal, UI)
}
|
||||
|
||||
// NewTemporalService creates a new Temporal service and ensures the Temporal
// server is running.
//
// Startup order matters: pick free ports, start/verify the Temporal server,
// prepare the managed-recipes directory, dial the client, then start the
// worker. On any failure a descriptive error is returned; the client is
// closed if the worker fails to start. As a side effect the package-level
// globalService is set so activities can reach the service.
func NewTemporalService() (*TemporalService, error) {
	// First, find available ports
	ports, err := findAvailablePorts()
	if err != nil {
		return nil, fmt.Errorf("failed to find available ports: %w", err)
	}

	log.Printf("Using ports - Temporal: %d, UI: %d, HTTP: %d",
		ports.TemporalPort, ports.UIPort, ports.HTTPPort)

	// Ensure Temporal server is running
	if err := ensureTemporalServerRunning(ports); err != nil {
		return nil, fmt.Errorf("failed to ensure Temporal server is running: %w", err)
	}

	// Set up managed recipes directory in user data directory
	recipesDir, err := getManagedRecipesDir()
	if err != nil {
		return nil, fmt.Errorf("failed to determine managed recipes directory: %w", err)
	}
	if err := os.MkdirAll(recipesDir, 0755); err != nil {
		return nil, fmt.Errorf("failed to create managed recipes directory: %w", err)
	}
	log.Printf("Using managed recipes directory: %s", recipesDir)

	// Create client (Temporal server should now be running)
	c, err := client.Dial(client.Options{
		HostPort:  fmt.Sprintf("127.0.0.1:%d", ports.TemporalPort),
		Namespace: Namespace,
	})
	if err != nil {
		return nil, fmt.Errorf("failed to create temporal client: %w", err)
	}

	// Create worker and register the workflow/activity implementations.
	w := worker.New(c, TaskQueueName, worker.Options{})
	w.RegisterWorkflow(GooseJobWorkflow)
	w.RegisterActivity(ExecuteGooseRecipe)

	if err := w.Start(); err != nil {
		// Don't leak the dialed client when worker startup fails.
		c.Close()
		return nil, fmt.Errorf("failed to start worker: %w", err)
	}

	log.Printf("Connected to Temporal server successfully on port %d", ports.TemporalPort)

	service := &TemporalService{
		client:           c,
		worker:           w,
		scheduleJobs:     make(map[string]*JobStatus),
		runningJobs:      make(map[string]bool),
		runningWorkflows: make(map[string][]string),
		recipesDir:       recipesDir,
		ports:            ports,
	}

	// Set global service for activities
	globalService = service

	return service, nil
}
|
||||
|
||||
// Stop gracefully shuts down the Temporal service
|
||||
func (ts *TemporalService) Stop() {
|
||||
log.Println("Shutting down Temporal service...")
|
||||
if ts.worker != nil {
|
||||
ts.worker.Stop()
|
||||
}
|
||||
if ts.client != nil {
|
||||
ts.client.Close()
|
||||
}
|
||||
log.Println("Temporal service stopped")
|
||||
}
|
||||
|
||||
// GetHTTPPort returns the HTTP API port chosen for this service instance.
func (ts *TemporalService) GetHTTPPort() int {
	return ts.ports.HTTPPort
}

// GetTemporalPort returns the Temporal server port for this service.
func (ts *TemporalService) GetTemporalPort() int {
	return ts.ports.TemporalPort
}

// GetUIPort returns the Temporal UI port for this service.
func (ts *TemporalService) GetUIPort() int {
	return ts.ports.UIPort
}
|
||||
|
||||
// HTTP API handlers

// handleJobs is the single POST endpoint for all job operations.
//
// The request body is a JSON JobRequest whose "action" field selects the
// operation; the response is always HTTP 200 with a JobResponse payload
// (including unknown actions, which yield Success=false). Non-POST methods
// get 405 and malformed JSON gets 400 via writeErrorResponse.
func (ts *TemporalService) handleJobs(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")

	if r.Method != http.MethodPost {
		ts.writeErrorResponse(w, http.StatusMethodNotAllowed, "Method not allowed")
		return
	}

	var req JobRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		ts.writeErrorResponse(w, http.StatusBadRequest, fmt.Sprintf("Invalid JSON: %v", err))
		return
	}

	var resp JobResponse

	// Dispatch on the requested action.
	switch req.Action {
	case "create":
		resp = ts.createSchedule(req)
	case "delete":
		resp = ts.deleteSchedule(req)
	case "pause":
		resp = ts.pauseSchedule(req)
	case "unpause":
		resp = ts.unpauseSchedule(req)
	case "update":
		resp = ts.updateSchedule(req)
	case "list":
		resp = ts.listSchedules()
	case "run_now":
		resp = ts.runNow(req)
	case "kill_job":
		resp = ts.killJob(req)
	case "inspect_job":
		resp = ts.inspectJob(req)
	case "mark_completed":
		resp = ts.markCompleted(req)
	case "status":
		resp = ts.getJobStatus(req)
	default:
		resp = JobResponse{Success: false, Message: fmt.Sprintf("Unknown action: %s", req.Action)}
	}

	// Errors from individual actions are conveyed in the JobResponse body;
	// the HTTP status is 200 either way.
	w.WriteHeader(http.StatusOK)
	json.NewEncoder(w).Encode(resp)
}
|
||||
|
||||
func (ts *TemporalService) handleHealth(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
json.NewEncoder(w).Encode(map[string]string{"status": "healthy"})
|
||||
}
|
||||
|
||||
func (ts *TemporalService) handlePorts(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
|
||||
portInfo := map[string]int{
|
||||
"http_port": ts.ports.HTTPPort,
|
||||
"temporal_port": ts.ports.TemporalPort,
|
||||
"ui_port": ts.ports.UIPort,
|
||||
}
|
||||
|
||||
json.NewEncoder(w).Encode(portInfo)
|
||||
}
|
||||
|
||||
// markJobAsRunning flags a job as currently running in the in-memory map.
// (It does not record a workflow ID — that is addRunningWorkflow's job.)
func (ts *TemporalService) markJobAsRunning(jobID string) {
	ts.runningJobs[jobID] = true
	log.Printf("Marked job %s as running", jobID)
}
|
||||
|
||||
// markJobAsNotRunning clears a job from both the running-jobs map and the
// per-job workflow tracking.
func (ts *TemporalService) markJobAsNotRunning(jobID string) {
	delete(ts.runningJobs, jobID)
	delete(ts.runningWorkflows, jobID)
	log.Printf("Marked job %s as not running", jobID)
}
|
||||
|
||||
// addRunningWorkflow tracks a workflow ID for a job
|
||||
func (ts *TemporalService) addRunningWorkflow(jobID, workflowID string) {
|
||||
if ts.runningWorkflows[jobID] == nil {
|
||||
ts.runningWorkflows[jobID] = make([]string, 0)
|
||||
}
|
||||
ts.runningWorkflows[jobID] = append(ts.runningWorkflows[jobID], workflowID)
|
||||
log.Printf("Added workflow %s for job %s", workflowID, jobID)
|
||||
}
|
||||
|
||||
// removeRunningWorkflow removes a workflow ID from job tracking
|
||||
func (ts *TemporalService) removeRunningWorkflow(jobID, workflowID string) {
|
||||
if workflows, exists := ts.runningWorkflows[jobID]; exists {
|
||||
for i, id := range workflows {
|
||||
if id == workflowID {
|
||||
ts.runningWorkflows[jobID] = append(workflows[:i], workflows[i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(ts.runningWorkflows[jobID]) == 0 {
|
||||
delete(ts.runningWorkflows, jobID)
|
||||
ts.runningJobs[jobID] = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// getEmbeddedRecipeContent retrieves embedded recipe content from schedule
// metadata.
//
// The schedule's State.Note field is expected to hold a JSON object with a
// "recipe_content" string key (written when the schedule was created).
// Returns an error if the schedule cannot be described, the note is empty
// or unparsable, or the key is absent. Uses a 10s timeout for the RPC.
func (ts *TemporalService) getEmbeddedRecipeContent(jobID string) (string, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Schedule IDs follow the "goose-job-<jobID>" convention.
	scheduleID := fmt.Sprintf("goose-job-%s", jobID)
	handle := ts.client.ScheduleClient().GetHandle(ctx, scheduleID)

	desc, err := handle.Describe(ctx)
	if err != nil {
		return "", fmt.Errorf("failed to get schedule description: %w", err)
	}

	if desc.Schedule.State.Note == "" {
		return "", fmt.Errorf("no metadata found in schedule")
	}

	// The note carries a JSON blob of metadata.
	var metadata map[string]interface{}
	if err := json.Unmarshal([]byte(desc.Schedule.State.Note), &metadata); err != nil {
		return "", fmt.Errorf("failed to parse schedule metadata: %w", err)
	}

	if recipeContent, ok := metadata["recipe_content"].(string); ok {
		return recipeContent, nil
	}

	return "", fmt.Errorf("no embedded recipe content found")
}
|
||||
|
||||
// writeErrorResponse writes a standardized error response: the given HTTP
// status plus a JSON JobResponse with Success=false. Callers are expected
// to have set the Content-Type header already (handleJobs does).
func (ts *TemporalService) writeErrorResponse(w http.ResponseWriter, statusCode int, message string) {
	w.WriteHeader(statusCode)
	json.NewEncoder(w).Encode(JobResponse{Success: false, Message: message})
}
|
||||
|
||||
// isJobCurrentlyRunning checks if there are any running workflows for the given job ID
|
||||
func (ts *TemporalService) isJobCurrentlyRunning(ctx context.Context, jobID string) bool {
|
||||
// Check our in-memory tracking of running jobs
|
||||
if running, exists := ts.runningJobs[jobID]; exists && running {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// parseRecipeContent parses recipe content from bytes (YAML or JSON)
|
||||
func (ts *TemporalService) parseRecipeContent(content []byte) (*Recipe, error) {
|
||||
var recipe Recipe
|
||||
|
||||
// Try YAML first, then JSON
|
||||
if err := yaml.Unmarshal(content, &recipe); err != nil {
|
||||
if err := json.Unmarshal(content, &recipe); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse as YAML or JSON: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return &recipe, nil
|
||||
}
|
||||
Binary file not shown.
@@ -1091,6 +1091,10 @@
|
||||
"cron": {
|
||||
"type": "string"
|
||||
},
|
||||
"execution_mode": {
|
||||
"type": "string",
|
||||
"nullable": true
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1907,6 +1911,10 @@
|
||||
"currently_running": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"execution_mode": {
|
||||
"type": "string",
|
||||
"nullable": true
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
|
||||
@@ -15,9 +15,10 @@
|
||||
"start:test-error": "GOOSE_TEST_ERROR=true electron-forge start",
|
||||
"package": "electron-forge package",
|
||||
"make": "electron-forge make",
|
||||
"bundle:default": "npm run make && (cd out/Goose-darwin-arm64 && ditto -c -k --sequesterRsrc --keepParent Goose.app Goose.zip) || echo 'out/Goose-darwin-arm64 not found; either the binary is not built or you are not on macOS'",
|
||||
"bundle:windows": "node scripts/build-main.js && node scripts/prepare-platform.js && npm run make -- --platform=win32 --arch=x64 && node scripts/copy-windows-dlls.js",
|
||||
"bundle:intel": "npm run make -- --arch=x64 && cd out/Goose-darwin-x64 && ditto -c -k --sequesterRsrc --keepParent Goose.app Goose_intel_mac.zip",
|
||||
"bundle:default": "node scripts/prepare-platform-binaries.js && npm run make && (cd out/Goose-darwin-arm64 && ditto -c -k --sequesterRsrc --keepParent Goose.app Goose.zip) || echo 'out/Goose-darwin-arm64 not found; either the binary is not built or you are not on macOS'",
|
||||
"bundle:alpha": "ALPHA=true node scripts/prepare-platform-binaries.js && ALPHA=true npm run make && (cd out/Goose-darwin-arm64 && ditto -c -k --sequesterRsrc --keepParent Goose.app Goose_alpha.zip) || echo 'out/Goose-darwin-arm64 not found; either the binary is not built or you are not on macOS'",
|
||||
"bundle:windows": "node scripts/build-main.js && ELECTRON_PLATFORM=win32 node scripts/prepare-platform-binaries.js && npm run make -- --platform=win32 --arch=x64",
|
||||
"bundle:intel": "node scripts/prepare-platform-binaries.js && npm run make -- --arch=x64 && cd out/Goose-darwin-x64 && ditto -c -k --sequesterRsrc --keepParent Goose.app Goose_intel_mac.zip",
|
||||
"debug": "echo 'run --remote-debugging-port=8315' && lldb out/Goose-darwin-arm64/Goose.app",
|
||||
"test-e2e": "npm run generate-api && playwright test",
|
||||
"test-e2e:dev": "npm run generate-api && playwright test --reporter=list --retries=0 --max-failures=1",
|
||||
|
||||
155
ui/desktop/scripts/prepare-platform-binaries.js
Normal file
155
ui/desktop/scripts/prepare-platform-binaries.js
Normal file
@@ -0,0 +1,155 @@
|
||||
const fs = require('fs');
const path = require('path');

// Paths: the bundled binaries directory and the Windows-only staging area.
const srcBinDir = path.join(__dirname, '..', 'src', 'bin');
const platformWinDir = path.join(__dirname, '..', 'src', 'platform', 'windows', 'bin');

// Platform-specific file patterns.
// Windows artifacts that must be stripped from macOS/Linux bundles.
const windowsFiles = [
  '*.exe',
  '*.dll',
  '*.cmd',
  'goose-npm/**/*'
];

// macOS/Linux artifacts and common files.
// NOTE(review): macosFiles is not referenced by the functions below
// (cleanBinDirectory detects macOS binaries via the execute bit instead) —
// confirm whether this list is still needed.
const macosFiles = [
  'goosed',
  'goose',
  'temporal',
  'temporal-service',
  'jbang',
  'npx',
  'uvx',
  '*.db',
  '*.log',
  '.gitkeep'
];
|
||||
|
||||
// Returns true when `filename` matches at least one of the glob-like
// `patterns`. Supported forms: 'dir/**/*' (prefix match on 'dir'),
// '*.ext' (extension match), other '*' wildcards (regex match), and
// plain strings (exact match).
function matchesPattern(filename, patterns) {
  for (const pattern of patterns) {
    let matched;
    if (pattern.includes('**')) {
      // Directory pattern: anything under the base prefix matches.
      const prefix = pattern.split('/**')[0];
      matched = filename.startsWith(prefix);
    } else if (pattern.startsWith('*.')) {
      // Extension pattern like *.exe / *.dll — require the dot.
      matched = filename.endsWith('.' + pattern.substring(2));
    } else if (pattern.includes('*')) {
      // Generic wildcard: translate '*' to '.*' and anchor both ends.
      matched = new RegExp('^' + pattern.replace(/\*/g, '.*') + '$').test(filename);
    } else {
      // No wildcard: exact filename match.
      matched = filename === pattern;
    }
    if (matched) {
      return true;
    }
  }
  return false;
}
|
||||
|
||||
// Removes cross-platform files from src/bin so the packaged app only ships
// binaries for `targetPlatform`.
//
// darwin/linux: delete anything matching the Windows patterns.
// win32: delete extension-less executable files (heuristic for macOS
// binaries — checked via the file's execute bits), keeping Windows files
// and the common *.db / *.log / .gitkeep files.
// Destructive: files are removed from disk; missing src/bin is a no-op.
function cleanBinDirectory(targetPlatform) {
  console.log(`Cleaning bin directory for ${targetPlatform} build...`);

  if (!fs.existsSync(srcBinDir)) {
    console.log('src/bin directory does not exist, skipping cleanup');
    return;
  }

  const files = fs.readdirSync(srcBinDir, { withFileTypes: true });

  files.forEach(file => {
    const filePath = path.join(srcBinDir, file.name);

    if (targetPlatform === 'darwin' || targetPlatform === 'linux') {
      // For macOS/Linux, remove Windows-specific files
      if (matchesPattern(file.name, windowsFiles)) {
        console.log(`Removing Windows file: ${file.name}`);
        if (file.isDirectory()) {
          fs.rmSync(filePath, { recursive: true, force: true });
        } else {
          fs.unlinkSync(filePath);
        }
      }
    } else if (targetPlatform === 'win32') {
      // For Windows, remove macOS-specific files (keep only Windows files and common files)
      if (!matchesPattern(file.name, windowsFiles) && !matchesPattern(file.name, ['*.db', '*.log', '.gitkeep'])) {
        // Check if it's a macOS binary (executable without extension)
        if (file.isFile() && !path.extname(file.name) && file.name !== '.gitkeep') {
          try {
            // Check if file is executable (likely a macOS binary)
            const stats = fs.statSync(filePath);
            if (stats.mode & parseInt('111', 8)) { // Check if any execute bit is set
              console.log(`Removing macOS binary: ${file.name}`);
              fs.unlinkSync(filePath);
            }
          } catch (err) {
            // Stat failures are logged and the file is kept.
            console.warn(`Could not check file ${file.name}:`, err.message);
          }
        }
      }
    }
    // Any other targetPlatform value: nothing is removed.
  });
}
|
||||
|
||||
// Copies platform-specific binaries into src/bin ahead of packaging.
// Currently only win32 has a staging directory; other platforms are a
// no-op. README.md and .gitignore in the staging dir are skipped.
function copyPlatformFiles(targetPlatform) {
  if (targetPlatform === 'win32') {
    console.log('Copying Windows-specific files...');

    if (!fs.existsSync(platformWinDir)) {
      console.warn('Windows platform directory does not exist');
      return;
    }

    // Ensure src/bin exists
    if (!fs.existsSync(srcBinDir)) {
      fs.mkdirSync(srcBinDir, { recursive: true });
    }

    // Copy Windows-specific files, overwriting any existing copies.
    const files = fs.readdirSync(platformWinDir, { withFileTypes: true });
    files.forEach(file => {
      // Documentation/housekeeping files stay out of the bundle.
      if (file.name === 'README.md' || file.name === '.gitignore') {
        return;
      }

      const srcPath = path.join(platformWinDir, file.name);
      const destPath = path.join(srcBinDir, file.name);

      if (file.isDirectory()) {
        fs.cpSync(srcPath, destPath, { recursive: true, force: true });
        console.log(`Copied directory: ${file.name}`);
      } else {
        fs.copyFileSync(srcPath, destPath);
        console.log(`Copied: ${file.name}`);
      }
    });
  }
}
|
||||
|
||||
// Main entry point: stage src/bin for the target platform build.
// ELECTRON_PLATFORM overrides the host platform (used for cross-builds,
// e.g. building the win32 bundle from macOS).
function preparePlatformBinaries() {
  const targetPlatform = process.env.ELECTRON_PLATFORM || process.platform;

  console.log(`Preparing binaries for platform: ${targetPlatform}`);

  // First copy platform-specific files if needed...
  copyPlatformFiles(targetPlatform);

  // ...then strip binaries that belong to other platforms.
  cleanBinDirectory(targetPlatform);

  console.log('Platform binary preparation complete');
}
|
||||
|
||||
// Run when executed directly (node prepare-platform-binaries.js).
if (require.main === module) {
  preparePlatformBinaries();
}

// Also exported so other build scripts can invoke it programmatically.
module.exports = { preparePlatformBinaries };
|
||||
@@ -64,6 +64,7 @@ export type ContextManageResponse = {
|
||||
|
||||
export type CreateScheduleRequest = {
|
||||
cron: string;
|
||||
execution_mode?: string | null;
|
||||
id: string;
|
||||
recipe_source: string;
|
||||
};
|
||||
@@ -316,6 +317,7 @@ export type ScheduledJob = {
|
||||
cron: string;
|
||||
current_session_id?: string | null;
|
||||
currently_running?: boolean;
|
||||
execution_mode?: string | null;
|
||||
id: string;
|
||||
last_run?: string | null;
|
||||
paused?: boolean;
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
#!/bin/bash
# jbang wrapper: bootstraps a local hermit toolchain (JDK 17 + jbang) under
# ~/.config/goose/mcp-hermit, then runs jbang with the caller's arguments.

# Enable strict mode to exit on errors and unset variables
set -euo pipefail

# Set log file
LOG_FILE="/tmp/mcp.log"

# Clear the log file at the start
> "$LOG_FILE"

# Function for logging: timestamped, echoed to stdout and appended to the log
log() {
    local MESSAGE="$1"
    echo "$(date +'%Y-%m-%d %H:%M:%S') - $MESSAGE" | tee -a "$LOG_FILE"
}

# Trap errors and log them before exiting
trap 'log "An error occurred. Exiting with status $?."' ERR

log "Starting jbang setup script."

# Ensure ~/.config/goose/mcp-hermit/bin exists
log "Creating directory ~/.config/goose/mcp-hermit/bin if it does not exist."
mkdir -p ~/.config/goose/mcp-hermit/bin

# Change to the ~/.config/goose/mcp-hermit directory
log "Changing to directory ~/.config/goose/mcp-hermit."
cd ~/.config/goose/mcp-hermit

# Check if hermit binary exists and download if not
# (release asset name is derived from the host OS and architecture)
if [ ! -f ~/.config/goose/mcp-hermit/bin/hermit ]; then
    log "Hermit binary not found. Downloading hermit binary."
    curl -fsSL "https://github.com/cashapp/hermit/releases/download/stable/hermit-$(uname -s | tr '[:upper:]' '[:lower:]')-$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/').gz" \
        | gzip -dc > ~/.config/goose/mcp-hermit/bin/hermit && chmod +x ~/.config/goose/mcp-hermit/bin/hermit
    log "Hermit binary downloaded and made executable."
else
    log "Hermit binary already exists. Skipping download."
fi

# Keep hermit's package cache local to this MCP sandbox
log "setting hermit cache to be local for MCP servers"
mkdir -p ~/.config/goose/mcp-hermit/cache
export HERMIT_STATE_DIR=~/.config/goose/mcp-hermit/cache

# Update PATH so the hermit-managed tools win over system ones
export PATH=~/.config/goose/mcp-hermit/bin:$PATH
log "Updated PATH to include ~/.config/goose/mcp-hermit/bin."

# Initialize hermit
log "Initializing hermit."
hermit init >> "$LOG_FILE"

# Install OpenJDK using hermit
log "Installing OpenJDK with hermit."
hermit install openjdk@17 >> "$LOG_FILE"

# Download and install jbang if not present
if [ ! -f ~/.config/goose/mcp-hermit/bin/jbang ]; then
    log "Downloading and installing jbang."
    curl -Ls https://sh.jbang.dev | bash -s - app setup
    cp ~/.jbang/bin/jbang ~/.config/goose/mcp-hermit/bin/
fi

# Verify installations
log "Verifying installation locations:"
log "hermit: $(which hermit)"
log "java: $(which java)"
log "jbang: $(which jbang)"

# Check for custom registry settings
log "Checking for GOOSE_JBANG_REGISTRY environment variable for custom jbang registry setup..."
if [ -n "${GOOSE_JBANG_REGISTRY:-}" ] && curl -s --head --fail "$GOOSE_JBANG_REGISTRY" > /dev/null; then
    log "Checking custom goose registry availability: $GOOSE_JBANG_REGISTRY"
    log "$GOOSE_JBANG_REGISTRY is accessible. Setting it as JBANG_REPO."
    export JBANG_REPO="$GOOSE_JBANG_REGISTRY"
else
    log "GOOSE_JBANG_REGISTRY is not set or not accessible. Using default jbang repository."
fi

# Trust all jbang scripts that a user might install. Without this, Jbang will attempt to
# prompt the user to trust each script. However, Goose does not surface this modal and without
# user input, the addExtension method will timeout and fail.
jbang --quiet trust add *

# Final step: Execute jbang with passed arguments, always including --fresh and --quiet
# (a jbang failure is logged but, due to the || fallback, does not abort the script)
log "Executing 'jbang' command with arguments: $*"
jbang --fresh --quiet "$@" || log "Failed to execute 'jbang' with arguments: $*"

log "jbang setup script completed successfully."
||||
@@ -1,105 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Enable strict mode to exit on errors and unset variables
|
||||
set -euo pipefail
|
||||
|
||||
# Set log file
|
||||
LOG_FILE="/tmp/mcp.log"
|
||||
|
||||
# Clear the log file at the start
|
||||
> "$LOG_FILE"
|
||||
|
||||
# Function for logging
|
||||
log() {
|
||||
local MESSAGE="$1"
|
||||
echo "$(date +'%Y-%m-%d %H:%M:%S') - $MESSAGE" | tee -a "$LOG_FILE"
|
||||
}
|
||||
|
||||
# Trap errors and log them before exiting
|
||||
trap 'log "An error occurred. Exiting with status $?."' ERR
|
||||
|
||||
log "Starting npx setup script."
|
||||
|
||||
# Ensure ~/.config/goose/mcp-hermit/bin exists
|
||||
log "Creating directory ~/.config/goose/mcp-hermit/bin if it does not exist."
|
||||
mkdir -p ~/.config/goose/mcp-hermit/bin
|
||||
|
||||
# Change to the ~/.config/goose/mcp-hermit directory
|
||||
log "Changing to directory ~/.config/goose/mcp-hermit."
|
||||
cd ~/.config/goose/mcp-hermit
|
||||
|
||||
|
||||
# Check if hermit binary exists and download if not
|
||||
if [ ! -f ~/.config/goose/mcp-hermit/bin/hermit ]; then
|
||||
log "Hermit binary not found. Downloading hermit binary."
|
||||
curl -fsSL "https://github.com/cashapp/hermit/releases/download/stable/hermit-$(uname -s | tr '[:upper:]' '[:lower:]')-$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/').gz" \
|
||||
| gzip -dc > ~/.config/goose/mcp-hermit/bin/hermit && chmod +x ~/.config/goose/mcp-hermit/bin/hermit
|
||||
log "Hermit binary downloaded and made executable."
|
||||
else
|
||||
log "Hermit binary already exists. Skipping download."
|
||||
fi
|
||||
|
||||
|
||||
log "setting hermit cache to be local for MCP servers"
|
||||
mkdir -p ~/.config/goose/mcp-hermit/cache
|
||||
export HERMIT_STATE_DIR=~/.config/goose/mcp-hermit/cache
|
||||
|
||||
|
||||
# Update PATH
|
||||
export PATH=~/.config/goose/mcp-hermit/bin:$PATH
|
||||
log "Updated PATH to include ~/.config/goose/mcp-hermit/bin."
|
||||
|
||||
|
||||
# Verify hermit installation
|
||||
log "Checking for hermit in PATH."
|
||||
which hermit >> "$LOG_FILE"
|
||||
|
||||
# Initialize hermit
|
||||
log "Initializing hermit."
|
||||
hermit init >> "$LOG_FILE"
|
||||
|
||||
# Install Node.js using hermit
|
||||
log "Installing Node.js with hermit."
|
||||
hermit install node >> "$LOG_FILE"
|
||||
|
||||
# Verify installations
|
||||
log "Verifying installation locations:"
|
||||
log "hermit: $(which hermit)"
|
||||
log "node: $(which node)"
|
||||
log "npx: $(which npx)"
|
||||
|
||||
|
||||
log "Checking for GOOSE_NPM_REGISTRY and GOOSE_NPM_CERT environment variables for custom npm registry setup..."
|
||||
# Check if GOOSE_NPM_REGISTRY is set and accessible
|
||||
if [ -n "${GOOSE_NPM_REGISTRY:-}" ] && curl -s --head --fail "$GOOSE_NPM_REGISTRY" > /dev/null; then
|
||||
log "Checking custom goose registry availability: $GOOSE_NPM_REGISTRY"
|
||||
log "$GOOSE_NPM_REGISTRY is accessible. Using it for npm registry."
|
||||
export NPM_CONFIG_REGISTRY="$GOOSE_NPM_REGISTRY"
|
||||
|
||||
# Check if GOOSE_NPM_CERT is set and accessible
|
||||
if [ -n "${GOOSE_NPM_CERT:-}" ] && curl -s --head --fail "$GOOSE_NPM_CERT" > /dev/null; then
|
||||
log "Downloading certificate from: $GOOSE_NPM_CERT"
|
||||
curl -sSL -o ~/.config/goose/mcp-hermit/cert.pem "$GOOSE_NPM_CERT"
|
||||
if [ $? -eq 0 ]; then
|
||||
log "Certificate downloaded successfully."
|
||||
export NODE_EXTRA_CA_CERTS=~/.config/goose/mcp-hermit/cert.pem
|
||||
else
|
||||
log "Unable to download the certificate. Skipping certificate setup."
|
||||
fi
|
||||
else
|
||||
log "GOOSE_NPM_CERT is either not set or not accessible. Skipping certificate setup."
|
||||
fi
|
||||
|
||||
else
|
||||
log "GOOSE_NPM_REGISTRY is either not set or not accessible. Falling back to default npm registry."
|
||||
export NPM_CONFIG_REGISTRY="https://registry.npmjs.org/"
|
||||
fi
|
||||
|
||||
|
||||
|
||||
|
||||
# Final step: Execute npx with passed arguments
|
||||
log "Executing 'npx' command with arguments: $*"
|
||||
npx "$@" || log "Failed to execute 'npx' with arguments: $*"
|
||||
|
||||
log "npx setup script completed successfully."
|
||||
@@ -1,89 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Enable strict mode to exit on errors and unset variables
|
||||
set -euo pipefail
|
||||
|
||||
# Set log file
|
||||
LOG_FILE="/tmp/mcp.log"
|
||||
|
||||
# Clear the log file at the start
|
||||
> "$LOG_FILE"
|
||||
|
||||
# Function for logging
|
||||
log() {
|
||||
local MESSAGE="$1"
|
||||
echo "$(date +'%Y-%m-%d %H:%M:%S') - $MESSAGE" | tee -a "$LOG_FILE"
|
||||
}
|
||||
|
||||
# Trap errors and log them before exiting
|
||||
trap 'log "An error occurred. Exiting with status $?."' ERR
|
||||
|
||||
log "Starting uvx setup script."
|
||||
|
||||
# Ensure ~/.config/goose/mcp-hermit/bin exists
|
||||
log "Creating directory ~/.config/goose/mcp-hermit/bin if it does not exist."
|
||||
mkdir -p ~/.config/goose/mcp-hermit/bin
|
||||
|
||||
# Change to the ~/.config/goose/mcp-hermit directory
|
||||
log "Changing to directory ~/.config/goose/mcp-hermit."
|
||||
cd ~/.config/goose/mcp-hermit
|
||||
|
||||
# Check if hermit binary exists and download if not
|
||||
if [ ! -f ~/.config/goose/mcp-hermit/bin/hermit ]; then
|
||||
log "Hermit binary not found. Downloading hermit binary."
|
||||
curl -fsSL "https://github.com/cashapp/hermit/releases/download/stable/hermit-$(uname -s | tr '[:upper:]' '[:lower:]')-$(uname -m | sed 's/x86_64/amd64/' | sed 's/aarch64/arm64/').gz" \
|
||||
| gzip -dc > ~/.config/goose/mcp-hermit/bin/hermit && chmod +x ~/.config/goose/mcp-hermit/bin/hermit
|
||||
log "Hermit binary downloaded and made executable."
|
||||
else
|
||||
log "Hermit binary already exists. Skipping download."
|
||||
fi
|
||||
|
||||
|
||||
log "setting hermit cache to be local for MCP servers"
|
||||
mkdir -p ~/.config/goose/mcp-hermit/cache
|
||||
export HERMIT_STATE_DIR=~/.config/goose/mcp-hermit/cache
|
||||
|
||||
# Update PATH
|
||||
export PATH=~/.config/goose/mcp-hermit/bin:$PATH
|
||||
log "Updated PATH to include ~/.config/goose/mcp-hermit/bin."
|
||||
|
||||
|
||||
# Verify hermit installation
|
||||
log "Checking for hermit in PATH."
|
||||
which hermit >> "$LOG_FILE"
|
||||
|
||||
# Initialize hermit
|
||||
log "Initializing hermit."
|
||||
hermit init >> "$LOG_FILE"
|
||||
|
||||
# Initialize python >= 3.10
|
||||
log "hermit install python 3.10"
|
||||
hermit install python3@3.10 >> "$LOG_FILE"
|
||||
|
||||
# Install UV for python using hermit
|
||||
log "Installing UV with hermit."
|
||||
hermit install uv >> "$LOG_FILE"
|
||||
|
||||
# Verify installations
|
||||
log "Verifying installation locations:"
|
||||
log "hermit: $(which hermit)"
|
||||
log "uv: $(which uv)"
|
||||
log "uvx: $(which uvx)"
|
||||
|
||||
|
||||
log "Checking for GOOSE_UV_REGISTRY environment variable for custom python/pip/UV registry setup..."
|
||||
# Check if GOOSE_UV_REGISTRY is set and accessible
|
||||
if [ -n "${GOOSE_UV_REGISTRY:-}" ] && curl -s --head --fail "$GOOSE_UV_REGISTRY" > /dev/null; then
|
||||
log "Checking custom goose registry availability: $GOOSE_UV_REGISTRY"
|
||||
log "$GOOSE_UV_REGISTRY is accessible, setting it as UV_INDEX_URL. Setting UV_NATIVE_TLS to true."
|
||||
export UV_INDEX_URL="$GOOSE_UV_REGISTRY"
|
||||
export UV_NATIVE_TLS=true
|
||||
else
|
||||
log "Neither GOOSE_UV_REGISTRY nor UV_INDEX_URL is set. Falling back to default configuration."
|
||||
fi
|
||||
|
||||
# Final step: Execute uvx with passed arguments
|
||||
log "Executing 'uvx' command with arguments: $*"
|
||||
uvx "$@" || log "Failed to execute 'uvx' with arguments: $*"
|
||||
|
||||
log "uvx setup script completed successfully."
|
||||
@@ -1,4 +1,12 @@
|
||||
import React, { useEffect, useRef, useState, useMemo, useCallback, createContext, useContext } from 'react';
|
||||
import React, {
|
||||
useEffect,
|
||||
useRef,
|
||||
useState,
|
||||
useMemo,
|
||||
useCallback,
|
||||
createContext,
|
||||
useContext,
|
||||
} from 'react';
|
||||
import { getApiUrl } from '../config';
|
||||
import FlappyGoose from './FlappyGoose';
|
||||
import GooseMessage from './GooseMessage';
|
||||
@@ -100,6 +108,7 @@ function ChatContent({
|
||||
const [sessionTokenCount, setSessionTokenCount] = useState<number>(0);
|
||||
const [ancestorMessages, setAncestorMessages] = useState<Message[]>([]);
|
||||
const [droppedFiles, setDroppedFiles] = useState<string[]>([]);
|
||||
const [readyForAutoUserPrompt, setReadyForAutoUserPrompt] = useState(false);
|
||||
|
||||
const scrollRef = useRef<ScrollAreaHandle>(null);
|
||||
|
||||
@@ -119,6 +128,8 @@ function ChatContent({
|
||||
window.electron.logInfo(
|
||||
'Initial messages when resuming session: ' + JSON.stringify(chat.messages, null, 2)
|
||||
);
|
||||
// Set ready for auto user prompt after component initialization
|
||||
setReadyForAutoUserPrompt(true);
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []); // Empty dependency array means this runs once on mount;
|
||||
|
||||
@@ -152,7 +163,11 @@ function ChatContent({
|
||||
} = useMessageStream({
|
||||
api: getApiUrl('/reply'),
|
||||
initialMessages: chat.messages,
|
||||
body: { session_id: chat.id, session_working_dir: window.appConfig.get('GOOSE_WORKING_DIR') },
|
||||
body: {
|
||||
session_id: chat.id,
|
||||
session_working_dir: window.appConfig.get('GOOSE_WORKING_DIR'),
|
||||
...(recipeConfig?.scheduledJobId && { scheduled_job_id: recipeConfig.scheduledJobId }),
|
||||
},
|
||||
onFinish: async (_message, _reason) => {
|
||||
window.electron.stopPowerSaveBlocker();
|
||||
|
||||
@@ -297,6 +312,40 @@ function ChatContent({
|
||||
return recipeConfig?.prompt || '';
|
||||
}, [recipeConfig?.prompt]);
|
||||
|
||||
// Auto-send the prompt for scheduled executions
|
||||
useEffect(() => {
|
||||
if (
|
||||
recipeConfig?.isScheduledExecution &&
|
||||
recipeConfig?.prompt &&
|
||||
messages.length === 0 &&
|
||||
!isLoading &&
|
||||
readyForAutoUserPrompt
|
||||
) {
|
||||
console.log('Auto-sending prompt for scheduled execution:', recipeConfig.prompt);
|
||||
|
||||
// Create and send the user message
|
||||
const userMessage = createUserMessage(recipeConfig.prompt);
|
||||
setLastInteractionTime(Date.now());
|
||||
window.electron.startPowerSaveBlocker();
|
||||
append(userMessage);
|
||||
|
||||
// Scroll to bottom after sending
|
||||
setTimeout(() => {
|
||||
if (scrollRef.current?.scrollToBottom) {
|
||||
scrollRef.current.scrollToBottom();
|
||||
}
|
||||
}, 100);
|
||||
}
|
||||
}, [
|
||||
recipeConfig?.isScheduledExecution,
|
||||
recipeConfig?.prompt,
|
||||
messages.length,
|
||||
isLoading,
|
||||
readyForAutoUserPrompt,
|
||||
append,
|
||||
setLastInteractionTime,
|
||||
]);
|
||||
|
||||
// Handle submit
|
||||
const handleSubmit = (e: React.FormEvent) => {
|
||||
window.electron.startPowerSaveBlocker();
|
||||
@@ -512,148 +561,148 @@ function ChatContent({
|
||||
return (
|
||||
<CurrentModelContext.Provider value={currentModelInfo}>
|
||||
<div className="flex flex-col w-full h-screen items-center justify-center">
|
||||
{/* Loader when generating recipe */}
|
||||
{isGeneratingRecipe && <LayingEggLoader />}
|
||||
<MoreMenuLayout
|
||||
hasMessages={hasMessages}
|
||||
setView={setView}
|
||||
setIsGoosehintsModalOpen={setIsGoosehintsModalOpen}
|
||||
/>
|
||||
{/* Loader when generating recipe */}
|
||||
{isGeneratingRecipe && <LayingEggLoader />}
|
||||
<MoreMenuLayout
|
||||
hasMessages={hasMessages}
|
||||
setView={setView}
|
||||
setIsGoosehintsModalOpen={setIsGoosehintsModalOpen}
|
||||
/>
|
||||
|
||||
<Card
|
||||
className="flex flex-col flex-1 rounded-none h-[calc(100vh-95px)] w-full bg-bgApp mt-0 border-none relative"
|
||||
onDrop={handleDrop}
|
||||
onDragOver={handleDragOver}
|
||||
>
|
||||
{recipeConfig?.title && messages.length > 0 && (
|
||||
<AgentHeader
|
||||
title={recipeConfig.title}
|
||||
profileInfo={
|
||||
recipeConfig.profile
|
||||
? `${recipeConfig.profile} - ${recipeConfig.mcps || 12} MCPs`
|
||||
: undefined
|
||||
}
|
||||
onChangeProfile={() => {
|
||||
// Handle profile change
|
||||
console.log('Change profile clicked');
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
{messages.length === 0 ? (
|
||||
<Splash
|
||||
append={append}
|
||||
activities={Array.isArray(recipeConfig?.activities) ? recipeConfig!.activities : null}
|
||||
title={recipeConfig?.title}
|
||||
/>
|
||||
) : (
|
||||
<ScrollArea ref={scrollRef} className="flex-1" autoScroll>
|
||||
<SearchView>
|
||||
{filteredMessages.map((message, index) => (
|
||||
<div
|
||||
key={message.id || index}
|
||||
className="mt-4 px-4"
|
||||
data-testid="message-container"
|
||||
>
|
||||
{isUserMessage(message) ? (
|
||||
<>
|
||||
{hasContextHandlerContent(message) ? (
|
||||
<ContextHandler
|
||||
messages={messages}
|
||||
messageId={message.id ?? message.created.toString()}
|
||||
chatId={chat.id}
|
||||
workingDir={window.appConfig.get('GOOSE_WORKING_DIR') as string}
|
||||
contextType={getContextHandlerType(message)}
|
||||
/>
|
||||
) : (
|
||||
<UserMessage message={message} />
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{/* Only render GooseMessage if it's not a message invoking some context management */}
|
||||
{hasContextHandlerContent(message) ? (
|
||||
<ContextHandler
|
||||
messages={messages}
|
||||
messageId={message.id ?? message.created.toString()}
|
||||
chatId={chat.id}
|
||||
workingDir={window.appConfig.get('GOOSE_WORKING_DIR') as string}
|
||||
contextType={getContextHandlerType(message)}
|
||||
/>
|
||||
) : (
|
||||
<GooseMessage
|
||||
messageHistoryIndex={chat?.messageHistoryIndex}
|
||||
message={message}
|
||||
messages={messages}
|
||||
append={append}
|
||||
appendMessage={(newMessage) => {
|
||||
const updatedMessages = [...messages, newMessage];
|
||||
setMessages(updatedMessages);
|
||||
}}
|
||||
toolCallNotifications={toolCallNotifications}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
<Card
|
||||
className="flex flex-col flex-1 rounded-none h-[calc(100vh-95px)] w-full bg-bgApp mt-0 border-none relative"
|
||||
onDrop={handleDrop}
|
||||
onDragOver={handleDragOver}
|
||||
>
|
||||
{recipeConfig?.title && messages.length > 0 && (
|
||||
<AgentHeader
|
||||
title={recipeConfig.title}
|
||||
profileInfo={
|
||||
recipeConfig.profile
|
||||
? `${recipeConfig.profile} - ${recipeConfig.mcps || 12} MCPs`
|
||||
: undefined
|
||||
}
|
||||
onChangeProfile={() => {
|
||||
// Handle profile change
|
||||
console.log('Change profile clicked');
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
{messages.length === 0 ? (
|
||||
<Splash
|
||||
append={append}
|
||||
activities={Array.isArray(recipeConfig?.activities) ? recipeConfig!.activities : null}
|
||||
title={recipeConfig?.title}
|
||||
/>
|
||||
) : (
|
||||
<ScrollArea ref={scrollRef} className="flex-1" autoScroll>
|
||||
<SearchView>
|
||||
{filteredMessages.map((message, index) => (
|
||||
<div
|
||||
key={message.id || index}
|
||||
className="mt-4 px-4"
|
||||
data-testid="message-container"
|
||||
>
|
||||
{isUserMessage(message) ? (
|
||||
<>
|
||||
{hasContextHandlerContent(message) ? (
|
||||
<ContextHandler
|
||||
messages={messages}
|
||||
messageId={message.id ?? message.created.toString()}
|
||||
chatId={chat.id}
|
||||
workingDir={window.appConfig.get('GOOSE_WORKING_DIR') as string}
|
||||
contextType={getContextHandlerType(message)}
|
||||
/>
|
||||
) : (
|
||||
<UserMessage message={message} />
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{/* Only render GooseMessage if it's not a message invoking some context management */}
|
||||
{hasContextHandlerContent(message) ? (
|
||||
<ContextHandler
|
||||
messages={messages}
|
||||
messageId={message.id ?? message.created.toString()}
|
||||
chatId={chat.id}
|
||||
workingDir={window.appConfig.get('GOOSE_WORKING_DIR') as string}
|
||||
contextType={getContextHandlerType(message)}
|
||||
/>
|
||||
) : (
|
||||
<GooseMessage
|
||||
messageHistoryIndex={chat?.messageHistoryIndex}
|
||||
message={message}
|
||||
messages={messages}
|
||||
append={append}
|
||||
appendMessage={(newMessage) => {
|
||||
const updatedMessages = [...messages, newMessage];
|
||||
setMessages(updatedMessages);
|
||||
}}
|
||||
toolCallNotifications={toolCallNotifications}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</SearchView>
|
||||
|
||||
{error && (
|
||||
<div className="flex flex-col items-center justify-center p-4">
|
||||
<div className="text-red-700 dark:text-red-300 bg-red-400/50 p-3 rounded-lg mb-2">
|
||||
{error.message || 'Honk! Goose experienced an error while responding'}
|
||||
</div>
|
||||
<div
|
||||
className="px-3 py-2 mt-2 text-center whitespace-nowrap cursor-pointer text-textStandard border border-borderSubtle hover:bg-bgSubtle rounded-full inline-block transition-all duration-150"
|
||||
onClick={async () => {
|
||||
// Find the last user message
|
||||
const lastUserMessage = messages.reduceRight(
|
||||
(found, m) => found || (m.role === 'user' ? m : null),
|
||||
null as Message | null
|
||||
);
|
||||
if (lastUserMessage) {
|
||||
append(lastUserMessage);
|
||||
}
|
||||
}}
|
||||
>
|
||||
Retry Last Message
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</SearchView>
|
||||
)}
|
||||
<div className="block h-8" />
|
||||
</ScrollArea>
|
||||
)}
|
||||
|
||||
{error && (
|
||||
<div className="flex flex-col items-center justify-center p-4">
|
||||
<div className="text-red-700 dark:text-red-300 bg-red-400/50 p-3 rounded-lg mb-2">
|
||||
{error.message || 'Honk! Goose experienced an error while responding'}
|
||||
</div>
|
||||
<div
|
||||
className="px-3 py-2 mt-2 text-center whitespace-nowrap cursor-pointer text-textStandard border border-borderSubtle hover:bg-bgSubtle rounded-full inline-block transition-all duration-150"
|
||||
onClick={async () => {
|
||||
// Find the last user message
|
||||
const lastUserMessage = messages.reduceRight(
|
||||
(found, m) => found || (m.role === 'user' ? m : null),
|
||||
null as Message | null
|
||||
);
|
||||
if (lastUserMessage) {
|
||||
append(lastUserMessage);
|
||||
}
|
||||
}}
|
||||
>
|
||||
Retry Last Message
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<div className="block h-8" />
|
||||
</ScrollArea>
|
||||
)}
|
||||
<div className="relative p-4 pt-0 z-10 animate-[fadein_400ms_ease-in_forwards]">
|
||||
{isLoading && <LoadingGoose />}
|
||||
<ChatInput
|
||||
handleSubmit={handleSubmit}
|
||||
isLoading={isLoading}
|
||||
onStop={onStopGoose}
|
||||
commandHistory={commandHistory}
|
||||
initialValue={_input || (hasMessages ? _input : initialPrompt)}
|
||||
setView={setView}
|
||||
hasMessages={hasMessages}
|
||||
numTokens={sessionTokenCount}
|
||||
droppedFiles={droppedFiles}
|
||||
messages={messages}
|
||||
setMessages={setMessages}
|
||||
/>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
<div className="relative p-4 pt-0 z-10 animate-[fadein_400ms_ease-in_forwards]">
|
||||
{isLoading && <LoadingGoose />}
|
||||
<ChatInput
|
||||
handleSubmit={handleSubmit}
|
||||
isLoading={isLoading}
|
||||
onStop={onStopGoose}
|
||||
commandHistory={commandHistory}
|
||||
initialValue={_input || (hasMessages ? _input : initialPrompt)}
|
||||
setView={setView}
|
||||
hasMessages={hasMessages}
|
||||
numTokens={sessionTokenCount}
|
||||
droppedFiles={droppedFiles}
|
||||
messages={messages}
|
||||
setMessages={setMessages}
|
||||
/>
|
||||
</div>
|
||||
</Card>
|
||||
{showGame && <FlappyGoose onClose={() => setShowGame(false)} />}
|
||||
|
||||
{showGame && <FlappyGoose onClose={() => setShowGame(false)} />}
|
||||
|
||||
<SessionSummaryModal
|
||||
isOpen={isSummaryModalOpen}
|
||||
onClose={closeSummaryModal}
|
||||
onSave={(editedContent) => {
|
||||
updateSummary(editedContent);
|
||||
closeSummaryModal();
|
||||
}}
|
||||
summaryContent={summaryContent}
|
||||
/>
|
||||
</div>
|
||||
<SessionSummaryModal
|
||||
isOpen={isSummaryModalOpen}
|
||||
onClose={closeSummaryModal}
|
||||
onSave={(editedContent) => {
|
||||
updateSummary(editedContent);
|
||||
closeSummaryModal();
|
||||
}}
|
||||
summaryContent={summaryContent}
|
||||
/>
|
||||
</div>
|
||||
</CurrentModelContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -332,15 +332,13 @@ export default function RecipeEditor({ config }: RecipeEditorProps) {
|
||||
</div>
|
||||
{/* Action Buttons */}
|
||||
<div className="flex flex-col space-y-2 pt-1">
|
||||
{process.env.ALPHA && (
|
||||
<button
|
||||
onClick={() => setIsScheduleModalOpen(true)}
|
||||
disabled={!requiredFieldsAreFilled()}
|
||||
className="w-full h-[60px] rounded-none border-t text-gray-900 dark:text-white hover:bg-gray-50 dark:border-gray-600 text-lg font-medium"
|
||||
>
|
||||
Create Schedule from Recipe
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => setIsScheduleModalOpen(true)}
|
||||
disabled={!requiredFieldsAreFilled()}
|
||||
className="w-full h-[60px] rounded-none border-t text-gray-900 dark:text-white hover:bg-gray-50 dark:border-gray-600 text-lg font-medium"
|
||||
>
|
||||
Create Schedule from Recipe
|
||||
</button>
|
||||
<button
|
||||
onClick={() => {
|
||||
localStorage.removeItem('recipe_editor_extensions');
|
||||
|
||||
@@ -292,15 +292,13 @@ export default function MoreMenu({
|
||||
Session history
|
||||
</MenuButton>
|
||||
|
||||
{process.env.ALPHA && (
|
||||
<MenuButton
|
||||
onClick={() => setView('schedules')}
|
||||
subtitle="Manage scheduled runs"
|
||||
icon={<Time className="w-4 h-4" />}
|
||||
>
|
||||
Scheduler
|
||||
</MenuButton>
|
||||
)}
|
||||
<MenuButton
|
||||
onClick={() => setView('schedules')}
|
||||
subtitle="Manage scheduled runs"
|
||||
icon={<Time className="w-4 h-4" />}
|
||||
>
|
||||
Scheduler
|
||||
</MenuButton>
|
||||
|
||||
<MenuButton
|
||||
onClick={() => setIsGoosehintsModalOpen(true)}
|
||||
|
||||
@@ -9,7 +9,9 @@ import { Buffer } from 'buffer';
|
||||
import { Recipe } from '../../recipe';
|
||||
import ClockIcon from '../../assets/clock-icon.svg';
|
||||
|
||||
type FrequencyValue = 'once' | 'hourly' | 'daily' | 'weekly' | 'monthly';
|
||||
type FrequencyValue = 'once' | 'every' | 'daily' | 'weekly' | 'monthly';
|
||||
|
||||
type CustomIntervalUnit = 'minute' | 'hour' | 'day';
|
||||
|
||||
interface FrequencyOption {
|
||||
value: FrequencyValue;
|
||||
@@ -20,6 +22,7 @@ export interface NewSchedulePayload {
|
||||
id: string;
|
||||
recipe_source: string;
|
||||
cron: string;
|
||||
execution_mode?: string;
|
||||
}
|
||||
|
||||
interface CreateScheduleModalProps {
|
||||
@@ -61,14 +64,26 @@ interface CleanRecipe {
|
||||
contact?: string;
|
||||
metadata?: string;
|
||||
};
|
||||
schedule?: {
|
||||
foreground: boolean;
|
||||
fallback_to_background: boolean;
|
||||
window_title?: string;
|
||||
working_directory?: string;
|
||||
};
|
||||
}
|
||||
|
||||
const frequencies: FrequencyOption[] = [
|
||||
{ value: 'once', label: 'Once' },
|
||||
{ value: 'hourly', label: 'Hourly' },
|
||||
{ value: 'daily', label: 'Daily' },
|
||||
{ value: 'weekly', label: 'Weekly' },
|
||||
{ value: 'monthly', label: 'Monthly' },
|
||||
{ value: 'every', label: 'Every...' },
|
||||
{ value: 'daily', label: 'Daily (at specific time)' },
|
||||
{ value: 'weekly', label: 'Weekly (at specific time/days)' },
|
||||
{ value: 'monthly', label: 'Monthly (at specific time/day)' },
|
||||
];
|
||||
|
||||
const customIntervalUnits: { value: CustomIntervalUnit; label: string }[] = [
|
||||
{ value: 'minute', label: 'minute(s)' },
|
||||
{ value: 'hour', label: 'hour(s)' },
|
||||
{ value: 'day', label: 'day(s)' },
|
||||
];
|
||||
|
||||
const daysOfWeekOptions: { value: string; label: string }[] = [
|
||||
@@ -89,6 +104,7 @@ const checkboxInputClassName =
|
||||
'h-4 w-4 text-indigo-600 border-gray-300 dark:border-gray-600 rounded focus:ring-indigo-500 mr-2';
|
||||
|
||||
type SourceType = 'file' | 'deeplink';
|
||||
type ExecutionMode = 'background' | 'foreground';
|
||||
|
||||
// Function to parse deep link and extract recipe config
|
||||
function parseDeepLink(deepLink: string): Recipe | null {
|
||||
@@ -111,8 +127,8 @@ function parseDeepLink(deepLink: string): Recipe | null {
|
||||
}
|
||||
}
|
||||
|
||||
// Function to convert recipe to YAML
|
||||
function recipeToYaml(recipe: Recipe): string {
|
||||
// Function to convert recipe to YAML with schedule configuration
|
||||
function recipeToYaml(recipe: Recipe, executionMode: ExecutionMode): string {
|
||||
// Create a clean recipe object for YAML conversion
|
||||
const cleanRecipe: CleanRecipe = {
|
||||
title: recipe.title,
|
||||
@@ -230,6 +246,13 @@ function recipeToYaml(recipe: Recipe): string {
|
||||
cleanRecipe.author = recipe.author;
|
||||
}
|
||||
|
||||
// Add schedule configuration based on execution mode
|
||||
cleanRecipe.schedule = {
|
||||
foreground: executionMode === 'foreground',
|
||||
fallback_to_background: true, // Always allow fallback
|
||||
window_title: executionMode === 'foreground' ? `${recipe.title} - Scheduled` : undefined,
|
||||
};
|
||||
|
||||
return yaml.stringify(cleanRecipe);
|
||||
}
|
||||
|
||||
@@ -242,10 +265,13 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
}) => {
|
||||
const [scheduleId, setScheduleId] = useState<string>('');
|
||||
const [sourceType, setSourceType] = useState<SourceType>('file');
|
||||
const [executionMode, setExecutionMode] = useState<ExecutionMode>('background');
|
||||
const [recipeSourcePath, setRecipeSourcePath] = useState<string>('');
|
||||
const [deepLinkInput, setDeepLinkInput] = useState<string>('');
|
||||
const [parsedRecipe, setParsedRecipe] = useState<Recipe | null>(null);
|
||||
const [frequency, setFrequency] = useState<FrequencyValue>('daily');
|
||||
const [customIntervalValue, setCustomIntervalValue] = useState<number>(1);
|
||||
const [customIntervalUnit, setCustomIntervalUnit] = useState<CustomIntervalUnit>('minute');
|
||||
const [selectedDate, setSelectedDate] = useState<string>(
|
||||
() => new Date().toISOString().split('T')[0]
|
||||
);
|
||||
@@ -302,10 +328,13 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
const resetForm = () => {
|
||||
setScheduleId('');
|
||||
setSourceType('file');
|
||||
setExecutionMode('background');
|
||||
setRecipeSourcePath('');
|
||||
setDeepLinkInput('');
|
||||
setParsedRecipe(null);
|
||||
setFrequency('daily');
|
||||
setCustomIntervalValue(1);
|
||||
setCustomIntervalUnit('minute');
|
||||
setSelectedDate(new Date().toISOString().split('T')[0]);
|
||||
setSelectedTime('09:00');
|
||||
setSelectedMinute('0');
|
||||
@@ -336,14 +365,15 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
if (isNaN(parseInt(minutePart)) || isNaN(parseInt(hourPart))) {
|
||||
return 'Invalid time format.';
|
||||
}
|
||||
const secondsPart = '0';
|
||||
|
||||
// Temporal uses 5-field cron: minute hour day month dayofweek (no seconds)
|
||||
switch (frequency) {
|
||||
case 'once':
|
||||
if (selectedDate && selectedTime) {
|
||||
try {
|
||||
const dateObj = new Date(`${selectedDate}T${selectedTime}`);
|
||||
if (isNaN(dateObj.getTime())) return "Invalid date/time for 'once'.";
|
||||
return `${secondsPart} ${dateObj.getMinutes()} ${dateObj.getHours()} ${dateObj.getDate()} ${
|
||||
return `${dateObj.getMinutes()} ${dateObj.getHours()} ${dateObj.getDate()} ${
|
||||
dateObj.getMonth() + 1
|
||||
} *`;
|
||||
} catch (e) {
|
||||
@@ -351,15 +381,23 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
}
|
||||
}
|
||||
return 'Date and Time are required for "Once" frequency.';
|
||||
case 'hourly': {
|
||||
const sMinute = parseInt(selectedMinute, 10);
|
||||
if (isNaN(sMinute) || sMinute < 0 || sMinute > 59) {
|
||||
return 'Invalid minute (0-59) for hourly frequency.';
|
||||
case 'every': {
|
||||
if (customIntervalValue <= 0) {
|
||||
return 'Custom interval value must be greater than 0.';
|
||||
}
|
||||
switch (customIntervalUnit) {
|
||||
case 'minute':
|
||||
return `*/${customIntervalValue} * * * *`;
|
||||
case 'hour':
|
||||
return `0 */${customIntervalValue} * * *`;
|
||||
case 'day':
|
||||
return `0 0 */${customIntervalValue} * *`;
|
||||
default:
|
||||
return 'Invalid custom interval unit.';
|
||||
}
|
||||
return `${secondsPart} ${sMinute} * * * *`;
|
||||
}
|
||||
case 'daily':
|
||||
return `${secondsPart} ${minutePart} ${hourPart} * * *`;
|
||||
return `${minutePart} ${hourPart} * * *`;
|
||||
case 'weekly': {
|
||||
if (selectedDaysOfWeek.size === 0) {
|
||||
return 'Select at least one day for weekly frequency.';
|
||||
@@ -367,14 +405,14 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
const days = Array.from(selectedDaysOfWeek)
|
||||
.sort((a, b) => parseInt(a) - parseInt(b))
|
||||
.join(',');
|
||||
return `${secondsPart} ${minutePart} ${hourPart} * * ${days}`;
|
||||
return `${minutePart} ${hourPart} * * ${days}`;
|
||||
}
|
||||
case 'monthly': {
|
||||
const sDayOfMonth = parseInt(selectedDayOfMonth, 10);
|
||||
if (isNaN(sDayOfMonth) || sDayOfMonth < 1 || sDayOfMonth > 31) {
|
||||
return 'Invalid day of month (1-31) for monthly frequency.';
|
||||
}
|
||||
return `${secondsPart} ${minutePart} ${hourPart} ${sDayOfMonth} * *`;
|
||||
return `${minutePart} ${hourPart} ${sDayOfMonth} * *`;
|
||||
}
|
||||
default:
|
||||
return 'Invalid frequency selected.';
|
||||
@@ -398,6 +436,8 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
}
|
||||
}, [
|
||||
frequency,
|
||||
customIntervalValue,
|
||||
customIntervalUnit,
|
||||
selectedDate,
|
||||
selectedTime,
|
||||
selectedMinute,
|
||||
@@ -446,7 +486,7 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
|
||||
try {
|
||||
// Convert recipe to YAML and save to a temporary file
|
||||
const yamlContent = recipeToYaml(parsedRecipe);
|
||||
const yamlContent = recipeToYaml(parsedRecipe, executionMode);
|
||||
console.log('Generated YAML content:', yamlContent); // Debug log
|
||||
const tempFileName = `schedule-${scheduleId}-${Date.now()}.yaml`;
|
||||
const tempDir = window.electron.getConfig().GOOSE_WORKING_DIR || '.';
|
||||
@@ -486,6 +526,7 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
id: scheduleId.trim(),
|
||||
recipe_source: finalRecipeSource,
|
||||
cron: derivedCronExpression,
|
||||
execution_mode: executionMode,
|
||||
};
|
||||
|
||||
await onSubmit(newSchedulePayload);
|
||||
@@ -587,6 +628,19 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
Selected: {recipeSourcePath}
|
||||
</p>
|
||||
)}
|
||||
{executionMode === 'foreground' && (
|
||||
<div className="mt-2 p-2 bg-blue-50 dark:bg-blue-900/20 rounded-md border border-blue-200 dark:border-blue-800">
|
||||
<p className="text-xs text-blue-700 dark:text-blue-300">
|
||||
<strong>Note:</strong> For foreground execution with YAML files, add this to
|
||||
your recipe:
|
||||
</p>
|
||||
<pre className="text-xs text-blue-600 dark:text-blue-400 mt-1 font-mono bg-blue-100 dark:bg-blue-900/40 p-1 rounded">
|
||||
{`schedule:
|
||||
foreground: true
|
||||
fallback_to_background: true`}
|
||||
</pre>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -617,6 +671,50 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label className={modalLabelClassName}>Execution Mode:</label>
|
||||
<div className="space-y-2">
|
||||
<div className="flex bg-gray-100 dark:bg-gray-700 rounded-full p-1">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setExecutionMode('background')}
|
||||
className={`flex-1 px-4 py-2 text-sm font-medium rounded-full transition-all ${
|
||||
executionMode === 'background'
|
||||
? 'bg-white dark:bg-gray-800 text-gray-900 dark:text-white shadow-sm'
|
||||
: 'text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-white'
|
||||
}`}
|
||||
>
|
||||
Background
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setExecutionMode('foreground')}
|
||||
className={`flex-1 px-4 py-2 text-sm font-medium rounded-full transition-all ${
|
||||
executionMode === 'foreground'
|
||||
? 'bg-white dark:bg-gray-800 text-gray-900 dark:text-white shadow-sm'
|
||||
: 'text-gray-600 dark:text-gray-400 hover:text-gray-900 dark:hover:text-white'
|
||||
}`}
|
||||
>
|
||||
Foreground
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="text-xs text-gray-500 dark:text-gray-400 px-2">
|
||||
{executionMode === 'background' ? (
|
||||
<p>
|
||||
<strong>Background:</strong> Runs silently in the background without opening a
|
||||
window. Results are saved to session storage.
|
||||
</p>
|
||||
) : (
|
||||
<p>
|
||||
<strong>Foreground:</strong> Opens in a desktop window when the Goose app is
|
||||
running. Falls back to background if the app is not available.
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label htmlFor="frequency-modal" className={modalLabelClassName}>
|
||||
Frequency:
|
||||
@@ -633,6 +731,43 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
/>
|
||||
</div>
|
||||
|
||||
{frequency === 'every' && (
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
<div>
|
||||
<label htmlFor="customIntervalValue-modal" className={modalLabelClassName}>
|
||||
Every:
|
||||
</label>
|
||||
<Input
|
||||
type="number"
|
||||
id="customIntervalValue-modal"
|
||||
min="1"
|
||||
max="999"
|
||||
value={customIntervalValue}
|
||||
onChange={(e) => setCustomIntervalValue(parseInt(e.target.value) || 1)}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label htmlFor="customIntervalUnit-modal" className={modalLabelClassName}>
|
||||
Unit:
|
||||
</label>
|
||||
<Select
|
||||
instanceId="custom-interval-unit-select-modal"
|
||||
options={customIntervalUnits}
|
||||
value={customIntervalUnits.find((u) => u.value === customIntervalUnit)}
|
||||
onChange={(newValue: unknown) => {
|
||||
const selectedUnit = newValue as {
|
||||
value: CustomIntervalUnit;
|
||||
label: string;
|
||||
} | null;
|
||||
if (selectedUnit) setCustomIntervalUnit(selectedUnit.value);
|
||||
}}
|
||||
placeholder="Select unit..."
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{frequency === 'once' && (
|
||||
<>
|
||||
<div>
|
||||
@@ -661,22 +796,6 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
{frequency === 'hourly' && (
|
||||
<div>
|
||||
<label htmlFor="hourlyMinute-modal" className={modalLabelClassName}>
|
||||
Minute of the hour (0-59):
|
||||
</label>
|
||||
<Input
|
||||
type="number"
|
||||
id="hourlyMinute-modal"
|
||||
min="0"
|
||||
max="59"
|
||||
value={selectedMinute}
|
||||
onChange={(e) => setSelectedMinute(e.target.value)}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{(frequency === 'daily' || frequency === 'weekly' || frequency === 'monthly') && (
|
||||
<div>
|
||||
<label htmlFor="commonTime-modal" className={modalLabelClassName}>
|
||||
@@ -736,7 +855,9 @@ export const CreateScheduleModal: React.FC<CreateScheduleModalProps> = ({
|
||||
<p className={`${cronPreviewTextColor} mt-2`}>
|
||||
<b>Human Readable:</b> {readableCronExpression}
|
||||
</p>
|
||||
<p className={cronPreviewTextColor}>Syntax: S M H D M DoW. (S=0, DoW: 0/7=Sun)</p>
|
||||
<p className={cronPreviewTextColor}>
|
||||
Syntax: M H D M DoW (M=minute, H=hour, D=day, M=month, DoW=day of week: 0/7=Sun)
|
||||
</p>
|
||||
{frequency === 'once' && (
|
||||
<p className={cronPreviewSpecialNoteColor}>
|
||||
Note: "Once" schedules recur annually. True one-time tasks may need backend deletion
|
||||
|
||||
@@ -6,7 +6,9 @@ import { Select } from '../ui/Select';
|
||||
import { ScheduledJob } from '../../schedule';
|
||||
import cronstrue from 'cronstrue';
|
||||
|
||||
type FrequencyValue = 'once' | 'hourly' | 'daily' | 'weekly' | 'monthly';
|
||||
type FrequencyValue = 'once' | 'every' | 'daily' | 'weekly' | 'monthly';
|
||||
|
||||
type CustomIntervalUnit = 'minute' | 'hour' | 'day';
|
||||
|
||||
interface FrequencyOption {
|
||||
value: FrequencyValue;
|
||||
@@ -24,10 +26,16 @@ interface EditScheduleModalProps {
|
||||
|
||||
const frequencies: FrequencyOption[] = [
|
||||
{ value: 'once', label: 'Once' },
|
||||
{ value: 'hourly', label: 'Hourly' },
|
||||
{ value: 'daily', label: 'Daily' },
|
||||
{ value: 'weekly', label: 'Weekly' },
|
||||
{ value: 'monthly', label: 'Monthly' },
|
||||
{ value: 'every', label: 'Every...' },
|
||||
{ value: 'daily', label: 'Daily (at specific time)' },
|
||||
{ value: 'weekly', label: 'Weekly (at specific time/days)' },
|
||||
{ value: 'monthly', label: 'Monthly (at specific time/day)' },
|
||||
];
|
||||
|
||||
const customIntervalUnits: { value: CustomIntervalUnit; label: string }[] = [
|
||||
{ value: 'minute', label: 'minute(s)' },
|
||||
{ value: 'hour', label: 'hour(s)' },
|
||||
{ value: 'day', label: 'day(s)' },
|
||||
];
|
||||
|
||||
const daysOfWeekOptions: { value: string; label: string }[] = [
|
||||
@@ -50,22 +58,59 @@ const checkboxInputClassName =
|
||||
// Helper function to parse cron expression and determine frequency
|
||||
const parseCronExpression = (cron: string) => {
|
||||
const parts = cron.split(' ');
|
||||
if (parts.length !== 6) return null;
|
||||
if (parts.length !== 5 && parts.length !== 6) return null;
|
||||
|
||||
const [_seconds, minutes, hours, dayOfMonth, month, dayOfWeek] = parts;
|
||||
// Handle both 5-field and 6-field cron expressions
|
||||
const [minutes, hours, dayOfMonth, month, dayOfWeek] =
|
||||
parts.length === 5 ? parts : parts.slice(1); // Skip seconds if present
|
||||
|
||||
// Check for specific patterns
|
||||
if (dayOfMonth !== '*' && month !== '*' && dayOfWeek === '*') {
|
||||
return { frequency: 'once' as FrequencyValue, minutes, hours, dayOfMonth, month };
|
||||
}
|
||||
// Check for custom intervals (every X minutes/hours/days)
|
||||
if (
|
||||
minutes !== '*' &&
|
||||
minutes.startsWith('*/') &&
|
||||
hours === '*' &&
|
||||
dayOfMonth === '*' &&
|
||||
month === '*' &&
|
||||
dayOfWeek === '*'
|
||||
) {
|
||||
return { frequency: 'hourly' as FrequencyValue, minutes };
|
||||
const intervalValue = parseInt(minutes.substring(2));
|
||||
return {
|
||||
frequency: 'every' as FrequencyValue,
|
||||
customIntervalValue: intervalValue,
|
||||
customIntervalUnit: 'minute' as CustomIntervalUnit,
|
||||
};
|
||||
}
|
||||
if (
|
||||
minutes === '0' &&
|
||||
hours.startsWith('*/') &&
|
||||
dayOfMonth === '*' &&
|
||||
month === '*' &&
|
||||
dayOfWeek === '*'
|
||||
) {
|
||||
const intervalValue = parseInt(hours.substring(2));
|
||||
return {
|
||||
frequency: 'every' as FrequencyValue,
|
||||
customIntervalValue: intervalValue,
|
||||
customIntervalUnit: 'hour' as CustomIntervalUnit,
|
||||
};
|
||||
}
|
||||
if (
|
||||
minutes === '0' &&
|
||||
hours === '0' &&
|
||||
dayOfMonth.startsWith('*/') &&
|
||||
month === '*' &&
|
||||
dayOfWeek === '*'
|
||||
) {
|
||||
const intervalValue = parseInt(dayOfMonth.substring(2));
|
||||
return {
|
||||
frequency: 'every' as FrequencyValue,
|
||||
customIntervalValue: intervalValue,
|
||||
customIntervalUnit: 'day' as CustomIntervalUnit,
|
||||
};
|
||||
}
|
||||
|
||||
// Check for specific patterns
|
||||
if (dayOfMonth !== '*' && month !== '*' && dayOfWeek === '*') {
|
||||
return { frequency: 'once' as FrequencyValue, minutes, hours, dayOfMonth, month };
|
||||
}
|
||||
if (
|
||||
minutes !== '*' &&
|
||||
@@ -107,11 +152,13 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
apiErrorExternally = null,
|
||||
}) => {
|
||||
const [frequency, setFrequency] = useState<FrequencyValue>('daily');
|
||||
const [customIntervalValue, setCustomIntervalValue] = useState<number>(1);
|
||||
const [customIntervalUnit, setCustomIntervalUnit] = useState<CustomIntervalUnit>('minute');
|
||||
const [selectedDate, setSelectedDate] = useState<string>(
|
||||
() => new Date().toISOString().split('T')[0]
|
||||
);
|
||||
const [selectedTime, setSelectedTime] = useState<string>('09:00');
|
||||
const [selectedMinute, setSelectedMinute] = useState<string>('0');
|
||||
const [selectedMinute] = useState<string>('0');
|
||||
const [selectedDaysOfWeek, setSelectedDaysOfWeek] = useState<Set<string>>(new Set(['1']));
|
||||
const [selectedDayOfMonth, setSelectedDayOfMonth] = useState<string>('1');
|
||||
const [derivedCronExpression, setDerivedCronExpression] = useState<string>('');
|
||||
@@ -135,8 +182,13 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
`${parsed.hours?.padStart(2, '0')}:${parsed.minutes?.padStart(2, '0')}`
|
||||
);
|
||||
break;
|
||||
case 'hourly':
|
||||
setSelectedMinute(parsed.minutes || '0');
|
||||
case 'every':
|
||||
if (parsed.customIntervalValue) {
|
||||
setCustomIntervalValue(parsed.customIntervalValue);
|
||||
}
|
||||
if (parsed.customIntervalUnit) {
|
||||
setCustomIntervalUnit(parsed.customIntervalUnit);
|
||||
}
|
||||
break;
|
||||
case 'daily':
|
||||
setSelectedTime(
|
||||
@@ -177,14 +229,13 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
if (isNaN(parseInt(minutePart)) || isNaN(parseInt(hourPart))) {
|
||||
return 'Invalid time format.';
|
||||
}
|
||||
const secondsPart = '0';
|
||||
switch (frequency) {
|
||||
case 'once':
|
||||
if (selectedDate && selectedTime) {
|
||||
try {
|
||||
const dateObj = new Date(`${selectedDate}T${selectedTime}`);
|
||||
if (isNaN(dateObj.getTime())) return "Invalid date/time for 'once'.";
|
||||
return `${secondsPart} ${dateObj.getMinutes()} ${dateObj.getHours()} ${dateObj.getDate()} ${
|
||||
return `${dateObj.getMinutes()} ${dateObj.getHours()} ${dateObj.getDate()} ${
|
||||
dateObj.getMonth() + 1
|
||||
} *`;
|
||||
} catch (e) {
|
||||
@@ -192,15 +243,23 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
}
|
||||
}
|
||||
return 'Date and Time are required for "Once" frequency.';
|
||||
case 'hourly': {
|
||||
const sMinute = parseInt(selectedMinute, 10);
|
||||
if (isNaN(sMinute) || sMinute < 0 || sMinute > 59) {
|
||||
return 'Invalid minute (0-59) for hourly frequency.';
|
||||
case 'every': {
|
||||
if (customIntervalValue <= 0) {
|
||||
return 'Custom interval value must be greater than 0.';
|
||||
}
|
||||
switch (customIntervalUnit) {
|
||||
case 'minute':
|
||||
return `*/${customIntervalValue} * * * *`;
|
||||
case 'hour':
|
||||
return `0 */${customIntervalValue} * * *`;
|
||||
case 'day':
|
||||
return `0 0 */${customIntervalValue} * *`;
|
||||
default:
|
||||
return 'Invalid custom interval unit.';
|
||||
}
|
||||
return `${secondsPart} ${sMinute} * * * *`;
|
||||
}
|
||||
case 'daily':
|
||||
return `${secondsPart} ${minutePart} ${hourPart} * * *`;
|
||||
return `${minutePart} ${hourPart} * * *`;
|
||||
case 'weekly': {
|
||||
if (selectedDaysOfWeek.size === 0) {
|
||||
return 'Select at least one day for weekly frequency.';
|
||||
@@ -208,14 +267,14 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
const days = Array.from(selectedDaysOfWeek)
|
||||
.sort((a, b) => parseInt(a) - parseInt(b))
|
||||
.join(',');
|
||||
return `${secondsPart} ${minutePart} ${hourPart} * * ${days}`;
|
||||
return `${minutePart} ${hourPart} * * ${days}`;
|
||||
}
|
||||
case 'monthly': {
|
||||
const sDayOfMonth = parseInt(selectedDayOfMonth, 10);
|
||||
if (isNaN(sDayOfMonth) || sDayOfMonth < 1 || sDayOfMonth > 31) {
|
||||
return 'Invalid day of month (1-31) for monthly frequency.';
|
||||
}
|
||||
return `${secondsPart} ${minutePart} ${hourPart} ${sDayOfMonth} * *`;
|
||||
return `${minutePart} ${hourPart} ${sDayOfMonth} * *`;
|
||||
}
|
||||
default:
|
||||
return 'Invalid frequency selected.';
|
||||
@@ -239,6 +298,8 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
}
|
||||
}, [
|
||||
frequency,
|
||||
customIntervalValue,
|
||||
customIntervalUnit,
|
||||
selectedDate,
|
||||
selectedTime,
|
||||
selectedMinute,
|
||||
@@ -327,6 +388,43 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
/>
|
||||
</div>
|
||||
|
||||
{frequency === 'every' && (
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
<div>
|
||||
<label htmlFor="customIntervalValue-modal" className={modalLabelClassName}>
|
||||
Every:
|
||||
</label>
|
||||
<Input
|
||||
type="number"
|
||||
id="customIntervalValue-modal"
|
||||
min="1"
|
||||
max="999"
|
||||
value={customIntervalValue}
|
||||
onChange={(e) => setCustomIntervalValue(parseInt(e.target.value) || 1)}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label htmlFor="customIntervalUnit-modal" className={modalLabelClassName}>
|
||||
Unit:
|
||||
</label>
|
||||
<Select
|
||||
instanceId="custom-interval-unit-select-modal"
|
||||
options={customIntervalUnits}
|
||||
value={customIntervalUnits.find((u) => u.value === customIntervalUnit)}
|
||||
onChange={(newValue: unknown) => {
|
||||
const selectedUnit = newValue as {
|
||||
value: CustomIntervalUnit;
|
||||
label: string;
|
||||
} | null;
|
||||
if (selectedUnit) setCustomIntervalUnit(selectedUnit.value);
|
||||
}}
|
||||
placeholder="Select unit..."
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{frequency === 'once' && (
|
||||
<>
|
||||
<div>
|
||||
@@ -355,22 +453,6 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
{frequency === 'hourly' && (
|
||||
<div>
|
||||
<label htmlFor="hourlyMinute-modal" className={modalLabelClassName}>
|
||||
Minute of the hour (0-59):
|
||||
</label>
|
||||
<Input
|
||||
type="number"
|
||||
id="hourlyMinute-modal"
|
||||
min="0"
|
||||
max="59"
|
||||
value={selectedMinute}
|
||||
onChange={(e) => setSelectedMinute(e.target.value)}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{(frequency === 'daily' || frequency === 'weekly' || frequency === 'monthly') && (
|
||||
<div>
|
||||
<label htmlFor="commonTime-modal" className={modalLabelClassName}>
|
||||
@@ -430,7 +512,9 @@ export const EditScheduleModal: React.FC<EditScheduleModalProps> = ({
|
||||
<p className={`${cronPreviewTextColor} mt-2`}>
|
||||
<b>Human Readable:</b> {readableCronExpression}
|
||||
</p>
|
||||
<p className={cronPreviewTextColor}>Syntax: S M H D M DoW. (S=0, DoW: 0/7=Sun)</p>
|
||||
<p className={cronPreviewTextColor}>
|
||||
Syntax: M H D M DoW (M=minute, H=hour, D=day, M=month, DoW=day of week: 0/7=Sun)
|
||||
</p>
|
||||
{frequency === 'once' && (
|
||||
<p className={cronPreviewSpecialNoteColor}>
|
||||
Note: "Once" schedules recur annually. True one-time tasks may need backend deletion
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useState, useEffect, useCallback } from 'react';
|
||||
import React, { useState, useEffect, useCallback, useMemo } from 'react';
|
||||
import { Button } from '../ui/button';
|
||||
import { ScrollArea } from '../ui/scroll-area';
|
||||
import BackButton from '../ui/BackButton';
|
||||
@@ -21,6 +21,7 @@ import { EditScheduleModal } from './EditScheduleModal';
|
||||
import { toastError, toastSuccess } from '../../toasts';
|
||||
import { Loader2, Pause, Play, Edit, Square, Eye } from 'lucide-react';
|
||||
import cronstrue from 'cronstrue';
|
||||
import { formatToLocalDateWithTimezone } from '../../utils/date';
|
||||
|
||||
interface ScheduleSessionMeta {
|
||||
id: string;
|
||||
@@ -42,6 +43,95 @@ interface ScheduleDetailViewProps {
|
||||
onNavigateBack: () => void;
|
||||
}
|
||||
|
||||
// Memoized ScheduleInfoCard component to prevent unnecessary re-renders of static content
|
||||
const ScheduleInfoCard = React.memo<{
|
||||
scheduleDetails: ScheduledJob;
|
||||
}>(({ scheduleDetails }) => {
|
||||
const readableCron = useMemo(() => {
|
||||
try {
|
||||
return cronstrue.toString(scheduleDetails.cron);
|
||||
} catch (e) {
|
||||
console.warn(`Could not parse cron string "${scheduleDetails.cron}":`, e);
|
||||
return scheduleDetails.cron;
|
||||
}
|
||||
}, [scheduleDetails.cron]);
|
||||
|
||||
const formattedLastRun = useMemo(() => {
|
||||
return formatToLocalDateWithTimezone(scheduleDetails.last_run);
|
||||
}, [scheduleDetails.last_run]);
|
||||
|
||||
const formattedProcessStartTime = useMemo(() => {
|
||||
return scheduleDetails.process_start_time
|
||||
? formatToLocalDateWithTimezone(scheduleDetails.process_start_time)
|
||||
: null;
|
||||
}, [scheduleDetails.process_start_time]);
|
||||
|
||||
return (
|
||||
<Card className="p-4 bg-white dark:bg-gray-800 shadow mb-6">
|
||||
<div className="space-y-2">
|
||||
<div className="flex flex-col md:flex-row md:items-center justify-between">
|
||||
<h3 className="text-base font-semibold text-gray-900 dark:text-white">
|
||||
{scheduleDetails.id}
|
||||
</h3>
|
||||
<div className="mt-2 md:mt-0 flex items-center gap-2">
|
||||
{scheduleDetails.currently_running && (
|
||||
<div className="text-sm text-green-500 dark:text-green-400 font-semibold flex items-center">
|
||||
<span className="inline-block w-2 h-2 bg-green-500 dark:bg-green-400 rounded-full mr-1 animate-pulse"></span>
|
||||
Currently Running
|
||||
</div>
|
||||
)}
|
||||
{scheduleDetails.paused && (
|
||||
<div className="text-sm text-orange-500 dark:text-orange-400 font-semibold flex items-center">
|
||||
<Pause className="w-3 h-3 mr-1" />
|
||||
Paused
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Schedule:</span> {readableCron}
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Cron Expression:</span> {scheduleDetails.cron}
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Recipe Source:</span> {scheduleDetails.source}
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Last Run:</span> {formattedLastRun}
|
||||
</p>
|
||||
{scheduleDetails.execution_mode && (
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Execution Mode:</span>{' '}
|
||||
<span
|
||||
className={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${
|
||||
scheduleDetails.execution_mode === 'foreground'
|
||||
? 'bg-blue-100 text-blue-800 dark:bg-blue-900/30 dark:text-blue-300'
|
||||
: 'bg-gray-100 text-gray-800 dark:bg-gray-800 dark:text-gray-300'
|
||||
}`}
|
||||
>
|
||||
{scheduleDetails.execution_mode === 'foreground' ? '🖥️ Foreground' : '⚡ Background'}
|
||||
</span>
|
||||
</p>
|
||||
)}
|
||||
{scheduleDetails.currently_running && scheduleDetails.current_session_id && (
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Current Session:</span>{' '}
|
||||
{scheduleDetails.current_session_id}
|
||||
</p>
|
||||
)}
|
||||
{scheduleDetails.currently_running && formattedProcessStartTime && (
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Process Started:</span> {formattedProcessStartTime}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</Card>
|
||||
);
|
||||
});
|
||||
|
||||
ScheduleInfoCard.displayName = 'ScheduleInfoCard';
|
||||
|
||||
const ScheduleDetailView: React.FC<ScheduleDetailViewProps> = ({ scheduleId, onNavigateBack }) => {
|
||||
const [sessions, setSessions] = useState<ScheduleSessionMeta[]>([]);
|
||||
const [isLoadingSessions, setIsLoadingSessions] = useState(false);
|
||||
@@ -71,10 +161,14 @@ const ScheduleDetailView: React.FC<ScheduleDetailViewProps> = ({ scheduleId, onN
|
||||
setIsLoadingSessions(true);
|
||||
setSessionsError(null);
|
||||
try {
|
||||
const fetchedSessions = await getScheduleSessions(sId, 20); // MODIFIED
|
||||
// Assuming ScheduleSession from ../../schedule can be cast or mapped to ScheduleSessionMeta
|
||||
// You may need to transform/map fields if they differ significantly
|
||||
setSessions(fetchedSessions as ScheduleSessionMeta[]);
|
||||
const fetchedSessions = await getScheduleSessions(sId, 20);
|
||||
setSessions((prevSessions) => {
|
||||
// Only update if sessions actually changed to prevent unnecessary re-renders
|
||||
if (JSON.stringify(prevSessions) !== JSON.stringify(fetchedSessions)) {
|
||||
return fetchedSessions as ScheduleSessionMeta[];
|
||||
}
|
||||
return prevSessions;
|
||||
});
|
||||
} catch (err) {
|
||||
console.error('Failed to fetch schedule sessions:', err);
|
||||
setSessionsError(err instanceof Error ? err.message : 'Failed to fetch schedule sessions');
|
||||
@@ -84,21 +178,26 @@ const ScheduleDetailView: React.FC<ScheduleDetailViewProps> = ({ scheduleId, onN
|
||||
}, []);
|
||||
|
||||
const fetchScheduleDetails = useCallback(
|
||||
async (sId: string) => {
|
||||
async (sId: string, isRefresh = false) => {
|
||||
if (!sId) return;
|
||||
setIsLoadingSchedule(true);
|
||||
if (!isRefresh) setIsLoadingSchedule(true);
|
||||
setScheduleError(null);
|
||||
try {
|
||||
const allSchedules = await listSchedules();
|
||||
const schedule = allSchedules.find((s) => s.id === sId);
|
||||
if (schedule) {
|
||||
// Only reset runNowLoading if we explicitly killed the job
|
||||
// This prevents interfering with natural job completion
|
||||
if (!schedule.currently_running && runNowLoading && jobWasKilled) {
|
||||
setRunNowLoading(false);
|
||||
setJobWasKilled(false); // Reset the flag
|
||||
}
|
||||
setScheduleDetails(schedule);
|
||||
setScheduleDetails((prevDetails) => {
|
||||
// Only update if schedule details actually changed
|
||||
if (!prevDetails || JSON.stringify(prevDetails) !== JSON.stringify(schedule)) {
|
||||
// Only reset runNowLoading if we explicitly killed the job
|
||||
if (!schedule.currently_running && runNowLoading && jobWasKilled) {
|
||||
setRunNowLoading(false);
|
||||
setJobWasKilled(false);
|
||||
}
|
||||
return schedule;
|
||||
}
|
||||
return prevDetails;
|
||||
});
|
||||
} else {
|
||||
setScheduleError('Schedule not found');
|
||||
}
|
||||
@@ -106,21 +205,12 @@ const ScheduleDetailView: React.FC<ScheduleDetailViewProps> = ({ scheduleId, onN
|
||||
console.error('Failed to fetch schedule details:', err);
|
||||
setScheduleError(err instanceof Error ? err.message : 'Failed to fetch schedule details');
|
||||
} finally {
|
||||
setIsLoadingSchedule(false);
|
||||
if (!isRefresh) setIsLoadingSchedule(false);
|
||||
}
|
||||
},
|
||||
[runNowLoading, jobWasKilled]
|
||||
);
|
||||
|
||||
const getReadableCron = (cronString: string) => {
|
||||
try {
|
||||
return cronstrue.toString(cronString);
|
||||
} catch (e) {
|
||||
console.warn(`Could not parse cron string "${cronString}":`, e);
|
||||
return cronString;
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (scheduleId && !selectedSessionDetails) {
|
||||
fetchScheduleSessions(scheduleId);
|
||||
@@ -289,25 +379,42 @@ const ScheduleDetailView: React.FC<ScheduleDetailViewProps> = ({ scheduleId, onN
|
||||
}
|
||||
};
|
||||
|
||||
// Add a periodic refresh for schedule details to keep the running status up to date
|
||||
// Optimized periodic refresh for schedule details to keep the running status up to date
|
||||
useEffect(() => {
|
||||
if (!scheduleId) return;
|
||||
|
||||
// Initial fetch
|
||||
fetchScheduleDetails(scheduleId);
|
||||
|
||||
// Set up periodic refresh every 5 seconds
|
||||
// Set up periodic refresh every 8 seconds (longer to reduce flashing)
|
||||
const intervalId = setInterval(() => {
|
||||
if (scheduleId) {
|
||||
fetchScheduleDetails(scheduleId);
|
||||
if (
|
||||
scheduleId &&
|
||||
!selectedSessionDetails &&
|
||||
!runNowLoading &&
|
||||
!pauseUnpauseLoading &&
|
||||
!killJobLoading &&
|
||||
!inspectJobLoading &&
|
||||
!isEditSubmitting
|
||||
) {
|
||||
fetchScheduleDetails(scheduleId, true); // Pass true to indicate this is a refresh
|
||||
}
|
||||
}, 5000);
|
||||
}, 8000);
|
||||
|
||||
// Clean up on unmount or when scheduleId changes
|
||||
return () => {
|
||||
clearInterval(intervalId);
|
||||
};
|
||||
}, [scheduleId, fetchScheduleDetails]);
|
||||
}, [
|
||||
scheduleId,
|
||||
fetchScheduleDetails,
|
||||
selectedSessionDetails,
|
||||
runNowLoading,
|
||||
pauseUnpauseLoading,
|
||||
killJobLoading,
|
||||
inspectJobLoading,
|
||||
isEditSubmitting,
|
||||
]);
|
||||
|
||||
// Monitor schedule state changes and reset loading states appropriately
|
||||
useEffect(() => {
|
||||
@@ -422,57 +529,7 @@ const ScheduleDetailView: React.FC<ScheduleDetailViewProps> = ({ scheduleId, onN
|
||||
</p>
|
||||
)}
|
||||
{!isLoadingSchedule && !scheduleError && scheduleDetails && (
|
||||
<Card className="p-4 bg-white dark:bg-gray-800 shadow mb-6">
|
||||
<div className="space-y-2">
|
||||
<div className="flex flex-col md:flex-row md:items-center justify-between">
|
||||
<h3 className="text-base font-semibold text-gray-900 dark:text-white">
|
||||
{scheduleDetails.id}
|
||||
</h3>
|
||||
<div className="mt-2 md:mt-0 flex items-center gap-2">
|
||||
{scheduleDetails.currently_running && (
|
||||
<div className="text-sm text-green-500 dark:text-green-400 font-semibold flex items-center">
|
||||
<span className="inline-block w-2 h-2 bg-green-500 dark:bg-green-400 rounded-full mr-1 animate-pulse"></span>
|
||||
Currently Running
|
||||
</div>
|
||||
)}
|
||||
{scheduleDetails.paused && (
|
||||
<div className="text-sm text-orange-500 dark:text-orange-400 font-semibold flex items-center">
|
||||
<Pause className="w-3 h-3 mr-1" />
|
||||
Paused
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Schedule:</span>{' '}
|
||||
{getReadableCron(scheduleDetails.cron)}
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Cron Expression:</span> {scheduleDetails.cron}
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Recipe Source:</span> {scheduleDetails.source}
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Last Run:</span>{' '}
|
||||
{scheduleDetails.last_run
|
||||
? new Date(scheduleDetails.last_run).toLocaleString()
|
||||
: 'Never'}
|
||||
</p>
|
||||
{scheduleDetails.currently_running && scheduleDetails.current_session_id && (
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Current Session:</span>{' '}
|
||||
{scheduleDetails.current_session_id}
|
||||
</p>
|
||||
)}
|
||||
{scheduleDetails.currently_running && scheduleDetails.process_start_time && (
|
||||
<p className="text-sm text-gray-600 dark:text-gray-300">
|
||||
<span className="font-semibold">Process Started:</span>{' '}
|
||||
{new Date(scheduleDetails.process_start_time).toLocaleString()}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</Card>
|
||||
<ScheduleInfoCard scheduleDetails={scheduleDetails} />
|
||||
)}
|
||||
</section>
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import React, { useState, useEffect, useCallback, useMemo } from 'react';
|
||||
import {
|
||||
listSchedules,
|
||||
createSchedule,
|
||||
@@ -23,11 +23,207 @@ import ScheduleDetailView from './ScheduleDetailView';
|
||||
import { toastError, toastSuccess } from '../../toasts';
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '../ui/popover';
|
||||
import cronstrue from 'cronstrue';
|
||||
import { formatToLocalDateWithTimezone } from '../../utils/date';
|
||||
|
||||
interface SchedulesViewProps {
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
// Memoized ScheduleCard component to prevent unnecessary re-renders
|
||||
const ScheduleCard = React.memo<{
|
||||
job: ScheduledJob;
|
||||
onNavigateToDetail: (id: string) => void;
|
||||
onEdit: (job: ScheduledJob) => void;
|
||||
onPause: (id: string) => void;
|
||||
onUnpause: (id: string) => void;
|
||||
onKill: (id: string) => void;
|
||||
onInspect: (id: string) => void;
|
||||
onDelete: (id: string) => void;
|
||||
isPausing: boolean;
|
||||
isDeleting: boolean;
|
||||
isKilling: boolean;
|
||||
isInspecting: boolean;
|
||||
isSubmitting: boolean;
|
||||
}>(
|
||||
({
|
||||
job,
|
||||
onNavigateToDetail,
|
||||
onEdit,
|
||||
onPause,
|
||||
onUnpause,
|
||||
onKill,
|
||||
onInspect,
|
||||
onDelete,
|
||||
isPausing,
|
||||
isDeleting,
|
||||
isKilling,
|
||||
isInspecting,
|
||||
isSubmitting,
|
||||
}) => {
|
||||
const readableCron = useMemo(() => {
|
||||
try {
|
||||
return cronstrue.toString(job.cron);
|
||||
} catch (e) {
|
||||
console.warn(`Could not parse cron string "${job.cron}":`, e);
|
||||
return job.cron;
|
||||
}
|
||||
}, [job.cron]);
|
||||
|
||||
const formattedLastRun = useMemo(() => {
|
||||
return formatToLocalDateWithTimezone(job.last_run);
|
||||
}, [job.last_run]);
|
||||
|
||||
return (
|
||||
<Card
|
||||
className="p-4 bg-white dark:bg-gray-800 shadow cursor-pointer hover:shadow-lg transition-shadow duration-200"
|
||||
onClick={() => onNavigateToDetail(job.id)}
|
||||
>
|
||||
<div className="flex justify-between items-start">
|
||||
<div className="flex-grow mr-2 overflow-hidden">
|
||||
<h3
|
||||
className="text-base font-semibold text-gray-900 dark:text-white truncate"
|
||||
title={job.id}
|
||||
>
|
||||
{job.id}
|
||||
</h3>
|
||||
<p
|
||||
className="text-xs text-gray-500 dark:text-gray-400 mt-1 break-all"
|
||||
title={job.source}
|
||||
>
|
||||
Source: {job.source}
|
||||
</p>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1" title={readableCron}>
|
||||
Schedule: {readableCron}
|
||||
</p>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||
Last Run: {formattedLastRun}
|
||||
</p>
|
||||
{job.execution_mode && (
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||
Mode:{' '}
|
||||
<span
|
||||
className={`inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium ${
|
||||
job.execution_mode === 'foreground'
|
||||
? 'bg-blue-100 text-blue-800 dark:bg-blue-900/30 dark:text-blue-300'
|
||||
: 'bg-gray-100 text-gray-800 dark:bg-gray-800 dark:text-gray-300'
|
||||
}`}
|
||||
>
|
||||
{job.execution_mode === 'foreground' ? '🖥️ Foreground' : '⚡ Background'}
|
||||
</span>
|
||||
</p>
|
||||
)}
|
||||
{job.currently_running && (
|
||||
<p className="text-xs text-green-500 dark:text-green-400 mt-1 font-semibold flex items-center">
|
||||
<span className="inline-block w-2 h-2 bg-green-500 dark:bg-green-400 rounded-full mr-1 animate-pulse"></span>
|
||||
Currently Running
|
||||
</p>
|
||||
)}
|
||||
{job.paused && (
|
||||
<p className="text-xs text-orange-500 dark:text-orange-400 mt-1 font-semibold flex items-center">
|
||||
<Pause className="w-3 h-3 mr-1" />
|
||||
Paused
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex-shrink-0">
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
className="text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-100/50 dark:hover:bg-gray-800/50"
|
||||
>
|
||||
<MoreHorizontal className="w-4 h-4" />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent
|
||||
className="w-48 p-1 bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-600 shadow-lg"
|
||||
align="end"
|
||||
>
|
||||
<div className="space-y-1">
|
||||
{!job.currently_running && (
|
||||
<>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onEdit(job);
|
||||
}}
|
||||
disabled={isPausing || isDeleting || isSubmitting}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Edit</span>
|
||||
<Edit className="w-4 h-4" />
|
||||
</button>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
if (job.paused) {
|
||||
onUnpause(job.id);
|
||||
} else {
|
||||
onPause(job.id);
|
||||
}
|
||||
}}
|
||||
disabled={isPausing || isDeleting}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>{job.paused ? 'Resume schedule' : 'Stop schedule'}</span>
|
||||
{job.paused ? <Play className="w-4 h-4" /> : <Pause className="w-4 h-4" />}
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{job.currently_running && (
|
||||
<>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onInspect(job.id);
|
||||
}}
|
||||
disabled={isInspecting || isKilling}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Inspect</span>
|
||||
<Eye className="w-4 h-4" />
|
||||
</button>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onKill(job.id);
|
||||
}}
|
||||
disabled={isKilling || isInspecting}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Kill job</span>
|
||||
<Square className="w-4 h-4" />
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
<hr className="border-gray-200 dark:border-gray-600 my-1" />
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onDelete(job.id);
|
||||
}}
|
||||
disabled={isPausing || isDeleting || isKilling || isInspecting}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-red-600 dark:text-red-400 hover:bg-red-50 dark:hover:bg-red-900/20 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Delete</span>
|
||||
<TrashIcon className="w-4 h-4" />
|
||||
</button>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
ScheduleCard.displayName = 'ScheduleCard';
|
||||
|
||||
const SchedulesView: React.FC<SchedulesViewProps> = ({ onClose }) => {
|
||||
const [schedules, setSchedules] = useState<ScheduledJob[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
@@ -47,12 +243,19 @@ const SchedulesView: React.FC<SchedulesViewProps> = ({ onClose }) => {
|
||||
|
||||
const [viewingScheduleId, setViewingScheduleId] = useState<string | null>(null);
|
||||
|
||||
const fetchSchedules = async () => {
|
||||
setIsLoading(true);
|
||||
// Memoized fetch function to prevent unnecessary re-creation
|
||||
const fetchSchedules = useCallback(async (isRefresh = false) => {
|
||||
if (!isRefresh) setIsLoading(true);
|
||||
setApiError(null);
|
||||
try {
|
||||
const fetchedSchedules = await listSchedules();
|
||||
setSchedules(fetchedSchedules);
|
||||
setSchedules((prevSchedules) => {
|
||||
// Only update if schedules actually changed to prevent unnecessary re-renders
|
||||
if (JSON.stringify(prevSchedules) !== JSON.stringify(fetchedSchedules)) {
|
||||
return fetchedSchedules;
|
||||
}
|
||||
return prevSchedules;
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch schedules:', error);
|
||||
setApiError(
|
||||
@@ -61,9 +264,9 @@ const SchedulesView: React.FC<SchedulesViewProps> = ({ onClose }) => {
|
||||
: 'An unknown error occurred while fetching schedules.'
|
||||
);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
if (!isRefresh) setIsLoading(false);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (viewingScheduleId === null) {
|
||||
@@ -77,38 +280,57 @@ const SchedulesView: React.FC<SchedulesViewProps> = ({ onClose }) => {
|
||||
// The CreateScheduleModal will handle the deep link
|
||||
}
|
||||
}
|
||||
}, [viewingScheduleId]);
|
||||
}, [viewingScheduleId, fetchSchedules]);
|
||||
|
||||
// Add a periodic refresh for schedules list to keep the running status up to date
|
||||
// Optimized periodic refresh - only refresh if not actively doing something
|
||||
useEffect(() => {
|
||||
if (viewingScheduleId !== null) return;
|
||||
|
||||
// Set up periodic refresh every 10 seconds
|
||||
// Set up periodic refresh every 15 seconds (increased from 8 to reduce flashing)
|
||||
const intervalId = setInterval(() => {
|
||||
if (viewingScheduleId === null && !isRefreshing && !isLoading) {
|
||||
fetchSchedules();
|
||||
if (
|
||||
viewingScheduleId === null &&
|
||||
!isRefreshing &&
|
||||
!isLoading &&
|
||||
!isSubmitting &&
|
||||
pausingScheduleIds.size === 0 &&
|
||||
deletingScheduleIds.size === 0 &&
|
||||
killingScheduleIds.size === 0 &&
|
||||
inspectingScheduleIds.size === 0
|
||||
) {
|
||||
fetchSchedules(true); // Pass true to indicate this is a refresh
|
||||
}
|
||||
}, 10000);
|
||||
}, 15000); // Increased from 8000 to 15000 (15 seconds)
|
||||
|
||||
// Clean up on unmount
|
||||
return () => {
|
||||
clearInterval(intervalId);
|
||||
};
|
||||
}, [viewingScheduleId, isRefreshing, isLoading]);
|
||||
}, [
|
||||
viewingScheduleId,
|
||||
isRefreshing,
|
||||
isLoading,
|
||||
isSubmitting,
|
||||
pausingScheduleIds.size,
|
||||
deletingScheduleIds.size,
|
||||
killingScheduleIds.size,
|
||||
inspectingScheduleIds.size,
|
||||
fetchSchedules,
|
||||
]);
|
||||
|
||||
const handleOpenCreateModal = () => {
|
||||
setSubmitApiError(null);
|
||||
setIsCreateModalOpen(true);
|
||||
};
|
||||
|
||||
const handleRefresh = async () => {
|
||||
const handleRefresh = useCallback(async () => {
|
||||
setIsRefreshing(true);
|
||||
try {
|
||||
await fetchSchedules();
|
||||
} finally {
|
||||
setIsRefreshing(false);
|
||||
}
|
||||
};
|
||||
}, [fetchSchedules]);
|
||||
|
||||
const handleCloseCreateModal = () => {
|
||||
setIsCreateModalOpen(false);
|
||||
@@ -341,15 +563,6 @@ const SchedulesView: React.FC<SchedulesViewProps> = ({ onClose }) => {
|
||||
setViewingScheduleId(null);
|
||||
};
|
||||
|
||||
const getReadableCron = (cronString: string) => {
|
||||
try {
|
||||
return cronstrue.toString(cronString);
|
||||
} catch (e) {
|
||||
console.warn(`Could not parse cron string "${cronString}":`, e);
|
||||
return cronString;
|
||||
}
|
||||
};
|
||||
|
||||
if (viewingScheduleId) {
|
||||
return (
|
||||
<ScheduleDetailView
|
||||
@@ -412,163 +625,22 @@ const SchedulesView: React.FC<SchedulesViewProps> = ({ onClose }) => {
|
||||
{!isLoading && schedules.length > 0 && (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
{schedules.map((job) => (
|
||||
<Card
|
||||
<ScheduleCard
|
||||
key={job.id}
|
||||
className="p-4 bg-white dark:bg-gray-800 shadow cursor-pointer hover:shadow-lg transition-shadow duration-200"
|
||||
onClick={() => handleNavigateToScheduleDetail(job.id)}
|
||||
>
|
||||
<div className="flex justify-between items-start">
|
||||
<div className="flex-grow mr-2 overflow-hidden">
|
||||
<h3
|
||||
className="text-base font-semibold text-gray-900 dark:text-white truncate"
|
||||
title={job.id}
|
||||
>
|
||||
{job.id}
|
||||
</h3>
|
||||
<p
|
||||
className="text-xs text-gray-500 dark:text-gray-400 mt-1 break-all"
|
||||
title={job.source}
|
||||
>
|
||||
Source: {job.source}
|
||||
</p>
|
||||
<p
|
||||
className="text-xs text-gray-500 dark:text-gray-400 mt-1"
|
||||
title={getReadableCron(job.cron)}
|
||||
>
|
||||
Schedule: {getReadableCron(job.cron)}
|
||||
</p>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400 mt-1">
|
||||
Last Run:{' '}
|
||||
{job.last_run ? new Date(job.last_run).toLocaleString() : 'Never'}
|
||||
</p>
|
||||
{job.currently_running && (
|
||||
<p className="text-xs text-green-500 dark:text-green-400 mt-1 font-semibold flex items-center">
|
||||
<span className="inline-block w-2 h-2 bg-green-500 dark:bg-green-400 rounded-full mr-1 animate-pulse"></span>
|
||||
Currently Running
|
||||
</p>
|
||||
)}
|
||||
{job.paused && (
|
||||
<p className="text-xs text-orange-500 dark:text-orange-400 mt-1 font-semibold flex items-center">
|
||||
<Pause className="w-3 h-3 mr-1" />
|
||||
Paused
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex-shrink-0">
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
className="text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-100/50 dark:hover:bg-gray-800/50"
|
||||
>
|
||||
<MoreHorizontal className="w-4 h-4" />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent
|
||||
className="w-48 p-1 bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-600 shadow-lg"
|
||||
align="end"
|
||||
>
|
||||
<div className="space-y-1">
|
||||
{!job.currently_running && (
|
||||
<>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleOpenEditModal(job);
|
||||
}}
|
||||
disabled={
|
||||
pausingScheduleIds.has(job.id) ||
|
||||
deletingScheduleIds.has(job.id) ||
|
||||
isSubmitting
|
||||
}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Edit</span>
|
||||
<Edit className="w-4 h-4" />
|
||||
</button>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
if (job.paused) {
|
||||
handleUnpauseSchedule(job.id);
|
||||
} else {
|
||||
handlePauseSchedule(job.id);
|
||||
}
|
||||
}}
|
||||
disabled={
|
||||
pausingScheduleIds.has(job.id) ||
|
||||
deletingScheduleIds.has(job.id)
|
||||
}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>{job.paused ? 'Resume schedule' : 'Stop schedule'}</span>
|
||||
{job.paused ? (
|
||||
<Play className="w-4 h-4" />
|
||||
) : (
|
||||
<Pause className="w-4 h-4" />
|
||||
)}
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{job.currently_running && (
|
||||
<>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleInspectRunningJob(job.id);
|
||||
}}
|
||||
disabled={
|
||||
inspectingScheduleIds.has(job.id) ||
|
||||
killingScheduleIds.has(job.id)
|
||||
}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Inspect</span>
|
||||
<Eye className="w-4 h-4" />
|
||||
</button>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleKillRunningJob(job.id);
|
||||
}}
|
||||
disabled={
|
||||
killingScheduleIds.has(job.id) ||
|
||||
inspectingScheduleIds.has(job.id)
|
||||
}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-gray-900 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-700 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Kill job</span>
|
||||
<Square className="w-4 h-4" />
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
<hr className="border-gray-200 dark:border-gray-600 my-1" />
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleDeleteSchedule(job.id);
|
||||
}}
|
||||
disabled={
|
||||
pausingScheduleIds.has(job.id) ||
|
||||
deletingScheduleIds.has(job.id) ||
|
||||
killingScheduleIds.has(job.id) ||
|
||||
inspectingScheduleIds.has(job.id)
|
||||
}
|
||||
className="w-full flex items-center justify-between px-3 py-2 text-sm text-red-600 dark:text-red-400 hover:bg-red-50 dark:hover:bg-red-900/20 rounded-md disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
<span>Delete</span>
|
||||
<TrashIcon className="w-4 h-4" />
|
||||
</button>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
job={job}
|
||||
onNavigateToDetail={handleNavigateToScheduleDetail}
|
||||
onEdit={handleOpenEditModal}
|
||||
onPause={handlePauseSchedule}
|
||||
onUnpause={handleUnpauseSchedule}
|
||||
onKill={handleKillRunningJob}
|
||||
onInspect={handleInspectRunningJob}
|
||||
onDelete={handleDeleteSchedule}
|
||||
isPausing={pausingScheduleIds.has(job.id)}
|
||||
isDeleting={deletingScheduleIds.has(job.id)}
|
||||
isKilling={killingScheduleIds.has(job.id)}
|
||||
isInspecting={inspectingScheduleIds.has(job.id)}
|
||||
isSubmitting={isSubmitting}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -8,6 +8,7 @@ import { ToolSelectionStrategySection } from './tool_selection_strategy/ToolSele
|
||||
import SessionSharingSection from './sessions/SessionSharingSection';
|
||||
import { ResponseStylesSection } from './response_styles/ResponseStylesSection';
|
||||
import AppSettingsSection from './app/AppSettingsSection';
|
||||
import SchedulerSection from './scheduler/SchedulerSection';
|
||||
import { ExtensionConfig } from '../../api';
|
||||
import MoreMenuLayout from '../more_menu/MoreMenuLayout';
|
||||
|
||||
@@ -47,6 +48,8 @@ export default function SettingsView({
|
||||
deepLinkConfig={viewOptions.deepLinkConfig}
|
||||
showEnvVars={viewOptions.showEnvVars}
|
||||
/>
|
||||
{/* Scheduler Section */}
|
||||
<SchedulerSection />
|
||||
{/* Goose Modes */}
|
||||
<ModeSection setView={setView} />
|
||||
{/*Session sharing*/}
|
||||
|
||||
@@ -0,0 +1,104 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { SchedulingEngine, Settings } from '../../../utils/settings';
|
||||
|
||||
interface SchedulerSectionProps {
|
||||
onSchedulingEngineChange?: (engine: SchedulingEngine) => void;
|
||||
}
|
||||
|
||||
export default function SchedulerSection({ onSchedulingEngineChange }: SchedulerSectionProps) {
|
||||
const [schedulingEngine, setSchedulingEngine] = useState<SchedulingEngine>('builtin-cron');
|
||||
|
||||
useEffect(() => {
|
||||
// Load current scheduling engine setting
|
||||
const loadSchedulingEngine = async () => {
|
||||
try {
|
||||
const settings = (await window.electron.getSettings()) as Settings | null;
|
||||
if (settings?.schedulingEngine) {
|
||||
setSchedulingEngine(settings.schedulingEngine);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load scheduling engine setting:', error);
|
||||
}
|
||||
};
|
||||
|
||||
loadSchedulingEngine();
|
||||
}, []);
|
||||
|
||||
const handleEngineChange = async (engine: SchedulingEngine) => {
|
||||
try {
|
||||
setSchedulingEngine(engine);
|
||||
|
||||
// Save the setting
|
||||
await window.electron.setSchedulingEngine(engine);
|
||||
|
||||
// Notify parent component
|
||||
if (onSchedulingEngineChange) {
|
||||
onSchedulingEngineChange(engine);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to save scheduling engine setting:', error);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="px-8">
|
||||
<div className="mb-4">
|
||||
<h2 className="text-xl font-medium text-textStandard mb-2">Scheduling Engine</h2>
|
||||
<p className="text-sm text-textSubtle mb-4">
|
||||
Choose which scheduling backend to use for scheduled recipes and tasks.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-start space-x-3">
|
||||
<input
|
||||
type="radio"
|
||||
id="builtin-cron"
|
||||
name="schedulingEngine"
|
||||
value="builtin-cron"
|
||||
checked={schedulingEngine === 'builtin-cron'}
|
||||
onChange={() => handleEngineChange('builtin-cron')}
|
||||
className="mt-1 h-4 w-4 text-blue-600 focus:ring-blue-500 border-gray-300"
|
||||
/>
|
||||
<div className="flex-1">
|
||||
<label htmlFor="builtin-cron" className="block text-sm font-medium text-textStandard">
|
||||
Built-in Cron (Default)
|
||||
</label>
|
||||
<p className="text-xs text-textSubtle mt-1">
|
||||
Uses Goose's built-in cron scheduler. Simple and reliable for basic scheduling needs.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-start space-x-3">
|
||||
<input
|
||||
type="radio"
|
||||
id="temporal"
|
||||
name="schedulingEngine"
|
||||
value="temporal"
|
||||
checked={schedulingEngine === 'temporal'}
|
||||
onChange={() => handleEngineChange('temporal')}
|
||||
className="mt-1 h-4 w-4 text-blue-600 focus:ring-blue-500 border-gray-300"
|
||||
/>
|
||||
<div className="flex-1">
|
||||
<label htmlFor="temporal" className="block text-sm font-medium text-textStandard">
|
||||
Temporal
|
||||
</label>
|
||||
<p className="text-xs text-textSubtle mt-1">
|
||||
Uses Temporal workflow engine for advanced scheduling features. Requires Temporal CLI
|
||||
to be installed.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mt-4 p-3 bg-bgSubtle rounded-md">
|
||||
<p className="text-xs text-textSubtle">
|
||||
<strong>Note:</strong> Changing the scheduling engine will apply to new Goose sessions.
|
||||
You will need to restart Goose for the change to take full effect. <br />
|
||||
The scheduling engines do not share the list of schedules.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -32,6 +32,8 @@ import {
|
||||
loadSettings,
|
||||
saveSettings,
|
||||
updateEnvironmentVariables,
|
||||
updateSchedulingEngineEnvironment,
|
||||
SchedulingEngine,
|
||||
} from './utils/settings';
|
||||
import * as crypto from 'crypto';
|
||||
import * as electron from 'electron';
|
||||
@@ -155,6 +157,14 @@ if (process.platform === 'win32') {
|
||||
if (configParam) {
|
||||
try {
|
||||
recipeConfig = JSON.parse(Buffer.from(configParam, 'base64').toString('utf-8'));
|
||||
|
||||
// Check if this is a scheduled job
|
||||
const scheduledJobId = parsedUrl.searchParams.get('scheduledJob');
|
||||
if (scheduledJobId) {
|
||||
console.log(`[main] Opening scheduled job: ${scheduledJobId}`);
|
||||
recipeConfig.scheduledJobId = scheduledJobId;
|
||||
recipeConfig.isScheduledExecution = true;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed to parse bot config:', e);
|
||||
}
|
||||
@@ -250,6 +260,14 @@ function processProtocolUrl(parsedUrl: URL, window: BrowserWindow) {
|
||||
if (configParam) {
|
||||
try {
|
||||
recipeConfig = JSON.parse(Buffer.from(configParam, 'base64').toString('utf-8'));
|
||||
|
||||
// Check if this is a scheduled job
|
||||
const scheduledJobId = parsedUrl.searchParams.get('scheduledJob');
|
||||
if (scheduledJobId) {
|
||||
console.log(`[main] Opening scheduled job: ${scheduledJobId}`);
|
||||
recipeConfig.scheduledJobId = scheduledJobId;
|
||||
recipeConfig.isScheduledExecution = true;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed to parse bot config:', e);
|
||||
}
|
||||
@@ -274,6 +292,14 @@ app.on('open-url', async (_event, url) => {
|
||||
if (configParam) {
|
||||
try {
|
||||
recipeConfig = JSON.parse(Buffer.from(base64, 'base64').toString('utf-8'));
|
||||
|
||||
// Check if this is a scheduled job
|
||||
const scheduledJobId = parsedUrl.searchParams.get('scheduledJob');
|
||||
if (scheduledJobId) {
|
||||
console.log(`[main] Opening scheduled job: ${scheduledJobId}`);
|
||||
recipeConfig.scheduledJobId = scheduledJobId;
|
||||
recipeConfig.isScheduledExecution = true;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Failed to parse bot config:', e);
|
||||
}
|
||||
@@ -422,8 +448,17 @@ const createChat = async (
|
||||
} else {
|
||||
// Apply current environment settings before creating chat
|
||||
updateEnvironmentVariables(envToggles);
|
||||
|
||||
// Apply scheduling engine setting
|
||||
const settings = loadSettings();
|
||||
updateSchedulingEngineEnvironment(settings.schedulingEngine);
|
||||
|
||||
// Start new Goosed process for regular windows
|
||||
const [newPort, newWorkingDir, newGoosedProcess] = await startGoosed(app, dir);
|
||||
// Pass through scheduling engine environment variables
|
||||
const envVars = {
|
||||
GOOSE_SCHEDULER_TYPE: process.env.GOOSE_SCHEDULER_TYPE,
|
||||
};
|
||||
const [newPort, newWorkingDir, newGoosedProcess] = await startGoosed(app, dir, envVars);
|
||||
port = newPort;
|
||||
working_dir = newWorkingDir;
|
||||
goosedProcess = newGoosedProcess;
|
||||
@@ -750,6 +785,33 @@ ipcMain.handle('directory-chooser', (_event, replace: boolean = false) => {
|
||||
return openDirectoryDialog(replace);
|
||||
});
|
||||
|
||||
// Handle scheduling engine settings
|
||||
ipcMain.handle('get-settings', () => {
|
||||
try {
|
||||
const settings = loadSettings();
|
||||
return settings;
|
||||
} catch (error) {
|
||||
console.error('Error getting settings:', error);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
ipcMain.handle('set-scheduling-engine', async (_event, engine: string) => {
|
||||
try {
|
||||
const settings = loadSettings();
|
||||
settings.schedulingEngine = engine as SchedulingEngine;
|
||||
saveSettings(settings);
|
||||
|
||||
// Update the environment variable immediately
|
||||
updateSchedulingEngineEnvironment(settings.schedulingEngine);
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('Error setting scheduling engine:', error);
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
// Handle menu bar icon visibility
|
||||
ipcMain.handle('set-menu-bar-icon', async (_event, show: boolean) => {
|
||||
try {
|
||||
|
||||
@@ -79,6 +79,8 @@ type ElectronAPI = {
|
||||
getMenuBarIconState: () => Promise<boolean>;
|
||||
setDockIcon: (show: boolean) => Promise<boolean>;
|
||||
getDockIconState: () => Promise<boolean>;
|
||||
getSettings: () => Promise<unknown | null>;
|
||||
setSchedulingEngine: (engine: string) => Promise<boolean>;
|
||||
setQuitConfirmation: (show: boolean) => Promise<boolean>;
|
||||
getQuitConfirmationState: () => Promise<boolean>;
|
||||
openNotificationsSettings: () => Promise<boolean>;
|
||||
@@ -157,6 +159,8 @@ const electronAPI: ElectronAPI = {
|
||||
getMenuBarIconState: () => ipcRenderer.invoke('get-menu-bar-icon-state'),
|
||||
setDockIcon: (show: boolean) => ipcRenderer.invoke('set-dock-icon', show),
|
||||
getDockIconState: () => ipcRenderer.invoke('get-dock-icon-state'),
|
||||
getSettings: () => ipcRenderer.invoke('get-settings'),
|
||||
setSchedulingEngine: (engine: string) => ipcRenderer.invoke('set-scheduling-engine', engine),
|
||||
setQuitConfirmation: (show: boolean) => ipcRenderer.invoke('set-quit-confirmation', show),
|
||||
getQuitConfirmationState: () => ipcRenderer.invoke('get-quit-confirmation-state'),
|
||||
openNotificationsSettings: () => ipcRenderer.invoke('open-notifications-settings'),
|
||||
|
||||
@@ -17,6 +17,9 @@ export interface Recipe {
|
||||
context?: string[];
|
||||
profile?: string;
|
||||
mcps?: number;
|
||||
// Properties added for scheduled execution
|
||||
scheduledJobId?: string;
|
||||
isScheduledExecution?: boolean;
|
||||
}
|
||||
|
||||
export interface CreateRecipeRequest {
|
||||
|
||||
@@ -20,6 +20,7 @@ export interface ScheduledJob {
|
||||
paused?: boolean;
|
||||
current_session_id?: string | null;
|
||||
process_start_time?: string | null;
|
||||
execution_mode?: string | null; // "foreground" or "background"
|
||||
}
|
||||
|
||||
export interface ScheduleSession {
|
||||
@@ -55,6 +56,7 @@ export async function createSchedule(request: {
|
||||
id: string;
|
||||
recipe_source: string;
|
||||
cron: string;
|
||||
execution_mode?: string;
|
||||
}): Promise<ScheduledJob> {
|
||||
try {
|
||||
const response = await apiCreateSchedule<true>({ body: request });
|
||||
|
||||
41
ui/desktop/src/utils/date.ts
Normal file
41
ui/desktop/src/utils/date.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
export const formatToLocalDateTime = (dateString?: string | null): string => {
|
||||
if (!dateString) {
|
||||
return 'N/A';
|
||||
}
|
||||
try {
|
||||
const date = new Date(dateString);
|
||||
// Check if the date is valid
|
||||
if (isNaN(date.getTime())) {
|
||||
return 'Invalid Date';
|
||||
}
|
||||
return date.toLocaleString(); // Uses user's locale and timezone
|
||||
} catch (e) {
|
||||
console.error('Error formatting date:', e);
|
||||
return 'Invalid Date';
|
||||
}
|
||||
};
|
||||
|
||||
export const formatToLocalDateWithTimezone = (dateString?: string | null): string => {
|
||||
if (!dateString) {
|
||||
return 'N/A';
|
||||
}
|
||||
try {
|
||||
const date = new Date(dateString);
|
||||
if (isNaN(date.getTime())) {
|
||||
return 'Invalid Date';
|
||||
}
|
||||
// Format: Jan 1, 2023, 10:00:00 AM PST (example)
|
||||
return date.toLocaleString(undefined, {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
second: '2-digit',
|
||||
timeZoneName: 'short',
|
||||
});
|
||||
} catch (e) {
|
||||
console.error('Error formatting date with timezone:', e);
|
||||
return 'Invalid Date';
|
||||
}
|
||||
};
|
||||
@@ -8,10 +8,13 @@ export interface EnvToggles {
|
||||
GOOSE_SERVER__COMPUTER_CONTROLLER: boolean;
|
||||
}
|
||||
|
||||
export type SchedulingEngine = 'builtin-cron' | 'temporal';
|
||||
|
||||
export interface Settings {
|
||||
envToggles: EnvToggles;
|
||||
showMenuBarIcon: boolean;
|
||||
showDockIcon: boolean;
|
||||
schedulingEngine: SchedulingEngine;
|
||||
showQuitConfirmation: boolean;
|
||||
}
|
||||
|
||||
@@ -25,6 +28,7 @@ const defaultSettings: Settings = {
|
||||
},
|
||||
showMenuBarIcon: true,
|
||||
showDockIcon: true,
|
||||
schedulingEngine: 'builtin-cron',
|
||||
showQuitConfirmation: true,
|
||||
};
|
||||
|
||||
@@ -64,6 +68,15 @@ export function updateEnvironmentVariables(envToggles: EnvToggles): void {
|
||||
}
|
||||
}
|
||||
|
||||
export function updateSchedulingEngineEnvironment(schedulingEngine: SchedulingEngine): void {
|
||||
// Set GOOSE_SCHEDULER_TYPE based on the scheduling engine setting
|
||||
if (schedulingEngine === 'temporal') {
|
||||
process.env.GOOSE_SCHEDULER_TYPE = 'temporal';
|
||||
} else {
|
||||
process.env.GOOSE_SCHEDULER_TYPE = 'legacy';
|
||||
}
|
||||
}
|
||||
|
||||
// Menu management
|
||||
export function createEnvironmentMenu(
|
||||
envToggles: EnvToggles,
|
||||
|
||||
Reference in New Issue
Block a user